diff --git a/GRADLE.CHEATSHEET.asciidoc b/GRADLE.CHEATSHEET.asciidoc deleted file mode 100644 index 3ffe77403d3..00000000000 --- a/GRADLE.CHEATSHEET.asciidoc +++ /dev/null @@ -1,61 +0,0 @@ -To compile `x-plugins`, you must clone the Elasticsearch repository into the same parent directory. For example: - -[source,bash] ----- -$ mkdir elastic -$ cd elastic -$ git clone git@github.com:elastic/elasticsearch.git -$ git clone git@github.com:elastic/x-plugins.git -$ git clone git@github.com:elastic/kibana.git <1> ----- -<1> For anyone doing UI development, it's also useful to have Kibana at the same level. - -Once cloned, any command should be executed from the **elasticsearch** directory. This ensures that the full dependency tree is available. - -[source,bash] ----- -$ cd elasticsearch -$ gradle clean test check <1> ----- -<1> This will run the `clean` task, `test` task, and then the `check` task on _every_ project that has it. However, `check` requires that `test` be run, so it won't _rerun_ `test`. `clean` is unnecessary here, but people often use it anyway. - -If this command were run in a different order, then it would still follow the same rules, but the behavior would change: - -[source,bash] ----- -$ gradle check test clean <1> ----- -<1> It would run every task that `check` requires (e.g., `test` and `integTest`), skip `test` because it has already been run (indirectly by `check`), and then finally it would _wastefully_ delete every project output. - -As a quick helper, below are the equivalent commands from `maven` to `gradle`. You can also run `gradle tasks` to see all tasks that are available to run. - -[cols="3*", options="header"] -|==== -| Maven | Gradle | Description -| `clean` | `clean` | Delete anything that exists already. You do _not_ generally need to run `clean` with Gradle for any task that _Gradle_ manages the inputs/outputs (in other words, it knows when it needs to rebuild versus reuse). -| `test` | `test` | Run all unit tests. -| `verify` | `check` | Run all tests, plus extra checks (e.g., `checkStyle`, `forbiddenApis`, etc.). -| `verify -Dskip.unit.tests` | `integTest` | Run only integration tests. -| `package -DskipTests` | `assemble` | Output is in `${project.projectDir}/build/distributions` -| `install -DskipTests` | `install` | Build jars and place them into the local _Maven_ repository (yes, even with Gradle). - -This should be unnecessary with the unified build! -|==== - -The full task list, with a minor breakout as a graph of dependencies can be seen with: - -[source,bash] ----- -$ gradle tasks --all ----- - -Given that we currently have 80 projects, this can be extremely verbose. - -With Gradle, you can easily target specific `projects` to run commands against, and it will build all necessary dependencies to make it happen. For example, if you make a change to a specific test in the `x-pack` subproject, then you can specifically invoke its `test` task. - -[source,bash] ----- -$ gradle :x-plugins:elasticsearch:x-pack:test -Dtests.class=*YourTests ----- - -This applies to any command that follows the Directed Acyclic Graph (DAG) for its dependencies. The above example would trigger Elasticsearch `core` to be built, as well as the test framework and any other dependencies that it may have. 
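Because the task report across all ~80 projects is so verbose, it can also be scoped to a single subproject. The following is a minimal sketch, assuming the `:x-pack-elasticsearch:plugin:core` project path that appears elsewhere in this change; substitute whatever project path you actually need.

[source,bash]
----
# List only the tasks defined for one subproject instead of the whole build.
# The project path here is an assumption taken from other files in this diff.
$ gradle :x-pack-elasticsearch:plugin:core:tasks --all
----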
diff --git a/build.gradle b/build.gradle index ba38530a631..e3a6d6cc4b2 100644 --- a/build.gradle +++ b/build.gradle @@ -1,4 +1,5 @@ import org.elasticsearch.gradle.BuildPlugin +import org.elasticsearch.gradle.plugin.PluginBuildPlugin import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.precommit.LicenseHeadersTask @@ -8,8 +9,14 @@ if (project.projectDir.name != 'x-pack-elasticsearch') { task wrapper(type: Wrapper) +Project xpackRootProject = project + subprojects { group = 'org.elasticsearch.plugin' + ext.xpackRootProject = xpackRootProject + ext.xpackProject = { String projectName -> xpackRootProject.project(projectName) } + // helper method to find the path to a module + ext.xpackModule = { String moduleName -> xpackProject("plugin:${moduleName}").path } plugins.withType(MavenPublishPlugin).whenPluginAdded { publishing { @@ -34,8 +41,13 @@ subprojects { } plugins.withType(BuildPlugin).whenPluginAdded { - project.licenseFile = project(':x-pack-elasticsearch').file('LICENSE.txt') - project.noticeFile = project(':x-pack-elasticsearch').file('NOTICE.txt') + project.licenseFile = xpackRootProject.file('LICENSE.txt') + project.noticeFile = xpackRootProject.file('NOTICE.txt') + } + + plugins.withType(PluginBuildPlugin).whenPluginAdded { + project.esplugin.licenseFile = xpackRootProject.file('LICENSE.txt') + project.esplugin.noticeFile = xpackRootProject.file('NOTICE.txt') } } @@ -53,16 +65,16 @@ subprojects { approvedLicenses = ['Elasticsearch Confidential', 'Generated'] additionalLicense 'ESCON', 'Elasticsearch Confidential', 'ELASTICSEARCH CONFIDENTIAL' } - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-api:${version}": ':x-pack-elasticsearch:plugin:core'] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-core:${version}": ':x-pack-elasticsearch:plugin:core'] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-deprecation:${version}": ':x-pack-elasticsearch:plugin:deprecation'] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-graph:${version}": ':x-pack-elasticsearch:plugin:graph'] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-logstash:${version}": ':x-pack-elasticsearch:plugin:logstash'] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-ml:${version}": ':x-pack-elasticsearch:plugin:ml'] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-monitoring:${version}": ':x-pack-elasticsearch:plugin:monitoring'] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-security:${version}": ':x-pack-elasticsearch:plugin:security'] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-upgrade:${version}": ':x-pack-elasticsearch:plugin:upgrade'] - ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-watcher:${version}": ':x-pack-elasticsearch:plugin:watcher'] + ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-api:${version}": xpackModule('core')] + ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-core:${version}": xpackModule('core')] + ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-deprecation:${version}": xpackModule('deprecation')] + ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-graph:${version}": xpackModule('graph')] + ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-logstash:${version}": xpackModule('logstash')] + ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-ml:${version}": xpackModule('ml')] + ext.projectSubstitutions += [ 
"org.elasticsearch.plugin:x-pack-monitoring:${version}": xpackModule('monitoring')] + ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-security:${version}": xpackModule('security')] + ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-upgrade:${version}": xpackModule('upgrade')] + ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-watcher:${version}": xpackModule('watcher')] for (final Version version : versionCollection.versionsIndexCompatibleWithCurrent) { if (version.branch != null) { diff --git a/docs/build.gradle b/docs/build.gradle index a8ebfd3d487..09d0a8727cc 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -19,6 +19,7 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/ml/functions/time.asciidoc', 'en/ml/aggregations.asciidoc', 'en/ml/customurl.asciidoc', + 'en/monitoring/indices.asciidoc', 'en/rest-api/security/ssl.asciidoc', 'en/rest-api/security/users.asciidoc', 'en/rest-api/security/tokens.asciidoc', @@ -94,9 +95,9 @@ buildRestTests.expectedUnconvertedCandidates = [ ] dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') - testCompile project(path: ':x-pack-elasticsearch:plugin', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + testCompile project(path: xpackProject('plugin').path, configuration: 'testArtifacts') } Closure waitWithAuth = { NodeInfo node, AntBuilder ant -> @@ -141,7 +142,7 @@ Closure waitWithAuth = { NodeInfo node, AntBuilder ant -> } integTestCluster { - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setting 'xpack.security.authc.token.enabled', 'true' // Disable monitoring exporters for the docs tests setting 'xpack.monitoring.exporters._local.type', 'local' @@ -336,41 +337,7 @@ setups['server_metrics_job'] = ''' } } ''' -setups['server_metrics_openjob'] = ''' - - do: - indices.create: - index: server-metrics - body: - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - metric: - properties: - timestamp: - type: date - total: - type: long - - do: - xpack.ml.put_job: - job_id: "total-requests" - body: > - { - "description" : "Total sum of requests", - "analysis_config" : { - "bucket_span":"10m", - "detectors" :[ - { - "detector_description": "Sum of total", - "function": "sum", - "field_name": "total" - } - ]}, - "data_description" : { - "time_field":"timestamp", - "time_format": "epoch_ms" - } - } +setups['server_metrics_openjob'] = setups['server_metrics_job'] + ''' - do: xpack.ml.put_datafeed: datafeed_id: "datafeed-total-requests" @@ -384,54 +351,7 @@ setups['server_metrics_openjob'] = ''' xpack.ml.open_job: job_id: "total-requests" ''' - -setups['server_metrics_startdf'] = ''' - - do: - indices.create: - index: server-metrics - body: - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - metric: - properties: - timestamp: - type: date - total: - type: long - - do: - xpack.ml.put_job: - job_id: "total-requests" - body: > - { - "description" : "Total sum of requests", - "analysis_config" : { - "bucket_span":"10m", - "detectors" :[ - { - "detector_description": "Sum of total", - "function": "sum", - "field_name": "total" - } - ]}, - "data_description" : { - "time_field":"timestamp", - "time_format": "epoch_ms" - } - } - - do: - xpack.ml.put_datafeed: - 
datafeed_id: "datafeed-total-requests" - body: > - { - "job_id":"total-requests", - "indexes":"server-metrics", - "types":"metric" - } - - do: - xpack.ml.open_job: - job_id: "total-requests" +setups['server_metrics_startdf'] = setups['server_metrics_openjob'] + ''' - do: xpack.ml.start_datafeed: datafeed_id: "datafeed-total-requests" diff --git a/docs/en/ml/calendars.asciidoc b/docs/en/ml/calendars.asciidoc index 9a9590d1d47..117ed5cb42c 100644 --- a/docs/en/ml/calendars.asciidoc +++ b/docs/en/ml/calendars.asciidoc @@ -26,6 +26,8 @@ iCalendar (`.ics`) file in {kib} or a JSON file in the [NOTE] -- +* You must identify scheduled events before your job analyzes the data for that +time period. Machine learning results are not updated retroactively. * If your iCalendar file contains recurring events, only the first occurrence is imported. * Bucket results are generated during scheduled events but they have an diff --git a/docs/en/ml/troubleshooting.asciidoc b/docs/en/ml/troubleshooting.asciidoc index f8070a44b19..22bd285e6e0 100644 --- a/docs/en/ml/troubleshooting.asciidoc +++ b/docs/en/ml/troubleshooting.asciidoc @@ -61,3 +61,26 @@ By default, {ml} results are stored in the `.ml-anomalies-shared` index in {es}. To resolve this issue, click *Advanced > Use dedicated index* when you create the job in {kib}. If you are using the create job API, specify an index name in the `results_index_name` property. + +[[ml-jobnames]] +=== {kib} cannot display jobs with invalid characters in their name + +This problem occurs when you create a job by using the +{ref}/ml-put-job.html[Create Jobs API] then try to view that job in {kib}. In +particular, the problem occurs when you use a period(.) in the job identifier. + +*Symptoms:* + +* When you try to open a job (named, for example, `job.test` in the +**Anomaly Explorer** or the **Single Metric Viewer**, the job name is split and +the text after the period is assumed to be the job name. If a job does not exist +with that abbreviated name, an error occurs. For example: +`Warning Requested job test does not exist`. If a job exists with that +abbreviated name, it is displayed. + +*Resolution:* + +Create jobs in {kib} or ensure that you create jobs with valid identifiers when +you use the {ml} APIs. For more information about valid identifiers, see +{ref}/ml-put-job.html[Create Jobs API] or +{ref}/ml-job-resource.html[Job Resources]. diff --git a/docs/en/monitoring/configuring-monitoring.asciidoc b/docs/en/monitoring/configuring-monitoring.asciidoc new file mode 100644 index 00000000000..41481f214c8 --- /dev/null +++ b/docs/en/monitoring/configuring-monitoring.asciidoc @@ -0,0 +1,16 @@ +[role="xpack"] +[[configuring-monitoring]] +== Configuring Monitoring in {es} +++++ +Configuring Monitoring +++++ + +{monitoring} is enabled by default when you install {xpack}. Advanced monitoring +settings enable you to control how frequently data is collected, configure +timeouts, and set the retention period for locally-stored monitoring indices. You +can also adjust how monitoring data is displayed. For more information, see +<>. + +include::indices.asciidoc[] +include::tribe.asciidoc[] +include::{xes-repo-dir}/settings/monitoring-settings.asciidoc[] diff --git a/docs/en/monitoring/http-export.asciidoc b/docs/en/monitoring/http-export.asciidoc index 804648a5d9d..72bece5ca20 100644 --- a/docs/en/monitoring/http-export.asciidoc +++ b/docs/en/monitoring/http-export.asciidoc @@ -12,7 +12,7 @@ through the network. 
The `http` exporter supports a number of settings that control how it communicates over HTTP to remote clusters. In most cases, it is not necessary to explicitly configure these settings. For detailed -descriptions, see {ref}/monitoring-settings.html[Monitoring Settings]. +descriptions, see <>. [source,yaml] ---------------------------------- diff --git a/docs/en/monitoring/index.asciidoc b/docs/en/monitoring/index.asciidoc index d63a92d53cd..ed5ec3b3cb8 100644 --- a/docs/en/monitoring/index.asciidoc +++ b/docs/en/monitoring/index.asciidoc @@ -20,4 +20,3 @@ introduction to monitoring your Elastic stack, including Logstash and {kib}, see include::stats-export.asciidoc[] include::http-export.asciidoc[] -include::tribe.asciidoc[] diff --git a/docs/en/monitoring/indices.asciidoc b/docs/en/monitoring/indices.asciidoc new file mode 100644 index 00000000000..10d2c212de2 --- /dev/null +++ b/docs/en/monitoring/indices.asciidoc @@ -0,0 +1,42 @@ +[role="xpack"] +[[config-monitoring-indices]] +=== Configuring Indices for Monitoring + +<> are used to configure the indices +that store the monitoring data collected from a cluster. + +You can retrieve the templates through the `_template` API: + +[source,sh] +---------------------------------- +GET /_template/.monitoring-* +---------------------------------- + +By default, the template configures one shard and one replica for the +monitoring indices. To override the default settings, add your own template: + +. Set the `template` pattern to `.monitoring-*`. +. Set the template `order` to `1`. This ensures your template is +applied after the default template, which has an order of 0. +. Specify the `number_of_shards` and/or `number_of_replicas` in the `settings` +section. + +For example, the following template increases the number of shards to five +and the number of replicas to two. + +[source,js] +---------------------------------- +PUT /_template/custom_monitoring +{ + "index_patterns": ".monitoring-*", + "order": 1, + "settings": { + "number_of_shards": 5, + "number_of_replicas": 2 + } +} +---------------------------------- + +IMPORTANT: Only set the `number_of_shards` and `number_of_replicas` in the +settings section. Overriding other monitoring template settings could cause +your monitoring dashboards to stop working correctly. diff --git a/docs/en/monitoring/tribe.asciidoc b/docs/en/monitoring/tribe.asciidoc index 506e1e3591a..8dfc743f04b 100644 --- a/docs/en/monitoring/tribe.asciidoc +++ b/docs/en/monitoring/tribe.asciidoc @@ -1,6 +1,6 @@ [role="xpack"] [[monitoring-tribe]] -== Configuring a Tribe Node to Work with Monitoring +=== Configuring a Tribe Node to Work with Monitoring If you connect to a cluster through a <>, and you want to monitor the tribe node, then you will need to install {xpack} on diff --git a/docs/en/rest-api/ml/jobresource.asciidoc b/docs/en/rest-api/ml/jobresource.asciidoc index f8518d7b0a4..7b788bf34af 100644 --- a/docs/en/rest-api/ml/jobresource.asciidoc +++ b/docs/en/rest-api/ml/jobresource.asciidoc @@ -49,7 +49,9 @@ so do not set the `background_persist_interval` value too low. many. For example, `["group1", "group2"]`. `job_id`:: - (string) The unique identifier for the job. + (string) The unique identifier for the job. This identifier can contain + lowercase alphanumeric characters (a-z and 0-9), hyphens, and underscores. It + must start and end with alphanumeric characters. `job_type`:: (string) Reserved for future use, currently set to `anomaly_detector`. 
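The identifier rules above (lowercase alphanumeric characters, hyphens, and underscores, starting and ending with an alphanumeric character) can be sanity-checked before calling the API. The snippet below is only an illustrative sketch that encodes the rules as stated; it is not an official validation routine, and the example identifiers are arbitrary.

[source,bash]
----
# Rough client-side check of a candidate ML job identifier against the
# documented character rules; a sketch only, not part of the API.
for job_id in "total-requests" "job.test"; do
  if echo "$job_id" | grep -Eq '^[a-z0-9]([a-z0-9_-]*[a-z0-9])?$'; then
    echo "$job_id: looks valid"
  else
    echo "$job_id: not a valid identifier"
  fi
done
----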
diff --git a/docs/en/rest-api/ml/put-job.asciidoc b/docs/en/rest-api/ml/put-job.asciidoc index 975b87c6f1e..e62504a9b0d 100644 --- a/docs/en/rest-api/ml/put-job.asciidoc +++ b/docs/en/rest-api/ml/put-job.asciidoc @@ -16,7 +16,9 @@ This API enables you to instantiate a job. ==== Path Parameters `job_id` (required):: - (string) Identifier for the job + (string) Identifier for the job. This identifier can contain lowercase + alphanumeric characters (a-z and 0-9), hyphens, and underscores. It must + start and end with alphanumeric characters. ==== Request Body diff --git a/docs/en/security/authentication/custom-realm.asciidoc b/docs/en/security/authentication/custom-realm.asciidoc index 57c4f460de6..a7df6f5ff86 100644 --- a/docs/en/security/authentication/custom-realm.asciidoc +++ b/docs/en/security/authentication/custom-realm.asciidoc @@ -3,7 +3,8 @@ If you are using an authentication system that is not supported out-of-the-box by {security}, you can create a custom realm to interact with it to authenticate -users. You implement a custom realm as an {xpack} extension. +users. You implement a custom realm as an SPI-loaded security extension +as part of an ordinary Elasticsearch plugin. [[implementing-custom-realm]] ==== Implementing a Custom Realm @@ -25,7 +26,7 @@ To create a custom realm, you need to: To package your custom realm as a plugin: . Implement an extension class for your realm that extends - `org.elasticsearch.xpack.extensions.XPackExtension`. There you need to + `org.elasticsearch.xpack.core.security.SecurityExtension`. There you need to override one or more of the following methods: + [source,java] @@ -54,29 +55,18 @@ in certain authentication failure events. [source,java] ---------------------------------------------------- @Override -public Collection getRestHeaders() { - ... -} ----------------------------------------------------- -+ -The `getRestHeaders` method returns a collection of header names that should be -copied from the request into the `ThreadContext` where they can be accessed by -the realm. -+ -[source,java] ----------------------------------------------------- -@Override public List getSettingsFilter() { ... } ---------------------------------------------------- + -The `getSettingsFilter` method returns a list of setting names that should be -filtered from the settings APIs as they may contain sensitive credentials. +The `Plugin#getSettingsFilter` method returns a list of setting names that should be +filtered from the settings APIs as they may contain sensitive credentials. Note that this method is not +part of the `SecurityExtension` interface; it is available on the main Elasticsearch plugin class. . Create a build configuration file for the plugin; Gradle is our recommendation. -. Create a `x-pack-extension-descriptor.properties` descriptor file for the - extension. +. Create a `META-INF/services/org.elasticsearch.xpack.core.security.SecurityExtension` descriptor file for the + extension that contains the fully qualified class name of your `org.elasticsearch.xpack.core.security.SecurityExtension` implementation. . Bundle all in a single zip file. [[using-custom-realm]] @@ -85,12 +75,12 @@ filtered from the settings APIs as they may contain sensitive credentials. To use a custom realm: . Install the realm extension on each node in the cluster. You run - `bin/x-pack/extension` with the `install` sub-command and specify the URL + `bin/elasticsearch-plugin` with the `install` sub-command and specify the URL pointing to the zip file that contains the extension.
For example: + [source,shell] ---------------------------------------- -bin/x-pack/extension install file:////my-realm-1.0.zip +bin/elasticsearch-plugin install file:////my-realm-1.0.zip ---------------------------------------- . Add a realm configuration of the appropriate realm type to `elasticsearch.yml` diff --git a/docs/en/security/authorization/custom-roles-provider.asciidoc b/docs/en/security/authorization/custom-roles-provider.asciidoc index b18d181aa37..9056467ced9 100644 --- a/docs/en/security/authorization/custom-roles-provider.asciidoc +++ b/docs/en/security/authorization/custom-roles-provider.asciidoc @@ -3,7 +3,8 @@ If you need to retrieve user roles from a system not supported out-of-the-box by {security}, you can create a custom roles provider to retrieve and resolve -roles. You implement a custom roles provider as an {xpack} extension. +roles. You implement a custom roles provider as an SPI-loaded security extension +as part of an ordinary Elasticsearch plugin. [[implementing-custom-roles-provider]] ==== Implementing a Custom Roles Provider @@ -22,8 +23,8 @@ To create a custom roles provider: To package your custom roles provider as a plugin: -. Implement an extension class for your roles provider that extends - `org.elasticsearch.xpack.core.extensions.XPackExtension`. There you need to +. Implement an extension class for your roles provider that implements + `org.elasticsearch.xpack.core.security.SecurityExtension`. There you need to override one or more of the following methods: + [source,java] @@ -51,12 +52,13 @@ public List getSettingsFilter() { } ---------------------------------------------------- + -The `getSettingsFilter` method returns a list of setting names that should be -filtered from the settings APIs as they may contain sensitive credentials. +The `Plugin#getSettingsFilter` method returns a list of setting names that should be +filtered from the settings APIs as they may contain sensitive credentials. Note that this method is not +part of the `SecurityExtension` interface; it is available on the main Elasticsearch plugin class. . Create a build configuration file for the plugin; Gradle is our recommendation. -. Create a `x-pack-extension-descriptor.properties` descriptor file for the - extension. +. Create a `META-INF/services/org.elasticsearch.xpack.core.security.SecurityExtension` descriptor file for the + extension that contains the fully qualified class name of your `org.elasticsearch.xpack.core.security.SecurityExtension` implementation. . Bundle all in a single zip file. [[using-custom-roles-provider]] @@ -65,12 +67,12 @@ filtered from the settings APIs as they may contain sensitive credentials. To use a custom roles provider: . Install the roles provider extension on each node in the cluster. You run - `bin/x-pack/extension` with the `install` sub-command and specify the URL + `bin/elasticsearch-plugin` with the `install` sub-command and specify the URL pointing to the zip file that contains the extension. For example: + [source,shell] ---------------------------------------- -bin/x-pack/extension install file:////my-roles-provider-1.0.zip +bin/elasticsearch-plugin install file:////my-roles-provider-1.0.zip ---------------------------------------- .
Add any configuration parameters for any of the custom roles provider implementations diff --git a/docs/en/settings/configuring-xes.asciidoc b/docs/en/settings/configuring-xes.asciidoc index 535b3b9ee1a..29c6b95dddf 100644 --- a/docs/en/settings/configuring-xes.asciidoc +++ b/docs/en/settings/configuring-xes.asciidoc @@ -8,7 +8,5 @@ include::{asciidoc-dir}/../../shared/settings.asciidoc[] include::license-settings.asciidoc[] include::ml-settings.asciidoc[] -include::monitoring-settings.asciidoc[] -//include::security-settings.asciidoc[] include::notification-settings.asciidoc[] include::sql-settings.asciidoc[] diff --git a/docs/en/settings/monitoring-settings.asciidoc b/docs/en/settings/monitoring-settings.asciidoc index fd4e4a3cf42..cd567ac5d5b 100644 --- a/docs/en/settings/monitoring-settings.asciidoc +++ b/docs/en/settings/monitoring-settings.asciidoc @@ -95,7 +95,7 @@ local exporter that indexes monitoring data on the cluster where it is installed Use an HTTP exporter to send data to a separate monitoring cluster. For more information, see <>, <>, and -{xpack-ref}/monitoring-cluster.html[Setting up a Separate Monitoring Cluster]. +{xpack-ref}/how-monitoring-works.html[How Monitoring Works]. [float] [[local-exporter-settings]] diff --git a/docs/en/setup/setup-xes.asciidoc b/docs/en/setup/setup-xes.asciidoc index f9aaafee0f6..dae3dba6092 100644 --- a/docs/en/setup/setup-xes.asciidoc +++ b/docs/en/setup/setup-xes.asciidoc @@ -11,6 +11,7 @@ easy-to-install package. To access this functionality, you must -- include::installing-xes.asciidoc[] +include::{xes-repo-dir}/monitoring/configuring-monitoring.asciidoc[] include::{xes-repo-dir}/security/configuring-es.asciidoc[] include::setup-xclient.asciidoc[] include::{xes-repo-dir}/settings/configuring-xes.asciidoc[] diff --git a/license-tools/build.gradle b/license-tools/build.gradle index 125f09cf932..3ef08073bbf 100644 --- a/license-tools/build.gradle +++ b/license-tools/build.gradle @@ -1,7 +1,7 @@ apply plugin: 'elasticsearch.build' dependencies { - compile project(':x-pack-elasticsearch:plugin:core') + compile project(xpackModule('core')) compile "org.elasticsearch:elasticsearch:${version}" testCompile "org.elasticsearch.test:framework:${version}" } diff --git a/license-tools/dev-tools/integration-tests.xml b/license-tools/dev-tools/integration-tests.xml deleted file mode 100644 index 153253fd27d..00000000000 --- a/license-tools/dev-tools/integration-tests.xml +++ /dev/null @@ -1,122 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/license-tools/sample/license_spec.json b/license-tools/sample/license_spec.json deleted file mode 100644 index cff074c0e7d..00000000000 --- a/license-tools/sample/license_spec.json +++ /dev/null @@ -1 +0,0 @@ -{"licenses":[{"uid": "893361dc-9749-4997-93cb-802e3d7fa4a8", "type":"basic","issued_to":"issuedTo","issuer":"issuer","issue_date":"2014-09-29","expiry_date":"2030-08-29","max_nodes":1}]} diff --git a/migrate-issues.py b/migrate-issues.py deleted file mode 100644 index a117546b95d..00000000000 --- a/migrate-issues.py +++ /dev/null @@ -1,194 +0,0 @@ -import argparse -import calendar -import json -import re -import requests -import shutil -import subprocess -import tempfile -import time - -def throttle(response): - if 'X-RateLimit-Remaining' in response.headers: - rate_limit_remaining = 
int(response.headers['X-RateLimit-Remaining']) - if rate_limit_remaining < 16: - rate_limit_reset = int(response.headers['X-RateLimit-Reset']) - delay = rate_limit_reset - calendar.timegm(time.gmtime()) - print('rate limit remaining: {}, rate limit reset: {}, throttling for {} seconds'.format(rate_limit_remaining, rate_limit_reset, delay)) - if delay >= 0: - time.sleep(delay) - - return response - -def authorization_token(token): - return {'Authorization':'token {}'.format(token)} - -def next_url(response): - """Return the next URL following the Link header, otherwise None.""" - nu = None - if 'Link' in response.headers: - links = response.headers['Link'].split(',') - for link in links: - if 'rel="next"' in link: - nu = link.split(';')[0][1:-1] - break - - return nu - -def get(url, token): - """Return the response for the specified URL.""" - headers = authorization_token(token) - return throttle(requests.get(url, headers=headers)) - -def get_all(url, token): - """Returns all pages starting at the specified URL.""" - items = [] - while url is not None: - response = get(url, token) - json = response.json() - if json: - items.extend(response.json()) - url = next_url(response) - else: - url = None - - return items - -def issue_comments(source_owner, source_repo, issue, token): - """Return all issue comments.""" - url = 'https://api.github.com/repos/{}/{}/issues/{}/comments'.format(source_owner, source_repo, issue) - return get_all(url, token) - -assignees_cache = {} - -def repo_assignees(assignee, owner, repo, token): - """Returns True if the assignee is valid for the specified owner/repo, otherwise False.""" - if assignee in assignees_cache: - return assignees_cache[assignee] - url = 'https://api.github.com/repos/{}/{}/assignees/{}'.format(owner, repo, assignee) - response = get(url, token) - assignees_cache[assignee] = response.status_code == 204 - return assignees_cache[assignee] - -def rewrite_issue_links(text, source_owner, source_repo): - return re.sub(r"(\s+)(#\d+)", "\\1{}/{}\\2".format(source_owner, source_repo), text) - -def rewrite_commits(text, source_owner, source_repo, temp): - commits = [] - for match in re.finditer(r"(? - link=$(expr "$ls" : '.*-> \(.*\)$') - if expr "$link" : '/.*' > /dev/null; then - SCRIPT="$link" - else - SCRIPT=$(dirname "$SCRIPT")/"$link" - fi -done - -# determine base directory -BASE_DIR=$(dirname "$SCRIPT")/.. - -# make BASE_DIR absolute -BASE_DIR=$(cd "$BASE_DIR"; pwd) - -PARENT_DIR=$(cd "$BASE_DIR"/../..; pwd) - -# go to the parent directory -cd $PARENT_DIR - -if [ -z ${USE_EXISTING_ES:+x} ]; then - if [ -d "./elasticsearch" ]; then - echo "I expected a clean workspace but an 'elasticsearch' sibling directory already exists in [$PARENT_DIR]!" - echo - echo "Either define 'USE_EXISTING_ES' or remove the existing 'elasticsearch' sibling." - exit 1 - fi - BRANCH=${PR_SOURCE_BRANCH:-${GIT_BRANCH#*/}} # GIT_BRANCH starts with the repo, i.e., origin/master - BRANCH=${BRANCH:-master} # fall back to CI branch if not testing a PR - echo "Checking if branch '$BRANCH' has elasticsearch sibling..." - if [[ -z "$(git ls-remote --heads https://github.com/elastic/elasticsearch.git $BRANCH)" ]]; then - echo "No sibling branch, using PR target branch" - BRANCH=$PR_TARGET_BRANCH - fi - echo "Checking out Elasticsearch '$BRANCH' branch..." 
- git clone -b $BRANCH https://github.com/elastic/elasticsearch.git --depth=1 - printf "Checked out Elasticsearch revision: %s\n" "$(git -C elasticsearch rev-parse HEAD)" -else - if [ -d "./elasticsearch" ]; then - echo "Using existing 'elasticsearch' checkout" - else - echo "You have defined 'USE_EXISTING_ES' but no existing Elasticsearch directory exists!" - exit 2 - fi -fi - -# back to base directory -cd "$BASE_DIR" - -echo "Running x-pack-elasticsearch tests..." -echo "Running in $PWD" - -# output the commands -set -xuf - -# clean -gradle --stacktrace clean - -# Actually run the tests -gradle "${GRADLE_CLI_ARGS[@]}" - -# ~*~ shell-script-mode ~*~ diff --git a/migrate/x-pack-kibana/LICENSE.txt b/migrate/x-pack-kibana/LICENSE.txt deleted file mode 100644 index f4167e00f71..00000000000 --- a/migrate/x-pack-kibana/LICENSE.txt +++ /dev/null @@ -1,120 +0,0 @@ -COMMERCIAL SOFTWARE END USER LICENSE AGREEMENT - - READ THIS COMMERCIAL SOFTWARE END USER LICENSE AGREEMENT CAREFULLY, WHICH CONSTITUTES A LEGALLY BINDING AGREEMENT AND GOVERNS YOUR USE OF ELASTIC’S PROPRIETARY SOFTWARE. BY INSTALLING AND/OR USING SUCH SOFTWARE, YOU ARE INDICATING THAT YOU AGREE TO THE TERMS AND CONDITIONS SET FORTH IN THIS AGREEMENT. IF YOU DO NOT AGREE WITH SUCH TERMS AND CONDITIONS, YOU MAY NOT INSTALL OR USE ANY OF THE SOFTWARE. IF YOU ARE INSTALLING OR USING THE SOFTWARE ON BEHALF OF YOUR EMPLOYER OR ANOTHER ENTITY, YOU REPRESENT AND WARRANT THAT YOU HAVE THE ACTUAL AUTHORITY TO AGREE TO THE TERMS AND CONDITIONS ON BEHALF OF SUCH EMPLOYER OR OTHER ENTITY. - - This COMMERCIAL SOFTWARE END USER LICENSE AGREEMENT (this “Agreement") is entered into by and between the applicable Elastic entity referenced in Attachment 1 hereto (“Elastic”) and the person, or entity on behalf of whom you are acting, as applicable (“You” or “Customer”) that has downloaded any of Elastic’s proprietary software to which this Agreement is attached or in connection with which this Agreement is presented to You (collectively, the “Software”). This Agreement is effective upon the earliest date of the commencement of any License granted pursuant to Section 1.1. below (as applicable, the “Effective Date”). - -1. SOFTWARE LICENSE AND RESTRICTIONS -1.1 License Grants. -(a) Trial Version License. Subject to the terms and conditions of this Agreement, Elastic agrees to grant, and does hereby grant to You, for a period of thirty (30) days from the date on which You first install the Software (the “Trial Term”), a License to use the Eligible Features and Functions of the Software that are applicable to the Trial Version of the Software.   You understand and agree that upon the expiration of a Trial Term, You will no longer be able to use the Software, unless you either (i) purchase a Subscription, in which case You will receive a License under Section 1.1(b) below to use the Eligible Features and Functions of the Software that are applicable to the Subscription level that You purchase, (ii) complete the Registration of Your use of the Software with Elastic, in which case, if available, You will receive a License under Section 1.1(c) below to the Basic Version of the Software or (iii) obtain from Elastic written consent (e-mail sufficient) to extend the Trial Term, which may be granted by Elastic in its sole and absolute discretion. -(b) Subscription License. 
If you enter into a Subscription Agreement with Elastic, then, subject to the terms and conditions of this Agreement and complete payment of any and all applicable Subscription fees, Elastic agrees to grant, and does hereby grant to You during the applicable Subscription Term, and for the restricted scope of this Agreement, a License to use the Eligible Features and Functions of the Software that are applicable to the Subscription level that You have purchased, for the number of Nodes and for the specific Project for which you have purchased a Subscription. The level of Subscription, the number of Nodes and specific Project for which you have purchased such Subscription, are set forth on the applicable ordering document entered into by Elastic and You for the purchase of the applicable Subscription (“Order Form”). -(c) Basic Version License. Subject to the terms and conditions of this Agreement, the availability of such a License for the applicable Software and any applicable limitation on the number of Nodes, and in consideration of the Registration of Your use the Software, Elastic agrees to grant, and does hereby grant to You, for a period of one (1) year from the date of Registration, a License to use the Eligible Features and Functions of the Software that are applicable to the Basic Version of the Software. The foregoing license may be renewed annually upon the mutual agreement of the parties. -1.2 Reservation of Rights; Restrictions. As between Elastic and You, Elastic owns all right title and interest in and to the Software and any derivative works thereof, and except as expressly set forth in Section 1.1 above, no other license to the Software is granted to You by implication, estoppel or otherwise. You agree not to: (i) reverse engineer or decompile, decrypt, disassemble or otherwise reduce any Software or any portion thereof to human-readable form, except and only to the extent any such restriction is prohibited by applicable law, (ii) deploy the Software on more Nodes than are permitted under the applicable License grant in Section 1.1 above, (iii) where You have purchased a Subscription, use the Software in connection with any Project other than the Project for which You have purchased such Subscription, as identified on the applicable Order Form, (iv) prepare derivative works from, modify, copy or use the Software in any manner except as expressly permitted in this Agreement; (v) except as expressly permitted in Section 1.1 above, transfer, sell, rent, lease, distribute, sublicense, loan or otherwise transfer the Software in whole or in part to any third party; (vi) except as may be expressly permitted on an applicable Order Form or in another agreement between the parties, use the Software for providing time-sharing services, any software-as-a-service offering (“SaaS”), service bureau services or as part of an application services provider or other service offering; (vii) circumvent the limitations on use of the Software that are imposed or preserved by any License Key, (viii) alter or remove any Marks and Notices in the Software; (ix) deploy the Commercial Software on or in connection with any third party infrastructure as a service that includes any Elastic-branded software as a service; or (x) make available to any third party any analysis of the results of operation of the Software, including benchmarking results, without the prior written consent of Elastic. 
The Software may contain or be provided with open source libraries, components, utilities and other open source software (collectively, “Open Source Software”), which Open Source Software may have applicable license terms as identified on a website designated by Elastic or otherwise provided with the Software or Documentation. Notwithstanding anything to the contrary herein, use of the Open Source Software shall be subject to the license terms and conditions applicable to such Open Source Software, to the extent required by the applicable licensor (which terms shall not restrict the license rights granted to You hereunder, but may contain additional rights). -1.3 Audit Rights. You agree that, unless such right is waived in writing by Elastic, Elastic shall have the right, upon fifteen (15) days’ notice to You, to audit Your use of the Software for compliance with any limitations on Your use of the Software that are set forth herein. You agree to provide Elastic with the necessary access to the Software to conduct such an audit either (i) remotely, or (ii) if remote performance is not possible, at Your facilities, during normal business hours and no more than one (1) time in any twelve (12) month period. In the event any such audit reveals that You have used the Software in excess of the applicable quantitative limitations, You agree to promptly pay to Elastic an amount equal to the difference between the fees actually paid and the fees that You should have paid to remain in compliance with such quantitative limitations. This Section 1.3 shall survive for a period of two (2) years from the termination or expiration of this Agreement. -1.4 Cluster Metadata. You understand and agree that once deployed, and on a daily basis, the Software may provide metadata to Elastic about Your cluster statistics and associates that metadata with Your IP address. However, no other information is provided to Elastic by the Software, including any information about the data You process or store in connection with Your use of the Software. Instructions for disabling this feature are contained in the Software, however leaving this feature active enables Elastic to gather cluster statistics and provide an improved level of support to You. -2. TERM AND TERMINATION -2.1 Term. Unless earlier terminated under Section 2.2 below, this Agreement shall commence on the Effective Date, and shall continue in force for the term of the last to expire applicable license set forth in Section 1.1 above. -2.2 Termination. Either party may, upon written notice to the other party, terminate this Agreement for material breach by the other party automatically and without any other formality, if such party has failed to cure such material breach within thirty (30) days of receiving written notice of such material breach from the non-breaching party. Notwithstanding the foregoing, this Agreement shall automatically terminate in the event that You intentionally breach the scope of a license granted in Section 1.1 of this Agreement, provided that Elastic reserves the right to retroactively waive such automatic termination upon written notice to You. -2.3 Post Termination or Expiration. Upon termination or expiration of this Agreement, for any reason, You shall promptly cease the use of the Software and Documentation and destroy (and certify to Elastic in writing the fact of such destruction), or return to Elastic, all copies of the Software and Documentation then in Your possession or under Your control. -2.4 Survival. 
Sections 2.3, 2.4, 3, 4, 5 and 6 (as any such Sections may be modified by Attachment 1, if applicable) shall survive any termination or expiration of this Agreement. -3. LIMITED WARRANTY AND DISCLAIMER OF WARRANTIES -3.1 Limited Performance Warranty. Subject to You purchasing a Subscription, Elastic warrants that during the applicable Subscription Term, the Software will perform in all material respects in accordance with the Documentation. In the event of a breach of the foregoing warranty, Elastic’s sole obligation, and Your exclusive remedy shall be for Elastic to (i) correct any failure(s) of the Software to perform in all material respects in accordance with the Documentation or (ii) if Elastic is unable to provide such a correction within thirty (30) days of receipt of notice of the applicable non-conformity, You may elect to terminate this Agreement and the associated Subscription, and Elastic will promptly refund to You any pre-paid, unused fees paid by You to Elastic for the applicable Subscription. The warranty set forth in this Section 3.1 does not apply if the applicable Software or any portion thereof: (a) has been altered, except by or on behalf Elastic; (b) has not been used, installed, operated, repaired, or maintained in accordance with this Agreement and/or the Documentation; (c) has been subjected to abnormal physical or electrical stress, misuse, negligence, or accident; or (d) is used on equipment, products, or systems not meeting specifications identified by Elastic in the Documentation. Additionally, the warranties set forth herein only apply when notice of a warranty claim is provided to Elastic within the applicable warranty period specified herein and do not apply to any bug, defect or error caused by or attributable to software or hardware not supplied by Elastic. -3.2 Malicious Code. Elastic represents and warrants that prior to making it available for delivery to You, Elastic will use standard industry practices including, without limitation, the use of an updated commercial anti-virus program, to test the Software for Malicious Code and remove any Malicious Code it discovers. In the event of a breach of the foregoing warranty, Elastic’s sole obligation, and Your exclusive remedy shall be for Elastic to replace the Software with Software that does not contain any Malicious Code. -3.3 Warranty Disclaimer. EXCEPT AS EXPRESSLY SET FORTH IN THIS SECTION 3, TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS” WITHOUT WARRANTY OF ANY KIND, AND ELASTIC AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR STATUTORY REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, ELASTIC AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT WITH RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF THE FOREGOING. FURTHER, ELASTIC DOES NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT THE USE OF THE SOFTWARE WILL BE UNINTERRUPTED. -4. LIMITATION OF LIABILITY -The provisions of this Section 4, including to the extent modified by an applicable provision in Attachment 1 hereto, apply if You have not purchased a Subscription. 
If you have purchased a Subscription, then the limitations of liability set forth in the applicable Subscription Agreement will apply in lieu of those set forth in this Section 4, including to the extent modified by an applicable provision in Attachment 1 hereto. -4.1 Disclaimer of Certain Damages. IN NO EVENT SHALL YOU OR ELASTIC OR ITS LICENSORS BE LIABLE FOR ANY LOSS OF PROFITS, LOSS OF USE, BUSINESS INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY INDIRECT, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND IN CONNECTION WITH OR ARISING OUT OF THE USE OR INABILITY TO USE THE SOFTWARE, OR THE PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A BREACH OF CONTRACT OR TORTIOUS CONDUCT, INCLUDING NEGLIGENCE, EVEN IF THE RESPONSIBLE PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. THE LIMITATIONS OF LIABILITY SET FORTH IN THIS SECTION 4.1 SHALL NOT APPLY TO A BREACH THROUGH GROSS NEGLIGENCE OR INTENTIONAL MISCONDUCT BY YOU OF THE SCOPE OF THE LICENSE GRANTED IN SECTION 1.1 OR TO ANY OTHER LIABILITY THAT CANNOT BE EXCLUDED OR LIMITED UNDER APPLICABLE LAW. -4.2 Damages Cap. IN NO EVENT SHALL ELASTIC’S OR ITS LICENSORS’ AGGREGATE, CUMULATIVE LIABILITY UNDER THIS AGREEMENT EXCEED ONE THOUSAND DOLLARS ($1,000). -4.3 YOU AGREE THAT THE FOREGOING LIMITATIONS, EXCLUSIONS AND DISCLAIMERS ARE A REASONABLE ALLOCATION OF THE RISK BETWEEN THE PARTIES AND WILL APPLY TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, EVEN IF ANY REMEDY FAILS IN ITS ESSENTIAL PURPOSE. -5. MISCELLANEOUS -This Agreement, including Attachment 1 hereto, which is hereby incorporated herein by this reference, as well as any applicable Order Form and Subscription Agreement, completely and exclusively state the entire agreement of the parties regarding the subject matter herein, and it supersedes, and its terms govern, all prior proposals, agreements, or other communications between the parties, oral or written, regarding such subject matter. In the event of any conflict between the terms and conditions of any of the foregoing documents, the conflict shall be resolved based on the following order of precedence: (i) an applicable Order Form (but only for the transaction thereunder), (ii) an applicable Subscription Agreement, (iii) the Support Services Policy and (iv) this Agreement. For the avoidance of doubt, the parties hereby expressly acknowledge and agree that if You issue any purchase order or similar document in connection with the purchase of a Subscription and/or obtaining of License to the Software, You will do so only for Your internal, administrative purposes and not with the intent to provide any contractual terms. This Agreement may not be modified except by a subsequently dated, written amendment that expressly amends this Agreement and which is signed on behalf of Elastic and You, by duly authorized representatives. If any provision hereof is held unenforceable, this Agreement will continue without said provision and be interpreted to reflect the original intent of the parties. -6. DEFINITIONS -The following terms have the meanings ascribed: -6.1 “Affiliate” means, with respect to a party, any entity that controls, is controlled by, or which is under common control with, such party, where “control” means ownership of at least fifty percent (50%) of the outstanding voting shares of the entity, or the contractual right to establish policy for, and manage the operations of, the entity. 
-6.2 “Basic Version” means that version of the Software available for use without the purchase of a Subscription, but which does require Registration. -6.3 “Contractor” means any third party contractor performing services on Your behalf. -6.4 “Documentation” means the published end user documentation provided by Elastic with the Software. -6.5 “Eligible Features and Functions” means those features and functions of the Software that are eligible for use with respect to the particular version of the Software licensed by You or the Subscription level purchased by You. A list of the Eligible Features and Functions that correspond to each version of the Software and Subscription levels may be found at https://www.elastic.co/subscriptions. -6.6 “License” means a limited, non-exclusive, non-transferable, fully paid up, right and license (without the right to grant or authorize sublicenses) solely for Your internal business operations to (i) install and use, in object code format, the Software, (ii) use, and distribute internally a reasonable number of copies of the Documentation, provided that You must include on such copies all Marks and Notices; (iii) permit Contractors and Your Affiliates to use the Software and Documentation as set forth in (i) and (ii) above, provided that such use by Contractors must be solely for Your benefit, and You shall be responsible for all acts and omissions of such Contractors and Affiliates in connection with their use of the Software that are contrary to the terms and conditions of this Agreement. -6.7 “License Key” means an alphanumeric code that enables the Eligible Features and Functions of the Software. -6.8 “Malicious Code” means any code that is designed to harm, or otherwise disrupt in any unauthorized manner, the operation of Your computer programs or computer systems or destroy or damage data. For clarity, Malicious Code shall not include any software bugs or errors handled through Support Services, or any standard features of functions of the Software and/or any License Key that are intended to enforce the temporal and/or other limitations on the scope of the use of the Software to the scope of the License granted to You. -6.9 “Marks and Notices” means all Elastic trademarks, trade names, logos and notices present on the Documentation as originally provided by Elastic. -6.10 “Node” means an instance of Software on a single physical server or virtual machine, provided that all client Nodes are excluded from calculating Subscription fees based on the number of Nodes. -6.11 “Project” means a specific use case for the Software, with Nodes being deployed for use in a logical grouping of functionality to support such use case. -6.12 “Registration” means Elastic’s then-current process under which You may register Your use of the Software with Elastic by providing certain information to Elastic regarding You and Your use of the Software. -6.13 “Subscription” means the right to receive Support Services and a License to the Software. -6.14 “Subscription Agreement” means a legally enforceable agreement between You and Elastic, under which You purchase a Subscription. -6.15 “Subscription Level” means the level of Subscription purchased by You. The Subscription Level purchased by You determines the specific Support Services that You are entitled to receive, and the specific Eligible Features and functions that You are entitled to use. -6.16 “Subscription Term” means the period of time for which You have purchased a Subscription. 
-6.17 “Trial Version” means that version of the Software available for use without the purchase of a Subscription and without Registration. - -ATTACHMENT 1 -ADDITIONAL TERMS AND CONDITIONS - -A. The following additional terms and conditions apply to all Customers with principal offices in the United States of America: - -(1) Applicable Elasticsearch Entity. The entity providing the license is Elasticsearch, Inc., a Delaware corporation. - -(2) Government Rights. The Software product is "Commercial Computer Software," as that term is defined in 48 C.F.R. 2.101, and as the term is used in 48 C.F.R. Part 12, and is a Commercial Item comprised of "commercial computer software" and "commercial computer software documentation". If acquired by or on behalf of a civilian agency, the U.S. Government acquires this commercial computer software and/or commercial computer software documentation subject to the terms of this Agreement, as specified in 48 C.F.R. 12.212 (Computer Software) and 12.211 (Technical Data) of the Federal Acquisition Regulation ("FAR") and its successors. If acquired by or on behalf of any agency within the Department of Defense ("DOD"), the U.S. Government acquires this commercial computer software and/or commercial computer software documentation subject to the terms of the Elastic Software End User License Agreement as specified in 48 C.F.R. 227.7202-3 and 48 C.F.R. 227.7202-4 of the DOD FAR Supplement ("DFARS") and its successors, and consistent with 48 C.F.R. 227.7202. This U.S. Government Rights clause, consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202 is in lieu of, and supersedes, any other FAR, DFARS, or other clause or provision that addresses Government rights in computer software, computer software documentation or technical data related to the Software under this Agreement and in any Subcontract under which this commercial computer software and commercial computer software documentation is acquired or licensed. -(3) Export Control. You acknowledge that the goods, software and technology acquired from Elastic are subject to U.S. export control laws and regulations, including but not limited to the International Traffic In Arms Regulations (“ITAR”) (22 C.F.R. Parts 120-130 (2010)); the Export Administration Regulations ("EAR") (15 C.F.R. Parts 730-774 (2010)); the U.S. antiboycott regulations in the EAR and U.S. Department of the Treasury regulations; the economic sanctions regulations and guidelines of the U.S. Department of the Treasury, Office of Foreign Assets Control, and the USA Patriot Act (Title III of Pub. L. 107-56, signed into law October 26, 2001), as amended.  You are now and will remain in the future compliant with all such export control laws and regulations, and will not export, re-export, otherwise transfer any Elastic goods, software or technology or disclose any Elastic software or technology to any person contrary to such laws or regulations.  You acknowledge that remote access to the Software may in certain circumstances be considered a re-export of Software, and accordingly, may not be granted in contravention of U.S. export control laws and regulations. -(4) Governing Law, Jurisdiction and Venue. -(a) Customers in California. 
If Customer is located in California (as determined by the Customer address on the applicable Order Form, or for a trial license under 1.1(a), the location of person who installed the Software), this Agreement will be governed by the laws of the State of California, without regard to its conflict of laws principles, and all suits hereunder will be brought solely in Federal Court for the Northern District of California, or if that court lacks subject matter jurisdiction, in any California State Court located in Santa Clara County. -(b) Customers Outside of California. If Customer is located anywhere other than California (as determined by the Customer address on the applicable Order Form, or for a trial license under 1.1(a), the location of person who installed the Software), this Agreement will be governed by the laws of the State of Delaware, without regard to its conflict of laws principles, and all suits hereunder will be brought solely in Federal Court for the District of Delaware, or if that court lacks subject matter jurisdiction, in any Delaware State Court located in Wilmington, Delaware. -(c) All Customers. This Agreement shall not be governed by the 1980 UN Convention on Contracts for the International Sale of Goods. The parties hereby irrevocably waive any and all claims and defenses either might otherwise have in any action or proceeding in any of the applicable courts set forth in (a) or (b) above, based upon any alleged lack of personal jurisdiction, improper venue, forum non conveniens, or any similar claim or defense. -(d) Equitable Relief. A breach or threatened breach, by either party of Section 4 may cause irreparable harm for which the non-breaching party shall be entitled to seek injunctive relief without being required to post a bond. - -B. The following additional terms and conditions apply to all Customers with principal offices in Canada: - -(1) Applicable Elasticsearch Entity. The entity providing the license is Elasticsearch B.C. Ltd., a corporation incorporated under laws of the Province of British Columbia. - -(2) Export Control. You acknowledge that the goods, software and technology acquired from Elastic are subject to the restrictions and controls set out in Section A(3) above as well as those imposed by the Export and Import Permits Act (Canada) and the regulations thereunder and that you will comply with all applicable laws and regulations. Without limitation, You acknowledge that the Software, or any portion thereof, will not be exported: (a) to any country on Canada's Area Control List; (b) to any country subject to UN Security Council embargo or action; or (c) contrary to Canada's Export Control List Item 5505. You are now and will remain in the future compliant with all such export control laws and regulations, and will not export, re-export, otherwise transfer any Elastic goods, software or technology or disclose any Elastic software or technology to any person contrary to such laws or regulations.  You will not export or re-export the Software, or any portion thereof, directly or indirectly, in violation of the Canadian export administration laws and regulations to any country or end user, or to any end user who you know or have reason to know will utilize them in the design, development or production of nuclear, chemical or biological weapons. You further acknowledge that the Software product may include technical data subject to such Canadian export regulations. 
Elastic does not represent that the Software is appropriate or available for use in all countries. Elastic prohibits accessing materials from countries or states where contents are illegal. You are using the Software on your own initiative and you are responsible for compliance with all applicable laws. You hereby agree to indemnify Elastic and its Affiliates from any claims, actions, liability or expenses (including reasonable lawyers' fees) resulting from Your failure to act in accordance with the acknowledgements, agreements, and representations in this Section B(2). - (3) Governing Law and Dispute Resolution. This Agreement shall be governed by the Province of Ontario and the federal laws of Canada applicable therein without regard to conflict of laws provisions. The parties hereby irrevocably waive any and all claims and defenses either might otherwise have in any such action or proceeding in any of such courts based upon any alleged lack of personal jurisdiction, improper venue, forum non conveniens or any similar claim or defense. Any dispute, claim or controversy arising out of or relating to this Agreement or the existence, breach, termination, enforcement, interpretation or validity thereof, including the determination of the scope or applicability of this agreement to arbitrate, (each, a “Dispute”), which the parties are unable to resolve after good faith negotiations, shall be submitted first to the upper management level of the parties. The parties, through their upper management level representatives shall meet within thirty (30) days of the Dispute being referred to them and if the parties are unable to resolve such Dispute within thirty (30) days of meeting, the parties agree to seek to resolve the Dispute through mediation with ADR Chambers in the City of Toronto, Ontario, Canada before pursuing any other proceedings. The costs of the mediator shall be shared equally by the parties. If the Dispute has not been resolved within thirty (30) days of the notice to desire to mediate, any party may terminate the mediation and proceed to arbitration and the matter shall be referred to and finally resolved by arbitration at ADR Chambers pursuant to the general ADR Chambers Rules for Arbitration in the City of Toronto, Ontario, Canada. The arbitration shall proceed in accordance with the provisions of the Arbitration Act (Ontario). The arbitral panel shall consist of three (3) arbitrators, selected as follows: each party shall appoint one (1) arbitrator; and those two (2) arbitrators shall discuss and select a chairman. If the two (2) party-appointed arbitrators are unable to agree on the chairman, the chairman shall be selected in accordance with the applicable rules of the arbitration body. Each arbitrator shall be independent of each of the parties. The arbitrators shall have the authority to grant specific performance and to allocate between the parties the costs of arbitration (including service fees, arbitrator fees and all other fees related to the arbitration) in such equitable manner as the arbitrators may determine. The prevailing party in any arbitration shall be entitled to receive reimbursement of its reasonable expenses incurred in connection therewith. Judgment upon the award so rendered may be entered in a court having jurisdiction or application may be made to such court for judicial acceptance of any award and an order of enforcement, as the case may be. 
Notwithstanding the foregoing, Elastic shall have the right to institute an action in a court of proper jurisdiction for preliminary injunctive relief pending a final decision by the arbitrator, provided that a permanent injunction and damages shall only be awarded by the arbitrator. The language to be used in the arbitral proceedings shall be English. - (4) Language. Any translation of this Agreement is done for local requirements and in the event of a dispute between the English and any non-English version, the English version of this Agreement shall govern. At the request of the parties, the official language of this Agreement and all communications and documents relating hereto is the English language, and the English-language version shall govern all interpretation of the Agreement.  À la demande des parties, la langue officielle de la présente convention ainsi que toutes communications et tous documents s'y rapportant est la langue anglaise, et la version anglaise est celle qui régit toute interprétation de la présente convention. -(5) Warranty Disclaimer. For Customers with principal offices in the Province of Québec, the following new sentence is to be added to the end of Section 3.3: “SOME JURISDICTIONS DO NOT ALLOW LIMITATIONS OR EXCLUSIONS OF CERTAIN TYPES OF DAMAGES AND/OR WARRANTIES AND CONDITIONS. THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS SET FORTH IN THIS AGREEMENT SHALL NOT APPLY IF AND ONLY IF AND TO THE EXTENT THAT THE LAWS OF A COMPETENT JURISDICTION REQUIRE LIABILITIES BEYOND AND DESPITE THESE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS.” -(6) Limitation of Liability. For Customers with principal offices in the Province of Québec, the following new sentence is to be added to the end of Section 4.1: “SOME JURISDICTIONS DO NOT ALLOW LIMITATIONS OR EXCLUSIONS OF CERTAIN TYPES OF DAMAGES AND/OR WARRANTIES AND CONDITIONS.  THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS SET FORTH IN THIS AGREEMENT SHALL NOT APPLY IF AND ONLY IF AND TO THE EXTENT THAT THE LAWS OF A COMPETENT JURISDICTION REQUIRE LIABILITIES BEYOND AND DESPITE THESE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS.” - -C. The following additional terms and conditions apply to all Customers with principal offices outside of the United States of America and Canada: - -(1) Applicable Elasticsearch Entity. The entity providing the license in Germany is Elasticsearch Gmbh; in France is Elasticsearch SARL, in the United Kingdom is Elasticsearch Ltd, in Australia is Elasticsearch Pty Ltd., in Japan is Elasticsearch KK, in Sweden is Elasticsearch AB, in Norway is Elasticsearch AS and in all other countries is Elasticsearch BV. - -(2) Choice of Law. This Agreement shall be governed by and construed in accordance with the laws of the State of New York, without reference to or application of choice of law rules or principles. Notwithstanding any choice of law provision or otherwise, the Uniform Computer Information Transactions Act (UCITA) and the United Nations Convention on the International Sale of Goods shall not apply. - -(3) Arbitration. Any dispute, claim or controversy arising out of or relating to this Agreement or the existence, breach, termination, enforcement, interpretation or validity thereof, including the determination of the scope or applicability of this agreement to arbitrate, (each, a “Dispute”) shall be referred to and finally resolved by arbitration under the rules and at the location identified below. 
The arbitral panel shall consist of three (3) arbitrators, selected as follows: each party shall appoint one (1) arbitrator; and those two (2) arbitrators shall discuss and select a chairman. If the two party-appointed arbitrators are unable to agree on the chairman, the chairman shall be selected in accordance with the applicable rules of the arbitration body. Each arbitrator shall be independent of each of the parties. The arbitrators shall have the authority to grant specific performance and to allocate between the parties the costs of arbitration (including service fees, arbitrator fees and all other fees related to the arbitration) in such equitable manner as the arbitrators may determine. The prevailing party in any arbitration shall be entitled to receive reimbursement of its reasonable expenses incurred in connection therewith. Judgment upon the award so rendered may be entered in a court having jurisdiction or application may be made to such court for judicial acceptance of any award and an order of enforcement, as the case may be. Notwithstanding the foregoing, Elastic shall have the right to institute an action in a court of proper jurisdiction for preliminary injunctive relief pending a final decision by the arbitrator, provided that a permanent injunction and damages shall only be awarded by the arbitrator. The language to be used in the arbitral proceedings shall be English. - -In addition, the following terms only apply to Customers with principal offices within Europe, the Middle East or Africa (EMEA): - -Arbitration Rules and Location. Any Dispute shall be referred to and finally resolved by arbitration under the London Court of International Arbitration (“LCIA”) Rules (which Rules are deemed to be incorporated by reference into this clause) on the basis that the governing law is the law of the State of New York, USA. The seat, or legal place, of arbitration shall be London, England. - -(b) In addition, the following terms only apply to Customers with principal offices within Asia Pacific, Australia & New Zealand: - -Arbitration Rules and Location. Any Dispute shall be referred to and finally resolved by arbitration under the Rules of Conciliation and Arbitration of the International Chamber of Commerce (“ICC”) in force on the date when the notice of arbitration is submitted in accordance with such Rules (which Rules are deemed to be incorporated by reference into this clause) on the basis that the governing law is the law of the State of New York, USA. The seat, or legal place, of arbitration shall be Singapore. - -(c) In addition, the following terms only apply to Customers with principal offices within the Americas (excluding North America): - -Arbitration Rules and Location. Any Dispute shall be referred to and finally resolved by arbitration under International Dispute Resolution Procedures of the American Arbitration Association (“AAA”) in force on the date when the notice of arbitration is submitted in accordance with such Procedures (which Procedures are deemed to be incorporated by reference into this clause) on the basis that the governing law is the law of the State of New York, USA. The seat, or legal place, of arbitration shall be New York, New York, USA. 
- -(4) In addition, for Customers with principal offices within the UK, the following new sentence is added to the end of Section 4.1: - -Nothing in this Agreement shall have effect so as to limit or exclude a party’s liability for death or personal injury caused by negligence or for fraud including fraudulent misrepresentation and this Section 4.1 shall take effect subject to this provision. - -(5) In addition, for Customers with principal offices within France, Sections 1.2, 3 and 4.1 of the Agreement are deleted and replaced with the following new Sections 1.2, 3.3 and 4.1: -1.2 Reservation of Rights; Restrictions. Elastic owns all right title and interest in and to the Software and any derivative works thereof, and except as expressly set forth in Section 1.1 above, no other license to the Software is granted to You by implication, or otherwise. You agree not to prepare derivative works from, modify, copy or use the Software in any manner except as expressly permitted in this Agreement; provided that You may copy the Software for archival purposes, only where such software is provided on a non-durable medium; and You may decompile the Software, where necessary for interoperability purposes and where necessary for the correction of errors making the software unfit for its intended purpose, if such right is not reserved by Elastic as editor of the Software. Pursuant to article L122-6-1 of the French intellectual property code, Elastic reserves the right to correct any bugs as necessary for the Software to serve its intended purpose. You agree not to: (i) transfer, sell, rent, lease, distribute, sublicense, loan or otherwise transfer the Software in whole or in part to any third party; (ii) use the Software for providing time-sharing services, any software-as-a-service offering (“SaaS”), service bureau services or as part of an application services provider or other service offering; (iii) alter or remove any proprietary notices in the Software; or (iv) make available to any third party any analysis of the results of operation of the Software, including benchmarking results, without the prior written consent of Elastic. -3.3 Warranty Disclaimer. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS” WITHOUT WARRANTY OF ANY KIND, AND ELASTIC AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR STATUTORY REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, ELASTIC AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR PURPOSE WITH RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF THE FOREGOING. FURTHER, ELASTIC DOES NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT THE USE OF THE SOFTWARE WILL BE UNINTERRUPTED. -4.1 Disclaimer of Certain Damages. IN NO EVENT SHALL YOU OR ELASTIC OR ITS LICENSORS BE LIABLE FOR ANY LOSS OF PROFITS, LOSS OF USE, BUSINESS INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY INDIRECT OR UNFORESEEABLE DAMAGES OF ANY KIND IN CONNECTION WITH OR ARISING OUT OF THE USE OR INABILITY TO USE THE SOFTWARE, OR THE PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A BREACH OF CONTRACT OR TORTIOUS CONDUCT, INCLUDING NEGLIGENCE. 
THE LIMITATIONS OF LIABILITY SET FORTH IN THIS SECTION 4.1 SHALL NOT APPLY TO A BREACH, THROUGH GROSS NEGLIGENCE OR INTENTIONAL MISCONDUCT BY YOU, OF THE SCOPE OF THE LICENSE GRANTED IN SECTION 1.1, OR IN CASE OF DEATH OR PERSONAL INJURY. -(6) In addition, for Customers located within Australia: (a) Sections 3.1, 3.2 and 3.3 of the Agreement are deleted and replaced with the following new Sections 3.1, 3.2, 3.3, 3.4 and 3.5; and (b) Sections 4.1, 4.2 and 4.3 of the Agreement are deleted and replaced with the following new Sections 4.1, 4.2, and 4.3: -3.1 Despite anything in this Agreement, Elastic’s goods come with guarantees that cannot be excluded under the Australian Consumer Law (as set out in the Competition and Consumer Act 2010 (Cth)). You are entitled to a replacement or refund for a major failure and compensation for any other reasonably foreseeable loss or damage. You are also entitled to have the goods repaired or replaced if the goods fail to be of acceptable quality and the failure does not amount to a major failure. -3.2 Limited Performance Warranty. Subject to You purchasing a Subscription, Elastic warrants that during the applicable Subscription Term, the Software will perform in all material respects in accordance with the Documentation. In the event of a breach of the foregoing warranty during the Subscription Term and where You notify Elastic that the Software does not perform in all material respects in accordance with the Documentation, Elastic’s sole obligation, and Your exclusive remedy shall be for Elastic to (i) correct (at Elastic’s cost) any failure(s) of the Software to perform in all material respects in accordance with the Documentation or (ii) if Elastic is unable to provide such a correction within thirty (30) days of receipt of notice of the applicable non-conformity, You may elect to terminate this Agreement and the associated Subscription, and Elastic will promptly refund to You any pre-paid, unused fees paid by You to Elastic for the applicable Subscription. The warranty set forth in this Section 3.2 does not apply if the applicable Software or any portion thereof: (a) has been altered, except by or on behalf Elastic; (b) has not been used, installed, operated, repaired, or maintained in accordance with this Agreement and/or the Documentation; (c) has been subjected to abnormal physical or electrical stress, misuse, negligence, or accident; or (d) is used on equipment, products, or systems not meeting specifications identified by Elastic in the Documentation. Additionally, the warranties set forth herein only apply when notice of a warranty claim is provided to Elastic within the applicable warranty period specified herein and do not apply to any bug, defect or error caused by or attributable to software or hardware not supplied by Elastic. -3.3 For the purposes of Section 3.2, You must use the contact details set out below to notify Elastic that the Software does not perform in all material respects in accordance with the Documentation: -Elasticsearch Pty Ltd -4th Floor, 17-19 Alberta Street -Sydney, New South Wales, 2000, Australia - -3.4 Malicious Code. Elastic represents and warrants that prior to making it available for delivery to You, Elastic will use standard industry practices including, without limitation, the use of an updated commercial anti-virus program, to test the Software for Malicious Code and remove any Malicious Code it discovers. 
In the event of a breach of the foregoing warranty, Elastic’s sole obligation, and Your exclusive remedy shall be, at Elastic’s option, for Elastic to replace the Software with Software that does not contain any Malicious Code or to pay for the cost of the Software to be replaced with Software that does not contain any Malicious Code. -3.5 Warranty Disclaimer. NOTHING IN THIS AGREEMENT IS INTENDED TO LIMIT CUSTOMER’S NON-EXCLUDABLE RIGHTS UNDER THE COMPETITION AND CONSUMER ACT 2010 (CTH). EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT AND TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS” WITHOUT WARRANTY OF ANY KIND, AND ELASTIC AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR STATUTORY REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW AND EXCEPT AS SET OUT IN THIS AGREEMENT, ELASTIC AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT WITH RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF THE FOREGOING. FURTHER, ELASTIC DOES NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT THE USE OF THE SOFTWARE WILL BE UNINTERRUPTED. -4.1 Disclaimer of Certain Damages. Subject to clause 4.3, a party is not liable for Consequential Loss however caused (including by the negligence of that party) suffered or incurred by the other party in connection with this agreement. “Consequential Loss” means loss of revenues, loss of reputation, indirect loss, loss of profits, consequential loss, loss of actual or anticipated savings, indirect loss, lost opportunities, including opportunities to enter into arrangements with third parties, loss or damage in connection with claims against by third parties, or loss or corruption or data. -4.2 Damages Cap. SUBJECT TO CLAUSES 4.1 AND 4.3, ANY LIABILITY OF ELASTIC FOR ANY LOSS OR DAMAGE, HOWEVER CAUSED (INCLUDING BY THE NEGLIGENCE OF ELASTIC), SUFFERED BY YOU IN CONNECTION WITH THIS AGREEMENT IS LIMITED TO ONE THOUSAND DOLLARS ($1,000). THE LIMITATION SET OUT IN THIS SECTION 4.2 IS AN AGGREGATE LIMIT FOR ALL CLAIMS, WHENEVER MADE. -4.3 Australian Consumer Law. IF THE COMPETITION AND CONSUMER ACT 2010 (CTH) OR ANY OTHER LEGISLATION STATES THAT THERE IS A GUARANTEE IN RELATION TO ANY GOOD OR SERVICE SUPPLIED BY ELASTIC IN CONNECTION WITH THIS AGREEMENT, AND ELASTIC’S LIABILITY FOR FAILING TO COMPLY WITH THAT GUARANTEE CANNOT BE EXCLUDED BUT MAY BE LIMITED, SECTIONS 4.1, 4.2 AND 4.3 DO NOT APPLY TO THAT LIABILITY. INSTEAD, ELASTIC’S LIABILITY FOR THAT FAILURE IS LIMITED TO (AT THE ELECTION OF ELASTIC), IN THE CASE OF A SUPPLY OF GOODS, ELASTIC REPLACING THE GOODS OR SUPPLYING EQUIVALENT GOODS OR REPAIRING THE GOODS, OR IN THE CASE OF A SUPPLY OF SERVICES, ELASTIC SUPPLYING THE SERVICES AGAIN OR PAYING THE COST OF HAVING THE SERVICES SUPPLIED AGAIN. -(7) In addition, for Customers with principal offices within Japan, Sections 1.2, 3 and 4.1 of the Agreement are deleted and replaced with the following new Sections 1.2, 3.3 and 4.1: -1.2 Reservation of Rights; Restrictions. As between Elastic and You, Elastic owns all right title and interest in and to the Software and any derivative works thereof, and except as expressly set forth in Section 1.1 above, no other license to the Software is granted to You by implication or otherwise. 
You agree not to: (i) prepare derivative works from, modify, copy or use the Software in any manner except as expressly permitted in this Agreement or applicable law; (ii) transfer, sell, rent, lease, distribute, sublicense, loan or otherwise transfer the Software in whole or in part to any third party; (iii) use the Software for providing time-sharing services, any software-as-a-service offering (“SaaS”), service bureau services or as part of an application services provider or other service offering; (iv) alter or remove any proprietary notices in the Software; or (v) make available to any third party any analysis of the results of operation of the Software, including benchmarking results, without the prior written consent of Elastic. -3.3 Warranty Disclaimer. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS” WITHOUT WARRANTY OF ANY KIND, AND ELASTIC AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR STATUTORY REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, ELASTIC AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT WITH RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF THE FOREGOING. FURTHER, ELASTIC DOES NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT THE USE OF THE SOFTWARE WILL BE UNINTERRUPTED. -4.1 Disclaimer of Certain Damages. IN NO EVENT SHALL YOU OR ELASTIC OR ITS LICENSORS BE LIABLE FOR ANY LOSS OF PROFITS, LOSS OF USE, BUSINESS INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY SPECIALINDIRECT, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND IN CONNECTION WITH OR ARISING OUT OF THE USE OR INABILITY TO USE THE SOFTWARE, OR THE PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A BREACH OF CONTRACT OR TORTIOUS CONDUCT, INCLUDING NEGLIGENCE, EVEN IF THE RESPONSIBLE PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. THE LIMITATIONS OF LIABILITY SET FORTH IN THIS SECTION 4.1 SHALL NOT APPLY TO A BREACH THROUGH GROSS NEGLIGENCE OR INTENTIONAL MISCONDUCT BY YOU OF THE SCOPE OF THE LICENSE GRANTED IN SECTION 1.1 OR TO ANY OTHER LIABILITY THAT CANNOT BE EXCLUDED OR LIMITED UNDER APPLICABLE LAW. \ No newline at end of file diff --git a/migrate/x-pack-kibana/dev-tools/ci b/migrate/x-pack-kibana/dev-tools/ci deleted file mode 100755 index 7233da201b6..00000000000 --- a/migrate/x-pack-kibana/dev-tools/ci +++ /dev/null @@ -1,95 +0,0 @@ -#!/bin/bash -# This script is used as a single command to run the x-pack-kibana tests. -# -# It will also attempt to install the appropriate version of node.js -# for the Kibana plugin tests using nvm. Set a custom nvm directory using the -# `NVM_DIR` environment variable. -# - -# Turn on semi-strict mode -set -e -set -o pipefail - -SCRIPT="$0" - -# SCRIPT may be an arbitrarily deep series of symlinks. Loop until we have the concrete path. -while [ -h "$SCRIPT" ] ; do - ls=$(ls -ld "$SCRIPT") - # Drop everything prior to -> - link=$(expr "$ls" : '.*-> \(.*\)$') - if expr "$link" : '/.*' > /dev/null; then - SCRIPT="$link" - else - SCRIPT=$(dirname "$SCRIPT")/"$link" - fi -done - -# determine base directory -BASE_DIR=$(dirname "$SCRIPT")/.. 
- -# make BASE_DIR absolute -BASE_DIR=$(cd "$BASE_DIR"; pwd) - -PARENT_DIR=$(cd "$BASE_DIR"/..; pwd) - -# go to the parent directory -cd $PARENT_DIR - -if [ -z ${USE_EXISTING_ES:+x} ]; then - if [ -d "./elasticsearch" ]; then - echo "I expected a clean workspace but an 'elasticsearch' sibling directory already exists in [$PARENT_DIR]!" - echo - echo "Either define 'USE_EXISTING_ES' or remove the existing 'elasticsearch' sibling." - exit 1 - fi - BRANCH=${PR_SOURCE_BRANCH:-${GIT_BRANCH#*/}} # GIT_BRANCH starts with the repo, i.e., origin/master - BRANCH=${BRANCH:-master} # fall back to CI branch if not testing a PR - echo "Checking if branch '$BRANCH' has elasticsearch sibling..." - if [[ -z "$(git ls-remote --heads https://github.com/elastic/elasticsearch.git $BRANCH)" ]]; then - echo "No sibling branch, using PR target branch" - BRANCH=$PR_TARGET_BRANCH - fi - echo "Checking out Elasticsearch '$BRANCH' branch..." - git clone -b $BRANCH https://github.com/elastic/elasticsearch.git --depth=1 - printf "Checked out Elasticsearch revision: %s\n" "$(git -C elasticsearch rev-parse HEAD)" -else - if [ -d "./elasticsearch" ]; then - echo "Using existing 'elasticsearch' checkout" - else - echo "You have defined 'USE_EXISTING_ES' but no existing Elasticsearch directory exists!" - exit 2 - fi -fi - -# back to base directory -cd "$BASE_DIR" - -# install the correct node.js version -if [ -z ${NVM_DIR:+x} ]; then - export NVM_DIR="/var/lib/jenkins/.nvm"; -fi - -NVM_SCRIPT="$NVM_DIR/nvm.sh" -if [ -s "$NVM_SCRIPT" ]; then - . "$NVM_SCRIPT" # load nvm -else - echo "Unable to find the nvm script at \"$NVM_SCRIPT\"" - exit 1 -fi - -echo "Installing node.js version $(cat .node-version)..." -nvm install "$(cat .node-version)" - -echo "Running x-pack-kibana tests..." -echo "Running in $PWD" - -# output the commands -set -xuf - -# clean -gradle --stacktrace clean - -# Actually run the tests -gradle check - -# ~*~ shell-script-mode ~*~ diff --git a/migrate/x-pack-logstash/.gitignore b/migrate/x-pack-logstash/.gitignore deleted file mode 100644 index 5bbef9e8136..00000000000 --- a/migrate/x-pack-logstash/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -.bundle/ -.gradle/ -Gemfile.lock -build/ -vendor/ diff --git a/migrate/x-pack-logstash/dev-tools/ci b/migrate/x-pack-logstash/dev-tools/ci deleted file mode 100755 index d7bfd457159..00000000000 --- a/migrate/x-pack-logstash/dev-tools/ci +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -# This script is used as a single command to run the x-pack-logstash tests. - -# Turn on semi-strict mode -set -e -set -o pipefail - -echo "Running x-pack-logstash tests..." 
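For anyone running the x-pack-kibana CI script above by hand rather than on Jenkins, a minimal sketch of an invocation, assuming the working directory is the x-pack-kibana checkout and that an elasticsearch sibling checkout already exists; the nvm path and the value "true" are illustrative, since the script only checks that USE_EXISTING_ES is set to a non-empty value:

# reuse the existing ../elasticsearch sibling and a user-level nvm directory
$ USE_EXISTING_ES=true NVM_DIR="$HOME/.nvm" ./dev-tools/ci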
- -# output the commands -set -xuf - -# clean -gradle --stacktrace clean - -# Actually run the tests -gradle check - -# ~*~ shell-script-mode ~*~ diff --git a/migrate/x-pack/build.gradle b/migrate/x-pack/build.gradle deleted file mode 100644 index 9cc2da568f2..00000000000 --- a/migrate/x-pack/build.gradle +++ /dev/null @@ -1,54 +0,0 @@ -Properties props = new Properties() -props.load(project.file("${projectDir}/../elasticsearch/buildSrc/version.properties").newDataInputStream()) -version = props.getProperty('elasticsearch') -boolean snapshot = 'true'.equals(System.getProperty('build.snapshot', 'true')) -if (snapshot) { - version += '-SNAPSHOT' -} - -String elasticsearchDir = "${projectDir}/../elasticsearch-extra/x-pack-elasticsearch" -String elasticsearchZip = "${elasticsearchDir}/plugin/build/distributions/x-pack-${version}.zip" -task assembleElasticsearch(type: GradleBuild) { - dir = file(elasticsearchDir) - tasks = ['assemble'] -} - -String kibanaDir = "${projectDir}/../x-pack-kibana" -String kibanaZip = "${kibanaDir}/build/distributions/x-pack-${version}.zip" -task assembleKibana(type: GradleBuild) { - dir = file(kibanaDir) - tasks = ['assemble'] -} - -String logstashDir = "${projectDir}/../logstash-extra/x-pack-logstash" -String logstashZip = "${logstashDir}/build/distributions/x-pack-${version}.zip" -task assembleLogstash(type: GradleBuild) { - dir = file(logstashDir) - tasks = ['assemble'] -} - -task bundlePack(type: Zip) { - dependsOn assembleElasticsearch, assembleKibana, assembleLogstash - from { zipTree(file(elasticsearchZip)) } - from { zipTree(file(kibanaZip)) } - from { zipTree(file(logstashZip)) } - destinationDir file('build/distributions') - baseName = 'x-pack' - version = project.version -} - -task assemble(dependsOn: bundlePack) { - group = 'Build' - description = 'Assembles the outputs of this project.' -} - -task build(dependsOn: assemble) { - group = 'Build' - description = 'Assembles and tests this project.' 
-} - -task clean(type: Delete) { - group = 'Build' - description = 'Deletes the build directory' - delete 'build' -} diff --git a/plugin/build.gradle b/plugin/build.gradle index 8080f567a28..06e96e3d4ee 100644 --- a/plugin/build.gradle +++ b/plugin/build.gradle @@ -20,7 +20,7 @@ es_meta_plugin { } dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } // https://github.com/elastic/x-plugins/issues/724 diff --git a/plugin/bwc-snapshot-dummy-projects/build.gradle b/plugin/bwc-snapshot-dummy-projects/build.gradle index a4492f011f7..d9bc77c039c 100644 --- a/plugin/bwc-snapshot-dummy-projects/build.gradle +++ b/plugin/bwc-snapshot-dummy-projects/build.gradle @@ -58,7 +58,7 @@ subprojects { task createXPackClone(type: LoggedExec) { onlyIf { xpackCheckoutDir.exists() == false } - commandLine = ['git', 'clone', project(':x-pack-elasticsearch').projectDir, xpackCheckoutPath] + commandLine = ['git', 'clone', xpackRootProject.projectDir, xpackCheckoutPath] } // we use regular Exec here to ensure we always get output, regardless of logging level diff --git a/plugin/core/build.gradle b/plugin/core/build.gradle index 188fc437ef2..2a586c4a65d 100644 --- a/plugin/core/build.gradle +++ b/plugin/core/build.gradle @@ -1,3 +1,5 @@ +import com.carrotsearch.gradle.junit4.RandomizedTestingTask +import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.MavenFilteringHack import java.nio.file.Files @@ -13,13 +15,9 @@ esplugin { description 'Elasticsearch Expanded Pack Plugin - Core' classname 'org.elasticsearch.xpack.core.XPackPlugin' hasNativeController false - requiresKeystore true - licenseFile project(':x-pack-elasticsearch').file('LICENSE.txt') - noticeFile project(':x-pack-elasticsearch').file('NOTICE.txt') + requiresKeystore false } -integTest.enabled = false - dependencyLicenses { mapping from: /bc.*/, to: 'bouncycastle' mapping from: /http.*/, to: 'httpclient' // pulled in by rest client @@ -82,7 +80,6 @@ compileTestJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,- licenseHeaders { approvedLicenses << 'BCrypt (BSD-like)' additionalLicense 'BCRYP', 'BCrypt (BSD-like)', 'Copyright (c) 2006 Damien Miller ' - enabled = false } // make LicenseSigner available for testing signed licenses @@ -130,15 +127,6 @@ test { systemProperty 'es.set.netty.runtime.available.processors', 'false' } -integTestRunner { - /* - * We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each - * other if we allow them to set the number of available processors as it's set-once in Netty. - */ - systemProperty 'es.set.netty.runtime.available.processors', 'false' -} - - // TODO: don't publish test artifacts just to run messy tests, fix the tests! 
// https://github.com/elastic/x-plugins/issues/724 configurations { @@ -163,3 +151,28 @@ thirdPartyAudit.excludes = [ 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener' ] + +// xpack modules are installed in real clusters as the meta plugin, so +// installing them as individual plugins for integ tests doesn't make sense, +// so we disable integ tests +integTest.enabled = false + +// Instead we create a separate task to run the +// tests based on ESIntegTestCase +task internalClusterTest(type: RandomizedTestingTask, + group: JavaBasePlugin.VERIFICATION_GROUP, + description: 'Multi-node tests', + dependsOn: test.dependsOn) { + configure(BuildPlugin.commonTestConfig(project)) + classpath = project.test.classpath + testClassesDir = project.test.testClassesDir + include '**/*IT.class' + systemProperty 'es.set.netty.runtime.available.processors', 'false' +} +check.dependsOn internalClusterTest +internalClusterTest.mustRunAfter test + +// also add an "alias" task to make typing on the command line easier +task icTest { + dependsOn internalClusterTest +} diff --git a/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java b/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java index adbe0c6df10..d0d882b7bda 100644 --- a/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java +++ b/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java @@ -216,8 +216,8 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste && XPackSettings.TRANSPORT_SSL_ENABLED.get(settings) == false && "single-node".equals(DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings)) == false) { // security is on but TLS is not configured we gonna fail the entire request and throw an exception - throw new IllegalStateException("Can not upgrade to a production license unless TLS is configured or " + - "security is disabled"); + throw new IllegalStateException("Cannot install a [" + newLicense.operationMode() + + "] license unless TLS is configured or security is disabled"); // TODO we should really validate that all nodes have xpack installed and are consistently configured but this // should happen on a different level and not in this code } else { diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index 38ad93ef4bb..96f85c8f0b3 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -43,7 +43,6 @@ import org.elasticsearch.xpack.core.action.TransportXPackInfoAction; import org.elasticsearch.xpack.core.action.TransportXPackUsageAction; import org.elasticsearch.xpack.core.action.XPackInfoAction; import org.elasticsearch.xpack.core.action.XPackUsageAction; -import org.elasticsearch.xpack.core.extensions.XPackExtension; import org.elasticsearch.xpack.core.rest.action.RestXPackInfoAction; import org.elasticsearch.xpack.core.rest.action.RestXPackUsageAction; import org.elasticsearch.xpack.core.ssl.SSLConfigurationReloader; @@ -59,7 +58,6 @@ import java.security.PrivilegedAction; import java.time.Clock; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.function.Supplier; @@ -118,11 +116,6 @@ public class XPackPlugin extends XPackClientPlugin implements ScriptPlugin, Exte this.licensing = new Licensing(settings); } - // For tests only - 
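As a hedged illustration of the new internalClusterTest task (and its icTest alias) added to plugin/core/build.gradle above, an invocation might look like the following; the fully qualified project path is an assumption based on the :x-pack-elasticsearch:plugin:core path used elsewhere in this change:

# run only the ESIntegTestCase-based tests (classes matching *IT) for the core module
$ gradle :x-pack-elasticsearch:plugin:core:internalClusterTest
# the shorter alias task runs the same tests via its dependency
$ gradle :x-pack-elasticsearch:plugin:core:icTest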
public Collection> getExtensions() { - return Collections.emptyList(); - } - // overridable by tests protected Clock getClock() { return Clock.systemUTC(); diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/InstallXPackExtensionCommand.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/InstallXPackExtensionCommand.java deleted file mode 100644 index 9be519f2d4e..00000000000 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/InstallXPackExtensionCommand.java +++ /dev/null @@ -1,359 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.extensions; - -import joptsimple.OptionSet; -import joptsimple.OptionSpec; -import org.apache.lucene.util.IOUtils; -import org.elasticsearch.SpecialPermission; -import org.elasticsearch.bootstrap.JarHell; -import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.cli.EnvironmentAwareCommand; -import org.elasticsearch.cli.Terminal; -import org.elasticsearch.cli.UserException; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.env.Environment; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.net.URL; -import java.net.URLDecoder; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardCopyOption; - -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.Collections; -import java.util.Set; -import java.util.zip.ZipEntry; -import java.util.zip.ZipInputStream; - -import java.security.Policy; -import java.security.PermissionCollection; -import java.security.Permission; -import java.security.NoSuchAlgorithmException; -import java.security.Permissions; -import java.security.PrivilegedAction; -import java.security.AccessController; -import java.security.UnresolvedPermission; -import java.security.URIParameter; - -import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE; -import static org.elasticsearch.xpack.core.XPackPlugin.resolveXPackExtensionsFile; - -/** - * A command for the extension cli to install an extension into x-pack. - * - * The install command takes a URL to an extension zip. - * - * Extensions are packaged as zip files. Each packaged extension must contain an - * extension properties file. See {@link XPackExtensionInfo}. - *

- * The installation process first extracts the extensions files into a temporary
- * directory in order to verify the extension satisfies the following requirements:
- * <ul>
- *     <li>The property file exists and contains valid metadata. See {@link XPackExtensionInfo#readFromProperties(Path)}</li>
- *     <li>Jar hell does not exist, either between the extension's own jars or with the parent classloader (elasticsearch + x-pack)</li>
- *     <li>If the extension contains extra security permissions, the policy file is validated</li>
- * </ul>
- */ -final class InstallXPackExtensionCommand extends EnvironmentAwareCommand { - - private final OptionSpec batchOption; - private final OptionSpec arguments; - - InstallXPackExtensionCommand() { - super("Install an extension"); - this.batchOption = parser.acceptsAll(Arrays.asList("b", "batch"), - "Enable batch mode explicitly, automatic confirmation of security permission"); - this.arguments = parser.nonOptions("extension id"); - } - - @Override - protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception { - // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args - List args = arguments.values(options); - if (args.size() != 1) { - throw new UserException(ExitCodes.USAGE, "Must supply a single extension id argument"); - } - String extensionURL = args.get(0); - boolean isBatch = options.has(batchOption) || System.console() == null; - execute(terminal, extensionURL, isBatch, env); - } - - - // pkg private for testing - void execute(Terminal terminal, String extensionId, boolean isBatch, Environment env) throws Exception { - if (Files.exists(resolveXPackExtensionsFile(env)) == false) { - terminal.println("xpack extensions directory [" + resolveXPackExtensionsFile(env) + "] does not exist. Creating..."); - Files.createDirectories(resolveXPackExtensionsFile(env)); - } - - Path extensionZip = download(terminal, extensionId, env.tmpFile()); - Path extractedZip = unzip(extensionZip, resolveXPackExtensionsFile(env)); - install(terminal, extractedZip, env, isBatch); - } - - /** Downloads the extension and returns the file it was downloaded to. */ - @SuppressForbidden(reason = "We use openStream to download extensions") - private Path download(Terminal terminal, String extensionURL, Path tmpDir) throws Exception { - terminal.println("-> Downloading " + URLDecoder.decode(extensionURL, "UTF-8")); - URL url = new URL(extensionURL); - Path zip = Files.createTempFile(tmpDir, null, ".zip"); - try (InputStream in = url.openStream()) { - // must overwrite since creating the temp file above actually created the file - Files.copy(in, zip, StandardCopyOption.REPLACE_EXISTING); - } - return zip; - } - - private Path unzip(Path zip, Path extensionDir) throws IOException, UserException { - // unzip extension to a staging temp dir - Path target = Files.createTempDirectory(extensionDir, ".installing-"); - Files.createDirectories(target); - - // TODO: we should wrap this in a try/catch and try deleting the target dir on failure? - try (ZipInputStream zipInput = new ZipInputStream(Files.newInputStream(zip))) { - ZipEntry entry; - byte[] buffer = new byte[8192]; - while ((entry = zipInput.getNextEntry()) != null) { - Path targetFile = target.resolve(entry.getName()); - // TODO: handle name being an absolute path - - // be on the safe side: do not rely on that directories are always extracted - // before their children (although this makes sense, but is it guaranteed?) - Files.createDirectories(targetFile.getParent()); - if (entry.isDirectory() == false) { - try (OutputStream out = Files.newOutputStream(targetFile)) { - int len; - while((len = zipInput.read(buffer)) >= 0) { - out.write(buffer, 0, len); - } - } - } - zipInput.closeEntry(); - } - } - Files.delete(zip); - return target; - } - - /** Load information about the extension, and verify it can be installed with no errors. 
*/ - private XPackExtensionInfo verify(Terminal terminal, Path extensionRoot, Environment env, boolean isBatch) throws Exception { - // read and validate the extension descriptor - XPackExtensionInfo info = XPackExtensionInfo.readFromProperties(extensionRoot); - terminal.println(VERBOSE, info.toString()); - - // check for jar hell before any copying - jarHellCheck(extensionRoot); - - // read optional security policy (extra permissions) - // if it exists, confirm or warn the user - Path policy = extensionRoot.resolve(XPackExtensionInfo.XPACK_EXTENSION_POLICY); - if (Files.exists(policy)) { - readPolicy(policy, terminal, env, isBatch); - } - - return info; - } - - /** check a candidate extension for jar hell before installing it */ - private void jarHellCheck(Path candidate) throws Exception { - // create list of current jars in classpath - // including the x-pack jars (see $ES_CLASSPATH in bin/extension script) - final Set jars = new HashSet<>(JarHell.parseClassPath()); - - // add extension jars to the list - Path extensionJars[] = FileSystemUtils.files(candidate, "*.jar"); - for (Path jar : extensionJars) { - jars.add(jar.toUri().toURL()); - } - // TODO: no jars should be an error - // TODO: verify the classname exists in one of the jars! - - // check combined (current classpath + new jars to-be-added) - JarHell.checkJarHell(jars); - } - - /** - * Installs the extension from {@code tmpRoot} into the extensions dir. - */ - private void install(Terminal terminal, Path tmpRoot, Environment env, boolean isBatch) throws Exception { - List deleteOnFailure = new ArrayList<>(); - deleteOnFailure.add(tmpRoot); - try { - XPackExtensionInfo info = verify(terminal, tmpRoot, env, isBatch); - final Path destination = resolveXPackExtensionsFile(env).resolve(info.getName()); - if (Files.exists(destination)) { - throw new UserException(ExitCodes.USAGE, - "extension directory " + destination.toAbsolutePath() + - " already exists. 
To update the extension, uninstall it first using 'remove " + - info.getName() + "' command"); - } - Files.move(tmpRoot, destination, StandardCopyOption.ATOMIC_MOVE); - terminal.println("-> Installed " + info.getName()); - } catch (Exception installProblem) { - try { - IOUtils.rm(deleteOnFailure.toArray(new Path[0])); - } catch (IOException exceptionWhileRemovingFiles) { - installProblem.addSuppressed(exceptionWhileRemovingFiles); - } - throw installProblem; - } - } - - /** Format permission type, name, and actions into a string */ - static String formatPermission(Permission permission) { - StringBuilder sb = new StringBuilder(); - - String clazz = null; - if (permission instanceof UnresolvedPermission) { - clazz = ((UnresolvedPermission) permission).getUnresolvedType(); - } else { - clazz = permission.getClass().getName(); - } - sb.append(clazz); - - String name = null; - if (permission instanceof UnresolvedPermission) { - name = ((UnresolvedPermission) permission).getUnresolvedName(); - } else { - name = permission.getName(); - } - if (name != null && name.length() > 0) { - sb.append(' '); - sb.append(name); - } - - String actions = null; - if (permission instanceof UnresolvedPermission) { - actions = ((UnresolvedPermission) permission).getUnresolvedActions(); - } else { - actions = permission.getActions(); - } - if (actions != null && actions.length() > 0) { - sb.append(' '); - sb.append(actions); - } - return sb.toString(); - } - - /** - * Parses extension policy into a set of permissions - */ - static PermissionCollection parsePermissions(Path file, Path tmpDir) throws IOException { - // create a zero byte file for "comparison" - // this is necessary because the default policy impl automatically grants two permissions: - // 1. permission to exitVM (which we ignore) - // 2. read permission to the code itself (e.g. jar file of the code) - - Path emptyPolicyFile = Files.createTempFile(tmpDir, "empty", "tmp"); - final Policy emptyPolicy; - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - sm.checkPermission(new SpecialPermission()); - } - emptyPolicy = - AccessController.doPrivileged((PrivilegedAction) () -> { - try { - return Policy.getInstance("JavaPolicy", new URIParameter(emptyPolicyFile.toUri())); - } catch (NoSuchAlgorithmException e) { - throw new RuntimeException(e); - } - }); - IOUtils.rm(emptyPolicyFile); - - // parse the extension's policy file into a set of permissions - final Policy policy = - AccessController.doPrivileged((PrivilegedAction) () -> { - try { - return Policy.getInstance("JavaPolicy", new URIParameter(file.toUri())); - } catch (NoSuchAlgorithmException e) { - throw new RuntimeException(e); - } - }); - PermissionCollection permissions = policy.getPermissions(XPackExtensionSecurity.class.getProtectionDomain()); - // this method is supported with the specific implementation we use, but just check for safety. 
- if (permissions == Policy.UNSUPPORTED_EMPTY_COLLECTION) { - throw new UnsupportedOperationException("JavaPolicy implementation does not support retrieving permissions"); - } - PermissionCollection actualPermissions = new Permissions(); - for (Permission permission : Collections.list(permissions.elements())) { - if (!emptyPolicy.implies(XPackExtensionSecurity.class.getProtectionDomain(), permission)) { - actualPermissions.add(permission); - } - } - actualPermissions.setReadOnly(); - return actualPermissions; - } - - - /** - * Reads extension policy, prints/confirms exceptions - */ - static void readPolicy(Path file, Terminal terminal, Environment environment, boolean batch) throws IOException { - PermissionCollection permissions = parsePermissions(file, environment.tmpFile()); - List requested = Collections.list(permissions.elements()); - if (requested.isEmpty()) { - terminal.println(Terminal.Verbosity.VERBOSE, "extension has a policy file with no additional permissions"); - return; - } - - // sort permissions in a reasonable order - Collections.sort(requested, new Comparator() { - @Override - public int compare(Permission o1, Permission o2) { - int cmp = o1.getClass().getName().compareTo(o2.getClass().getName()); - if (cmp == 0) { - String name1 = o1.getName(); - String name2 = o2.getName(); - if (name1 == null) { - name1 = ""; - } - if (name2 == null) { - name2 = ""; - } - cmp = name1.compareTo(name2); - if (cmp == 0) { - String actions1 = o1.getActions(); - String actions2 = o2.getActions(); - if (actions1 == null) { - actions1 = ""; - } - if (actions2 == null) { - actions2 = ""; - } - cmp = actions1.compareTo(actions2); - } - } - return cmp; - } - }); - - terminal.println(Terminal.Verbosity.NORMAL, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@"); - terminal.println(Terminal.Verbosity.NORMAL, "@ WARNING: x-pack extension requires additional permissions @"); - terminal.println(Terminal.Verbosity.NORMAL, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@"); - // print all permissions: - for (Permission permission : requested) { - terminal.println(Terminal.Verbosity.NORMAL, "* " + formatPermission(permission)); - } - terminal.println(Terminal.Verbosity.NORMAL, "See http://docs.oracle.com/javase/8/docs/technotes/guides/security/permissions.html"); - terminal.println(Terminal.Verbosity.NORMAL, "for descriptions of what these permissions allow and the associated risks."); - if (!batch) { - terminal.println(Terminal.Verbosity.NORMAL, ""); - String text = terminal.readText("Continue with installation? [y/N]"); - if (!text.equalsIgnoreCase("y")) { - throw new RuntimeException("installation aborted by user"); - } - } - } -} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/ListXPackExtensionCommand.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/ListXPackExtensionCommand.java deleted file mode 100644 index 0faae15ab1f..00000000000 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/ListXPackExtensionCommand.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.core.extensions; - -import joptsimple.OptionSet; -import org.elasticsearch.cli.EnvironmentAwareCommand; -import org.elasticsearch.cli.Terminal; -import org.elasticsearch.env.Environment; - -import java.io.IOException; -import java.nio.file.DirectoryStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE; -import static org.elasticsearch.xpack.core.XPackPlugin.resolveXPackExtensionsFile; - -/** - * A command for the extension cli to list extensions installed in x-pack. - */ -class ListXPackExtensionCommand extends EnvironmentAwareCommand { - - ListXPackExtensionCommand() { - super("Lists installed x-pack extensions"); - } - - @Override - protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception { - if (Files.exists(resolveXPackExtensionsFile(env)) == false) { - throw new IOException("Extensions directory missing: " + resolveXPackExtensionsFile(env)); - } - terminal.println(VERBOSE, "XPack Extensions directory: " + resolveXPackExtensionsFile(env)); - final List extensions = new ArrayList<>(); - try (DirectoryStream paths = Files.newDirectoryStream(resolveXPackExtensionsFile(env))) { - for (Path extension : paths) { - extensions.add(extension); - } - } - Collections.sort(extensions); - for (final Path extension : extensions) { - terminal.println(extension.getFileName().toString()); - XPackExtensionInfo info = XPackExtensionInfo.readFromProperties(extension); - terminal.println(VERBOSE, info.toString()); - } - } - -} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/RemoveXPackExtensionCommand.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/RemoveXPackExtensionCommand.java deleted file mode 100644 index 4f0f19bcee3..00000000000 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/RemoveXPackExtensionCommand.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.extensions; - -import joptsimple.OptionSet; -import joptsimple.OptionSpec; -import org.apache.lucene.util.IOUtils; -import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.cli.EnvironmentAwareCommand; -import org.elasticsearch.cli.Terminal; -import org.elasticsearch.cli.UserException; -import org.elasticsearch.common.Strings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.core.XPackPlugin; - -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardCopyOption; -import java.util.ArrayList; -import java.util.List; - -import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE; - -/** - * A command for the extension cli to remove an extension from x-pack. 
- */ -class RemoveXPackExtensionCommand extends EnvironmentAwareCommand { - private final OptionSpec arguments; - - RemoveXPackExtensionCommand() { - super("Removes an extension from x-pack"); - this.arguments = parser.nonOptions("extension name"); - } - - @Override - protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception { - - // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args - List args = arguments.values(options); - if (args.size() != 1) { - throw new UserException(ExitCodes.USAGE, "Must supply a single extension id argument"); - } - execute(terminal, args.get(0), env); - } - - // pkg private for testing - void execute(Terminal terminal, String extensionName, Environment env) throws Exception { - terminal.println("-> Removing " + Strings.coalesceToEmpty(extensionName) + "..."); - - Path extensionDir = XPackPlugin.resolveXPackExtensionsFile(env).resolve(extensionName); - if (Files.exists(extensionDir) == false) { - throw new UserException(ExitCodes.USAGE, - "Extension " + extensionName + " not found. Run 'bin/x-pack/extension list' to get list of installed extensions."); - } - - List extensionPaths = new ArrayList<>(); - - terminal.println(VERBOSE, "Removing: " + extensionDir); - Path tmpExtensionDir = XPackPlugin.resolveXPackExtensionsFile(env).resolve(".removing-" + extensionName); - Files.move(extensionDir, tmpExtensionDir, StandardCopyOption.ATOMIC_MOVE); - extensionPaths.add(tmpExtensionDir); - - IOUtils.rm(extensionPaths.toArray(new Path[extensionPaths.size()])); - } -} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtension.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtension.java deleted file mode 100644 index 5015568a736..00000000000 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtension.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.extensions; - -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xpack.core.security.authc.AuthenticationFailureHandler; -import org.elasticsearch.xpack.core.security.authc.Realm; -import org.elasticsearch.xpack.core.security.authc.RealmConfig; -import org.elasticsearch.xpack.security.SecurityExtension; - -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; - -/** - * An extension point allowing to plug in custom functionality in x-pack authentication module. - * @deprecated use {@link SecurityExtension} via SPI instead - */ -@Deprecated -public abstract class XPackExtension implements SecurityExtension { - /** - * The name of the plugin. - */ - public abstract String name(); - - /** - * The description of the plugin. - */ - public abstract String description(); - - /** - * Returns headers which should be copied from REST requests to internal cluster requests. - */ - public Collection getRestHeaders() { - return Collections.emptyList(); - } - - /** - * Returns authentication realm implementations added by this extension. 
- * - * The key of the returned {@link Map} is the type name of the realm, and the value - * is a {@link Realm.Factory} which will construct - * that realm for use in authentication when that realm type is configured. - * - * @param resourceWatcherService Use to watch configuration files for changes - */ - public Map getRealms(ResourceWatcherService resourceWatcherService) { - return Collections.emptyMap(); - } - - /** - * Returns the set of {@link Setting settings} that may be configured for the each type of realm. - * - * Each setting key must be unqualified and is in the same format as will be provided via {@link RealmConfig#settings()}. - * If a given realm-type is not present in the returned map, then it will be treated as if it supported all possible settings. - * - * The life-cycle of an extension dictates that this method will be called before {@link #getRealms(ResourceWatcherService)} - */ - public Map>> getRealmSettings() { return Collections.emptyMap(); } - - /** - * Returns a handler for authentication failures, or null to use the default handler. - * - * Only one installed extension may have an authentication failure handler. If more than - * one extension returns a non-null handler, an error is raised. - */ - public AuthenticationFailureHandler getAuthenticationFailureHandler() { - return null; - } - - /** - * Returns a list of settings that should be filtered from API calls. In most cases, - * these settings are sensitive such as passwords. - * - * The value should be the full name of the setting or a wildcard that matches the - * desired setting. - * @deprecated use {@link Plugin#getSettingsFilter()} ()} via SPI extension instead - */ - @Deprecated - public List getSettingsFilter() { - return Collections.emptyList(); - } - - @Override - public String toString() { - return name(); - } -} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtensionCli.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtensionCli.java deleted file mode 100644 index c533b25f680..00000000000 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtensionCli.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.extensions; - -import org.elasticsearch.cli.LoggingAwareMultiCommand; -import org.elasticsearch.cli.MultiCommand; -import org.elasticsearch.cli.Terminal; - -/** - * A cli tool for adding, removing and listing extensions for x-pack. 
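To make the extension CLI sketched in these classes concrete, a hypothetical session using its install, list and remove subcommands; the zip URL and extension name are invented for illustration, and the bin/x-pack/extension launcher path is the one referenced by RemoveXPackExtensionCommand:

# install an extension from a zip URL; --batch auto-confirms any extra security permissions
$ bin/x-pack/extension install --batch file:///tmp/my-extension.zip
# list the extensions currently installed
$ bin/x-pack/extension list
# remove the extension again
$ bin/x-pack/extension remove my-extension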
- */ -public class XPackExtensionCli extends LoggingAwareMultiCommand { - - private XPackExtensionCli() { - super("A tool for managing installed x-pack extensions"); - subcommands.put("list", new ListXPackExtensionCommand()); - subcommands.put("install", new InstallXPackExtensionCommand()); - subcommands.put("remove", new RemoveXPackExtensionCommand()); - } - - public static void main(String[] args) throws Exception { - exit(new XPackExtensionCli().main(args, Terminal.DEFAULT)); - } - -} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtensionInfo.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtensionInfo.java deleted file mode 100644 index 43219461b8c..00000000000 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtensionInfo.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.extensions; - -import org.elasticsearch.Version; -import org.elasticsearch.bootstrap.JarHell; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Properties; - -public class XPackExtensionInfo { - public static final String XPACK_EXTENSION_PROPERTIES = "x-pack-extension-descriptor.properties"; - public static final String XPACK_EXTENSION_POLICY = "x-pack-extension-security.policy"; - - private String name; - private String description; - private String version; - private String classname; - - public XPackExtensionInfo() { - } - - /** - * Information about extensions - * - * @param name Its name - * @param description Its description - * @param version Version number - */ - XPackExtensionInfo(String name, String description, String version, String classname) { - this.name = name; - this.description = description; - this.version = version; - this.classname = classname; - } - - /** reads (and validates) extension metadata descriptor file */ - public static XPackExtensionInfo readFromProperties(Path dir) throws IOException { - Path descriptor = dir.resolve(XPACK_EXTENSION_PROPERTIES); - Properties props = new Properties(); - try (InputStream stream = Files.newInputStream(descriptor)) { - props.load(stream); - } - String name = props.getProperty("name"); - if (name == null || name.isEmpty()) { - throw new IllegalArgumentException("Property [name] is missing in [" + descriptor + "]"); - } - String description = props.getProperty("description"); - if (description == null) { - throw new IllegalArgumentException("Property [description] is missing for extension [" + name + "]"); - } - String version = props.getProperty("version"); - if (version == null) { - throw new IllegalArgumentException("Property [version] is missing for extension [" + name + "]"); - } - - String xpackVersionString = props.getProperty("xpack.version"); - if (xpackVersionString == null) { - throw new IllegalArgumentException("Property [xpack.version] is missing for extension [" + name + "]"); - } - Version xpackVersion = Version.fromString(xpackVersionString); - if (xpackVersion.equals(Version.CURRENT) == false) { - throw new IllegalArgumentException("extension [" + name + "] is incompatible with Elasticsearch [" + - Version.CURRENT.toString() + "]. 
Was designed for version [" + xpackVersionString + "]"); - } - String javaVersionString = props.getProperty("java.version"); - if (javaVersionString == null) { - throw new IllegalArgumentException("Property [java.version] is missing for extension [" + name + "]"); - } - JarHell.checkVersionFormat(javaVersionString); - JarHell.checkJavaVersion(name, javaVersionString); - String classname = props.getProperty("classname"); - if (classname == null) { - throw new IllegalArgumentException("Property [classname] is missing for extension [" + name + "]"); - } - - return new XPackExtensionInfo(name, description, version, classname); - } - - /** - * @return Extension's name - */ - public String getName() { - return name; - } - - /** - * @return Extension's description if any - */ - public String getDescription() { - return description; - } - - /** - * @return extension's classname - */ - public String getClassname() { - return classname; - } - - /** - * @return Version number for the extension - */ - public String getVersion() { - return version; - } - - @Override - public String toString() { - final StringBuilder information = new StringBuilder() - .append("- XPack Extension information:\n") - .append("Name: ").append(name).append("\n") - .append("Description: ").append(description).append("\n") - .append("Version: ").append(version).append("\n") - .append(" * Classname: ").append(classname); - - return information.toString(); - } -} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtensionPolicy.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtensionPolicy.java deleted file mode 100644 index 27ae8caa459..00000000000 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtensionPolicy.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.core.extensions; - -import org.elasticsearch.common.SuppressForbidden; - -import java.net.URL; -import java.security.Policy; -import java.security.ProtectionDomain; -import java.security.CodeSource; -import java.security.Permission; -import java.security.SecurityPermission; -import java.util.Map; - -final class XPackExtensionPolicy extends Policy { - static final Permission SET_POLICY_PERMISSION = new SecurityPermission("setPolicy"); - static final Permission GET_POLICY_PERMISSION = new SecurityPermission("getPolicy"); - static final Permission CREATE_POLICY_PERMISSION = new SecurityPermission("createPolicy.JavaPolicy"); - - // the base policy (es + plugins) - final Policy basePolicy; - // policy extensions - final Map extensions; - // xpack code source location - final URL xpackURL; - - /** - * - * @param basePolicy The base policy - * @param extensions Extra code source extension's policy - */ - XPackExtensionPolicy(Policy basePolicy, Map extensions) { - this.basePolicy = basePolicy; - this.extensions = extensions; - xpackURL = XPackExtensionPolicy.class.getProtectionDomain().getCodeSource().getLocation(); - } - - private boolean isPolicyPermission(Permission permission) { - return GET_POLICY_PERMISSION.equals(permission) || - CREATE_POLICY_PERMISSION.equals(permission) || - SET_POLICY_PERMISSION.equals(permission); - } - - @Override @SuppressForbidden(reason = "fast equals check is desired") - public boolean implies(ProtectionDomain domain, Permission permission) { - CodeSource codeSource = domain.getCodeSource(); - if (codeSource != null && codeSource.getLocation() != null) { - if (codeSource.getLocation().equals(xpackURL) && - isPolicyPermission(permission)) { - // forbid to get, create and set java policy in xpack codesource - // it is only granted at startup in order to let xpack add the extensions policy - // and make this policy the default. - return false; - } - // check for an additional extension permission: extension policy is - // only consulted for its codesources. - Policy extension = extensions.get(codeSource.getLocation().getFile()); - if (extension != null && extension.implies(domain, permission)) { - return true; - } - } - return basePolicy.implies(domain, permission); - } -} \ No newline at end of file diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtensionSecurity.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtensionSecurity.java deleted file mode 100644 index cc9d052e192..00000000000 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtensionSecurity.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.core.extensions; - -import org.elasticsearch.SpecialPermission; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.xpack.core.extensions.XPackExtensionInfo; - -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.URL; -import java.nio.file.DirectoryStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Map; -import java.util.HashMap; -import java.util.List; -import java.util.ArrayList; -import java.util.Collections; -import java.security.Policy; -import java.security.PrivilegedAction; -import java.security.AccessController; -import java.security.URIParameter; -import java.security.NoSuchAlgorithmException; - -final class XPackExtensionSecurity { - private XPackExtensionSecurity() {} - - /** - * Initializes the XPackExtensionPolicy - * Can only happen once! - * - * @param extsDirectory the directory where the extensions are installed - */ - static void configure(Path extsDirectory) throws IOException { - Map map = getExtensionsPermissions(extsDirectory); - if (map.size() > 0) { - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - sm.checkPermission(new SpecialPermission()); - } - AccessController.doPrivileged((PrivilegedAction) () -> { - Policy newPolicy = new XPackExtensionPolicy(Policy.getPolicy(), map); - Policy.setPolicy(newPolicy); - return null; - }); - } - } - - /** - * Sets properties (codebase URLs) for policy files. - * we look for matching extensions and set URLs to fit - */ - @SuppressForbidden(reason = "proper use of URL") - static Map getExtensionsPermissions(Path extsDirectory) throws IOException { - Map map = new HashMap<>(); - // collect up lists of extensions - List extensionPaths = new ArrayList<>(); - if (Files.exists(extsDirectory)) { - try (DirectoryStream stream = Files.newDirectoryStream(extsDirectory)) { - for (Path extension : stream) { - extensionPaths.add(extension); - } - } - } - // now process each one - for (Path extension : extensionPaths) { - Path policyFile = extension.resolve(XPackExtensionInfo.XPACK_EXTENSION_POLICY); - if (Files.exists(policyFile)) { - // first get a list of URLs for the extension's jars: - // we resolve symlinks so map is keyed on the normalize codebase name - List codebases = new ArrayList<>(); - try (DirectoryStream jarStream = Files.newDirectoryStream(extension, "*.jar")) { - for (Path jar : jarStream) { - codebases.add(jar.toRealPath().toUri().toURL()); - } - } - - // parse the extension's policy file into a set of permissions - Policy policy = readPolicy(policyFile.toUri().toURL(), codebases.toArray(new URL[codebases.size()])); - - // consult this policy for each of the extension's jars: - for (URL url : codebases) { - if (map.put(url.getFile(), policy) != null) { - // just be paranoid ok? - throw new IllegalStateException("per-extension permissions already granted for jar file: " + url); - } - } - } - } - - return Collections.unmodifiableMap(map); - } - - /** - * Reads and returns the specified {@code policyFile}. - *

- * Resources (e.g. jar files and directories) listed in {@code codebases} location - * will be provided to the policy file via a system property of the short name: - * e.g. ${codebase.joda-convert-1.2.jar} would map to full URL. - */ - @SuppressForbidden(reason = "accesses fully qualified URLs to configure security") - static Policy readPolicy(URL policyFile, URL codebases[]) throws IOException { - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - sm.checkPermission(new SpecialPermission()); - } - try { - try { - // set codebase properties - for (URL url : codebases) { - String shortName = PathUtils.get(url.toURI()).getFileName().toString(); - - AccessController.doPrivileged((PrivilegedAction) () -> { - System.setProperty("codebase." + shortName, url.toString()); - return null; - }); - } - URIParameter uri = new URIParameter(policyFile.toURI()); - return AccessController.doPrivileged((PrivilegedAction) () -> { - try { - return Policy.getInstance("JavaPolicy", uri); - } catch (NoSuchAlgorithmException e) { - throw new RuntimeException(e); - } - }); - } finally { - // clear codebase properties - for (URL url : codebases) { - String shortName = PathUtils.get(url.toURI()).getFileName().toString(); - AccessController.doPrivileged((PrivilegedAction) () -> { - System.clearProperty("codebase." + shortName); - return null; - }); - } - } - } catch (URISyntaxException e) { - throw new IllegalArgumentException("unable to parse policy file `" + policyFile + "`", e); - } - } -} \ No newline at end of file diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtensionsService.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtensionsService.java deleted file mode 100644 index ebc336f38be..00000000000 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/extensions/XPackExtensionsService.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */
-package org.elasticsearch.xpack.core.extensions;
-
-import java.io.IOException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.nio.file.DirectoryStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.stream.Collectors;
-
-import org.apache.logging.log4j.Logger;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.bootstrap.JarHell;
-import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.io.FileSystemUtils;
-import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.settings.Settings;
-
-import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory;
-
-public class XPackExtensionsService {
-    private final Settings settings;
-
-    /**
-     * We keep around a list of extensions
-     */
-    private final List<Tuple<XPackExtensionInfo, XPackExtension>> extensions;
-
-    /**
-     * Constructs a new XPackExtensionsService
-     *
-     * @param settings The settings of the system
-     * @param extsDirectory The directory extensions exist in, or null if extensions should not be loaded from the filesystem
-     * @param classpathExtensions Extensions that exist in the classpath which should be loaded
-     */
-    public XPackExtensionsService(Settings settings, Path extsDirectory,
-                                  Collection<Class<? extends XPackExtension>> classpathExtensions) {
-        try {
-            XPackExtensionSecurity.configure(extsDirectory);
-        } catch (Exception e) {
-            throw new IllegalStateException("Unable to configure extension policy", e);
-        }
-
-        this.settings = settings;
-        List<Tuple<XPackExtensionInfo, XPackExtension>> extensionsLoaded = new ArrayList<>();
-        // first we load extensions that are on the classpath. this is for tests
-        for (Class<? extends XPackExtension> extClass : classpathExtensions) {
-            XPackExtension ext = loadExtension(extClass, settings);
-            XPackExtensionInfo extInfo = new XPackExtensionInfo(ext.name(), ext.description(), "NA", extClass.getName());
-            extensionsLoaded.add(new Tuple<>(extInfo, ext));
-        }
-
-        // now, find all the ones that are in plugins/xpack/extensions
-        if (extsDirectory != null) {
-            try {
-                List<Bundle> bundles = getExtensionBundles(extsDirectory);
-                List<Tuple<XPackExtensionInfo, XPackExtension>> loaded = loadBundles(bundles);
-                extensionsLoaded.addAll(loaded);
-            } catch (IOException ex) {
-                throw new IllegalStateException("Unable to initialize extensions", ex);
-            }
-        }
-        extensions = Collections.unmodifiableList(extensionsLoaded);
-    }
-
-    public List<XPackExtension> getExtensions() {
-        return extensions.stream().map(Tuple::v2).collect(Collectors.toList());
-    }
-
-    // a "bundle" is an extension in a single classloader.
- static class Bundle { - XPackExtensionInfo info; - List urls = new ArrayList<>(); - } - - static List getExtensionBundles(Path extsDirectory) throws IOException { - Logger logger = Loggers.getLogger(XPackExtensionsService.class); - - // TODO: remove this leniency, but tests bogusly rely on it - if (!isAccessibleDirectory(extsDirectory, logger)) { - return Collections.emptyList(); - } - - List bundles = new ArrayList<>(); - - try (DirectoryStream stream = Files.newDirectoryStream(extsDirectory)) { - for (Path extension : stream) { - if (FileSystemUtils.isHidden(extension)) { - logger.trace("--- skip hidden extension file[{}]", extension.toAbsolutePath()); - continue; - } - logger.trace("--- adding extension [{}]", extension.toAbsolutePath()); - final XPackExtensionInfo info; - try { - info = XPackExtensionInfo.readFromProperties(extension); - } catch (IOException e) { - throw new IllegalStateException("Could not load extension descriptor for existing extension [" - + extension.getFileName() + "]. Was the extension built before 2.0?", e); - } - - List urls = new ArrayList<>(); - try (DirectoryStream jarStream = Files.newDirectoryStream(extension, "*.jar")) { - for (Path jar : jarStream) { - // normalize with toRealPath to get symlinks out of our hair - urls.add(jar.toRealPath().toUri().toURL()); - } - } - final Bundle bundle = new Bundle(); - bundles.add(bundle); - bundle.info = info; - bundle.urls.addAll(urls); - } - } - - return bundles; - } - - private List> loadBundles(List bundles) { - List> exts = new ArrayList<>(); - - for (Bundle bundle : bundles) { - // jar-hell check the bundle against the parent classloader and the x-pack classloader - // pluginmanager does it, but we do it again, in case lusers mess with jar files manually - try { - final Set jars = new LinkedHashSet<>(); - // add the parent jars to the list - jars.addAll(JarHell.parseClassPath()); - - // add the x-pack jars to the list - ClassLoader xpackLoader = getClass().getClassLoader(); - // this class is loaded from the isolated x-pack plugin's classloader - if (xpackLoader instanceof URLClassLoader) { - jars.addAll(Arrays.asList(((URLClassLoader) xpackLoader).getURLs())); - } - - jars.addAll(bundle.urls); - - JarHell.checkJarHell(jars); - } catch (Exception e) { - throw new IllegalStateException("failed to load bundle " + bundle.urls + " due to jar hell", e); - } - - // create a child to load the extension in this bundle - ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), getClass().getClassLoader()); - final Class extClass = loadExtensionClass(bundle.info.getClassname(), loader); - final XPackExtension ext = loadExtension(extClass, settings); - exts.add(new Tuple<>(bundle.info, ext)); - } - - return Collections.unmodifiableList(exts); - } - - private Class loadExtensionClass(String className, ClassLoader loader) { - try { - return loader.loadClass(className).asSubclass(XPackExtension.class); - } catch (ClassNotFoundException e) { - throw new ElasticsearchException("Could not find extension class [" + className + "]", e); - } - } - - private XPackExtension loadExtension(Class extClass, Settings settings) { - try { - try { - return extClass.getConstructor(Settings.class).newInstance(settings); - } catch (NoSuchMethodException e) { - try { - return extClass.getConstructor().newInstance(); - } catch (NoSuchMethodException e1) { - throw new ElasticsearchException("No constructor for [" + extClass + "]. 
An extension class must " + - "have either an empty default constructor or a single argument constructor accepting a " + - "Settings instance"); - } - } - } catch (Exception e) { - throw new ElasticsearchException("Failed to load extension class [" + extClass.getName() + "]", e); - } - } -} \ No newline at end of file diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java index a69e4eb7571..a2f71f611f4 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DetectionRule.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.core.ml.job.config; +import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -34,11 +35,11 @@ import java.util.stream.Collectors; public class DetectionRule implements ToXContentObject, Writeable { public static final ParseField DETECTION_RULE_FIELD = new ParseField("detection_rule"); - public static final ParseField ACTIONS_FIELD = new ParseField("actions"); + public static final ParseField ACTIONS_FIELD = new ParseField("actions", "rule_action"); public static final ParseField TARGET_FIELD_NAME_FIELD = new ParseField("target_field_name"); public static final ParseField TARGET_FIELD_VALUE_FIELD = new ParseField("target_field_value"); public static final ParseField CONDITIONS_CONNECTIVE_FIELD = new ParseField("conditions_connective"); - public static final ParseField CONDITIONS_FIELD = new ParseField("conditions"); + public static final ParseField CONDITIONS_FIELD = new ParseField("conditions", "rule_conditions"); // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly public static final ObjectParser METADATA_PARSER = @@ -83,10 +84,14 @@ public class DetectionRule implements ToXContentObject, Writeable { } public DetectionRule(StreamInput in) throws IOException { - int actionsCount = in.readVInt(); actions = EnumSet.noneOf(RuleAction.class); - for (int i = 0; i < actionsCount; ++i) { + if (in.getVersion().before(Version.V_6_2_0)) { actions.add(RuleAction.readFromStream(in)); + } else { + int actionsCount = in.readVInt(); + for (int i = 0; i < actionsCount; ++i) { + actions.add(RuleAction.readFromStream(in)); + } } conditionsConnective = Connective.readFromStream(in); @@ -101,9 +106,14 @@ public class DetectionRule implements ToXContentObject, Writeable { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(actions.size()); - for (RuleAction action : actions) { - action.writeTo(out); + if (out.getVersion().before(Version.V_6_2_0)) { + // Only filter_results is supported prior to 6.2.0 + RuleAction.FILTER_RESULTS.writeTo(out); + } else { + out.writeVInt(actions.size()); + for (RuleAction action : actions) { + action.writeTo(out); + } } conditionsConnective.writeTo(out); diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleCondition.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleCondition.java index 7b56409ede0..3b406e3ec34 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleCondition.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleCondition.java @@ -26,10 
+26,11 @@ import java.util.Map; import java.util.Objects; public class RuleCondition implements ToXContentObject, Writeable { - public static final ParseField TYPE_FIELD = new ParseField("type"); + public static final ParseField TYPE_FIELD = new ParseField("type", "condition_type"); public static final ParseField RULE_CONDITION_FIELD = new ParseField("rule_condition"); public static final ParseField FIELD_NAME_FIELD = new ParseField("field_name"); public static final ParseField FIELD_VALUE_FIELD = new ParseField("field_value"); + public static final ParseField FILTER_ID_FIELD = new ParseField(MlFilter.ID.getPreferredName(), "value_filter"); // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly public static final ConstructingObjectParser METADATA_PARSER = @@ -56,7 +57,7 @@ public class RuleCondition implements ToXContentObject, Writeable { parser.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), FIELD_NAME_FIELD); parser.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), FIELD_VALUE_FIELD); parser.declareObject(ConstructingObjectParser.optionalConstructorArg(), Condition.PARSER, Condition.CONDITION_FIELD); - parser.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), MlFilter.ID); + parser.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), FILTER_ID_FIELD); } } @@ -116,7 +117,7 @@ public class RuleCondition implements ToXContentObject, Writeable { builder.field(FIELD_VALUE_FIELD.getPreferredName(), fieldValue); } if (filterId != null) { - builder.field(MlFilter.ID.getPreferredName(), filterId); + builder.field(FILTER_ID_FIELD.getPreferredName(), filterId); } builder.endObject(); return builder; @@ -214,7 +215,7 @@ public class RuleCondition implements ToXContentObject, Writeable { private static void verifyCategorical(RuleCondition ruleCondition) throws ElasticsearchParseException { checkCategoricalHasNoField(Condition.CONDITION_FIELD.getPreferredName(), ruleCondition.getCondition()); checkCategoricalHasNoField(RuleCondition.FIELD_VALUE_FIELD.getPreferredName(), ruleCondition.getFieldValue()); - checkCategoricalHasField(MlFilter.ID.getPreferredName(), ruleCondition.getFilterId()); + checkCategoricalHasField(FILTER_ID_FIELD.getPreferredName(), ruleCondition.getFilterId()); } private static void checkCategoricalHasNoField(String fieldName, Object fieldValue) throws ElasticsearchParseException { @@ -232,7 +233,7 @@ public class RuleCondition implements ToXContentObject, Writeable { } private static void verifyNumerical(RuleCondition ruleCondition) throws ElasticsearchParseException { - checkNumericalHasNoField(MlFilter.ID.getPreferredName(), ruleCondition.getFilterId()); + checkNumericalHasNoField(FILTER_ID_FIELD.getPreferredName(), ruleCondition.getFilterId()); checkNumericalHasField(Condition.CONDITION_FIELD.getPreferredName(), ruleCondition.getCondition()); if (ruleCondition.getFieldName() != null && ruleCondition.getFieldValue() == null) { String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_NUMERICAL_WITH_FIELD_NAME_REQUIRES_FIELD_VALUE); diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java index 1c82af8fc1b..bc420c658d0 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java +++ 
b/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java @@ -77,7 +77,7 @@ public class ElasticsearchMappings { /** * Name of the Elasticsearch field by which documents are sorted by default */ - static final String ES_DOC = "_doc"; + public static final String ES_DOC = "_doc"; /** * Elasticsearch data types @@ -301,7 +301,7 @@ public class ElasticsearchMappings { addModelSizeStatsFieldsToMapping(builder); } - static XContentBuilder termFieldsMapping(String type, Collection termFields) { + public static XContentBuilder termFieldsMapping(String type, Collection termFields) { try { XContentBuilder builder = jsonBuilder().startObject(); if (type != null) { diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobStorageDeletionTask.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobStorageDeletionTask.java index 13c1bf2a4da..10fb34385c6 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobStorageDeletionTask.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobStorageDeletionTask.java @@ -21,7 +21,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.query.ConstantScoreQueryBuilder; import org.elasticsearch.index.query.IdsQueryBuilder; @@ -32,10 +31,13 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction; +import org.elasticsearch.xpack.core.ml.action.util.PageParams; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerState; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; +import org.elasticsearch.xpack.core.ml.utils.MlIndicesUtils; import java.util.ArrayList; import java.util.HashSet; @@ -95,7 +97,7 @@ public class JobStorageDeletionTask extends Task { ConstantScoreQueryBuilder query = new ConstantScoreQueryBuilder(new TermQueryBuilder(Job.ID.getPreferredName(), jobId)); searchRequest.source(new SearchSourceBuilder().query(query)); - searchRequest.indicesOptions(JobProvider.addIgnoreUnavailable(IndicesOptions.lenientExpandOpen())); + searchRequest.indicesOptions(MlIndicesUtils.addIgnoreUnavailable(IndicesOptions.lenientExpandOpen())); request.setSlices(5); request.setAbortOnVersionConflict(false); request.setRefresh(true); @@ -127,7 +129,7 @@ public class JobStorageDeletionTask extends Task { // TODO: remove in 7.0 Quantiles.v54DocumentId(jobId)); searchRequest.source(new SearchSourceBuilder().query(query)); - searchRequest.indicesOptions(JobProvider.addIgnoreUnavailable(IndicesOptions.lenientExpandOpen())); + searchRequest.indicesOptions(MlIndicesUtils.addIgnoreUnavailable(IndicesOptions.lenientExpandOpen())); request.setAbortOnVersionConflict(false); request.setRefresh(true); @@ -144,14 +146,15 @@ public class JobStorageDeletionTask extends Task { } private void deleteModelState(String jobId, Client client, 
ActionListener listener) { - JobProvider jobProvider = new JobProvider(client, Settings.EMPTY); - jobProvider.modelSnapshots(jobId, 0, 10000, - page -> { - List deleteCandidates = page.results(); + GetModelSnapshotsAction.Request request = new GetModelSnapshotsAction.Request(jobId, null); + request.setPageParams(new PageParams(0, PageParams.MAX_FROM_SIZE_SUM)); + executeAsyncWithOrigin(client, ML_ORIGIN, GetModelSnapshotsAction.INSTANCE, request, ActionListener.wrap( + response -> { + List deleteCandidates = response.getPage().results(); JobDataDeleter deleter = new JobDataDeleter(client, jobId); deleter.deleteModelSnapshots(deleteCandidates, listener); }, - listener::onFailure); + listener::onFailure)); } private void deleteCategorizerState(String jobId, Client client, int docNum, ActionListener finishedHandler) { @@ -163,7 +166,7 @@ public class JobStorageDeletionTask extends Task { // TODO: remove in 7.0 CategorizerState.v54DocumentId(jobId, docNum)); searchRequest.source(new SearchSourceBuilder().query(query)); - searchRequest.indicesOptions(JobProvider.addIgnoreUnavailable(IndicesOptions.lenientExpandOpen())); + searchRequest.indicesOptions(MlIndicesUtils.addIgnoreUnavailable(IndicesOptions.lenientExpandOpen())); request.setAbortOnVersionConflict(false); request.setRefresh(true); diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndicesUtils.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndicesUtils.java new file mode 100644 index 00000000000..c916b6664d2 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndicesUtils.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.utils; + +import org.elasticsearch.action.support.IndicesOptions; + +/** + * Common index related operations that ML requires. + */ +public final class MlIndicesUtils { + + private MlIndicesUtils() { + } + + public static IndicesOptions addIgnoreUnavailable(IndicesOptions indicesOptions) { + return IndicesOptions.fromOptions(true, indicesOptions.allowNoIndices(), indicesOptions.expandWildcardsOpen(), + indicesOptions.expandWildcardsClosed(), indicesOptions); + } +} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/NameResolver.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/NameResolver.java index 89d021a2e38..f737a3d9ad7 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/NameResolver.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/NameResolver.java @@ -46,7 +46,9 @@ public abstract class NameResolver { * * * @param expression the expression to resolve - * @param allowNoMatch if {@code false}, an error is thrown when no name matches the {@code expression} + * @param allowNoMatch if {@code false}, an error is thrown when no name matches the {@code expression}. 
+ * This only applies to wildcard expressions; if {@code expression} is not a + * wildcard then setting this to true will not suppress the exception * @return the sorted set of matching names */ public SortedSet<String> expand(String expression, boolean allowNoMatch) { @@ -68,6 +70,8 @@ public abstract class NameResolver { result.addAll(expanded); } else { List<String> matchingNames = lookup(token); + // allowNoMatch only applies to wildcard expressions, + // this isn't one, so don't check allowNoMatch here if (matchingNames.isEmpty()) { throw notFoundExceptionSupplier.apply(token); } diff --git a/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/Cron.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java similarity index 99% rename from plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/Cron.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java index f6888036d6f..a4b3473a1e5 100644 --- a/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/Cron.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.watcher.trigger.schedule; +package org.elasticsearch.xpack.core.scheduler; import org.elasticsearch.ElasticsearchParseException; import org.joda.time.DateTimeZone; diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java index a70c6c2f9ff..a481f880311 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java @@ -67,6 +67,10 @@ public final class ScrollHelper { listener.onFailure(new IllegalStateException("scrolling returned more hits [" + results.size() + "] than expected [" + resp.getHits().getTotalHits() + "] so bailing out to prevent unbounded " + "memory consumption.")); + } else if (results.size() == resp.getHits().getTotalHits()) { + clearScroll.accept(resp); + // Finally, return the list of the entity + listener.onResponse(Collections.unmodifiableList(results)); + } else { SearchScrollRequest scrollRequest = new SearchScrollRequest(resp.getScrollId()); scrollRequest.scroll(request.scroll().keepAlive()); @@ -74,7 +78,7 @@ public final class ScrollHelper { } } else { clearScroll.accept(resp); - // Finally, return the list of users + // Finally, return the list of the entity listener.onResponse(Collections.unmodifiableList(results)); } } catch (Exception e){ diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/security/SecurityExtension.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityExtension.java similarity index 99% rename from plugin/core/src/main/java/org/elasticsearch/xpack/security/SecurityExtension.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityExtension.java index 8d9ee2303cb..190e9f7520b 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/security/SecurityExtension.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityExtension.java @@ -3,7 +3,7 @@ * or more contributor license agreements. 
Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.security; +package org.elasticsearch.xpack.core.security; import org.apache.lucene.util.SPIClassIterator; import org.elasticsearch.action.ActionListener; diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequest.java index aa28605d0fd..5956e1a6613 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequest.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequest.java @@ -142,7 +142,7 @@ public final class CreateTokenRequest extends ActionRequest { out.writeOptionalString(refreshToken); } else { if ("refresh_token".equals(grantType)) { - throw new UnsupportedOperationException("a refresh request cannot be sent to an older version"); + throw new IllegalArgumentException("a refresh request cannot be sent to an older version"); } else { out.writeString(username); final byte[] passwordBytes = CharArrays.toUtf8Bytes(password.getChars()); diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java index 8ad4f024eaf..7a8372fe456 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java @@ -74,7 +74,7 @@ public final class InvalidateTokenRequest extends ActionRequest { if (out.getVersion().onOrAfter(Version.V_6_2_0)) { out.writeVInt(tokenType.ordinal()); } else if (tokenType == Type.REFRESH_TOKEN) { - throw new UnsupportedOperationException("refresh token invalidation cannot be serialized with version [" + out.getVersion() + + throw new IllegalArgumentException("refresh token invalidation cannot be serialized with version [" + out.getVersion() + "]"); } } diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java b/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java index 0cb55ccba7e..f7fabab2799 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.settings.AbstractScopedSettings; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.xpack.core.extensions.XPackExtension; +import org.elasticsearch.xpack.core.security.SecurityExtension; import java.util.Collection; import java.util.HashMap; @@ -41,7 +41,7 @@ import static org.elasticsearch.xpack.core.security.SecurityField.setting; *

*

* The allowable settings for each realm-type are determined by calls to {@link InternalRealmsSettings#getSettings()} and - * {@link XPackExtension#getRealmSettings()} + * {@link org.elasticsearch.xpack.core.security.SecurityExtension#getRealmSettings()} */ public class RealmSettings { @@ -54,11 +54,11 @@ public class RealmSettings { /** * Add the {@link Setting} configuration for all realms to the provided list. */ - public static void addSettings(List> settingsList, List extensions) { + public static void addSettings(List> settingsList, List extensions) { settingsList.add(getGroupSetting(extensions)); } - public static Collection getSettingsFilter(List extensions) { + public static Collection getSettingsFilter(List extensions) { return getSettingsByRealm(extensions).values().stream() .flatMap(Collection::stream) .filter(Setting::isFiltered) @@ -107,11 +107,11 @@ public class RealmSettings { return PREFIX + name + "." + subKey; } - private static Setting getGroupSetting(List extensions) { + private static Setting getGroupSetting(List extensions) { return Setting.groupSetting(PREFIX, getSettingsValidator(extensions), Setting.Property.NodeScope); } - private static Consumer getSettingsValidator(List extensions) { + private static Consumer getSettingsValidator(List extensions) { final Map>> childSettings = getSettingsByRealm(extensions); childSettings.forEach(RealmSettings::verify); return validator(childSettings); @@ -121,7 +121,7 @@ public class RealmSettings { * @return A map from realm-type to a collection of Setting objects. * @see InternalRealmsSettings#getSettings() */ - private static Map>> getSettingsByRealm(List extensions) { + private static Map>> getSettingsByRealm(List extensions) { final Map>> settingsByRealm = new HashMap<>(InternalRealmsSettings.getSettings()); if (extensions != null) { extensions.forEach(ext -> { diff --git a/plugin/core/src/test/java/org/elasticsearch/license/LicensesAcknowledgementTests.java b/plugin/core/src/test/java/org/elasticsearch/license/LicensesAcknowledgementTests.java index eed19785b1b..211215da21c 100644 --- a/plugin/core/src/test/java/org/elasticsearch/license/LicensesAcknowledgementTests.java +++ b/plugin/core/src/test/java/org/elasticsearch/license/LicensesAcknowledgementTests.java @@ -48,7 +48,7 @@ public class LicensesAcknowledgementTests extends AbstractLicenseServiceTestCase // ensure acknowledgement message was part of the response IllegalStateException ise = expectThrows(IllegalStateException.class, () -> licenseService.registerLicense(putLicenseRequest, new AssertingLicensesUpdateResponse(false, LicensesStatus.VALID, true))); - assertEquals("Can not upgrade to a production license unless TLS is configured or security is disabled", ise.getMessage()); + assertEquals("Cannot install a [PLATINUM] license unless TLS is configured or security is disabled", ise.getMessage()); } public void testUpgradeToProductionWithoutTLSAndSecurityDisabled() throws Exception { diff --git a/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java b/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java index 0e1b520b6ff..5f47fd8fa43 100644 --- a/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java +++ b/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java @@ -23,7 +23,10 @@ import org.joda.time.format.DateTimeFormatter; import org.junit.Assert; import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.StandardCopyOption; import 
java.util.ArrayList; import java.util.List; import java.util.UUID; @@ -32,6 +35,7 @@ import java.util.concurrent.atomic.AtomicReference; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt; +import static org.apache.lucene.util.LuceneTestCase.createTempFile; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; import static org.elasticsearch.test.ESTestCase.randomFrom; @@ -297,7 +301,11 @@ public class TestUtils { } private static Path getResourcePath(String resource) throws Exception { - return PathUtils.get(TestUtils.class.getResource(resource).toURI()); + Path resourceFile = createTempFile(); + try (InputStream resourceInput = TestUtils.class.getResourceAsStream(resource)) { + Files.copy(resourceInput, resourceFile, StandardCopyOption.REPLACE_EXISTING); + } + return resourceFile; } public static void registerAndAckSignedLicenses(final LicenseService licenseService, License license, diff --git a/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/InstallXPackExtensionCommandTests.java b/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/InstallXPackExtensionCommandTests.java deleted file mode 100644 index b0291aa5cbf..00000000000 --- a/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/InstallXPackExtensionCommandTests.java +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.extensions; - -import org.apache.lucene.util.LuceneTestCase; -import org.elasticsearch.Version; -import org.elasticsearch.cli.MockTerminal; -import org.elasticsearch.cli.UserException; -import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; - -import java.io.IOException; -import java.io.InputStream; -import java.net.MalformedURLException; -import java.net.URL; -import java.nio.file.DirectoryStream; -import java.nio.file.FileVisitResult; -import java.nio.file.Files; -import java.nio.file.NoSuchFileException; -import java.nio.file.Path; -import java.nio.file.SimpleFileVisitor; -import java.nio.file.StandardCopyOption; -import java.nio.file.attribute.BasicFileAttributes; -import java.util.zip.ZipEntry; -import java.util.zip.ZipOutputStream; - -@LuceneTestCase.SuppressFileSystems("*") -public class InstallXPackExtensionCommandTests extends ESTestCase { - - Path home; - Environment env; - - @Before - public void setUp() throws Exception { - super.setUp(); - home = createTempDir(); - Files.createDirectories(home.resolve("org/elasticsearch/xpack/extensions").resolve("xpack").resolve("extensions")); - env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", home.toString()).build()); - } - - /** - * creates a fake jar file with empty class files - */ - static void writeJar(Path jar, String... 
classes) throws IOException { - try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(jar))) { - for (String clazz : classes) { - stream.putNextEntry(new ZipEntry(clazz + ".class")); // no package names, just support simple classes - } - } - } - - static String writeZip(Path structure) throws IOException { - Path zip = createTempDir().resolve(structure.getFileName() + ".zip"); - try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) { - Files.walkFileTree(structure, new SimpleFileVisitor() { - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - String target = structure.relativize(file).toString(); - stream.putNextEntry(new ZipEntry(target)); - Files.copy(file, stream); - return FileVisitResult.CONTINUE; - } - }); - } - return zip.toUri().toURL().toString(); - } - - /** - * creates an extension .zip and returns the url for testing - */ - static String createExtension(String name, Path structure) throws IOException { - XPackExtensionTestUtil.writeProperties(structure, - "description", "fake desc", - "name", name, - "version", "1.0", - "xpack.version", Version.CURRENT.toString(), - "java.version", System.getProperty("java.specification.version"), - "classname", "FakeExtension"); - writeJar(structure.resolve("extension.jar"), "FakeExtension"); - return writeZip(structure); - } - - static MockTerminal installExtension(String extensionUrl, Path home) throws Exception { - Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", home).build()); - MockTerminal terminal = new MockTerminal(); - new InstallXPackExtensionCommand().execute(terminal, extensionUrl, true, env); - return terminal; - } - - void assertExtension(String name, Environment env) throws IOException { - Path got = env.pluginsFile().resolve("x-pack").resolve("x-pack-security").resolve("extensions").resolve(name); - assertTrue("dir " + name + " exists", Files.exists(got)); - assertTrue("jar was copied", Files.exists(got.resolve("extension.jar"))); - assertInstallCleaned(env); - } - - void assertInstallCleaned(Environment env) throws IOException { - try (DirectoryStream stream = Files.newDirectoryStream(env.pluginsFile().resolve("x-pack"). 
- resolve("x-pack-security").resolve("extensions"))) { - for (Path file : stream) { - if (file.getFileName().toString().startsWith(".installing")) { - fail("Installation dir still exists, " + file); - } - } - } - } - - public void testSomethingWorks() throws Exception { - Path extDir = createTempDir(); - String extZip = createExtension("fake", extDir); - installExtension(extZip, home); - assertExtension("fake", env); - } - - public void testSpaceInUrl() throws Exception { - Path extDir = createTempDir(); - String extZip = createExtension("fake", extDir); - Path extZipWithSpaces = createTempFile("foo bar", ".zip"); - try (InputStream in = FileSystemUtils.openFileURLStream(new URL(extZip))) { - Files.copy(in, extZipWithSpaces, StandardCopyOption.REPLACE_EXISTING); - } - installExtension(extZipWithSpaces.toUri().toURL().toString(), home); - assertExtension("fake", env); - } - - public void testMalformedUrlNotMaven() throws Exception { - // has two colons, so it appears similar to maven coordinates - MalformedURLException e = expectThrows(MalformedURLException.class, () -> { - installExtension("://host:1234", home); - }); - assertTrue(e.getMessage(), e.getMessage().contains("no protocol")); - } - - public void testJarHell() throws Exception { - Path extDir = createTempDir(); - writeJar(extDir.resolve("other.jar"), "FakeExtension"); - String extZip = createExtension("fake", extDir); // adds extension.jar with FakeExtension - IllegalStateException e = expectThrows(IllegalStateException.class, () -> installExtension(extZip, home)); - assertTrue(e.getMessage(), e.getMessage().contains("jar hell")); - assertInstallCleaned(env); - } - - public void testIsolatedExtension() throws Exception { - // these both share the same FakeExtension class - Path extDir1 = createTempDir(); - String extZip1 = createExtension("fake1", extDir1); - installExtension(extZip1, home); - Path extDir2 = createTempDir(); - String extZip2 = createExtension("fake2", extDir2); - installExtension(extZip2, home); - assertExtension("fake1", env); - assertExtension("fake2", env); - } - - public void testExistingExtension() throws Exception { - String extZip = createExtension("fake", createTempDir()); - installExtension(extZip, home); - UserException e = expectThrows(UserException.class, () -> installExtension(extZip, home)); - assertTrue(e.getMessage(), e.getMessage().contains("already exists")); - assertInstallCleaned(env); - } - - public void testMissingDescriptor() throws Exception { - Path extDir = createTempDir(); - Files.createFile(extDir.resolve("fake.yml")); - String extZip = writeZip(extDir); - NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> installExtension(extZip, home)); - assertTrue(e.getMessage(), e.getMessage().contains("x-pack-extension-descriptor.properties")); - assertInstallCleaned(env); - } -} diff --git a/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/ListXPackExtensionCommandTests.java b/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/ListXPackExtensionCommandTests.java deleted file mode 100644 index eaaf5bdfe89..00000000000 --- a/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/ListXPackExtensionCommandTests.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.core.extensions; - -import org.apache.lucene.util.LuceneTestCase; -import org.elasticsearch.Version; -import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.cli.MockTerminal; -import org.elasticsearch.cli.UserException; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; - -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.NoSuchFileException; -import java.nio.file.Path; -import java.util.Arrays; -import java.util.Map; -import java.util.stream.Collectors; - -@LuceneTestCase.SuppressFileSystems("*") -public class ListXPackExtensionCommandTests extends ESTestCase { - - private Path home; - private Environment env; - - @Before - public void setUp() throws Exception { - super.setUp(); - home = createTempDir(); - Settings settings = Settings.builder() - .put("path.home", home) - .build(); - env = TestEnvironment.newEnvironment(settings); - Files.createDirectories(extensionsFile(env)); - } - - private static class MockListXPackExtensionCommand extends ListXPackExtensionCommand { - - private final Environment env; - - private MockListXPackExtensionCommand(final Environment env) { - this.env = env; - } - - @Override - protected Environment createEnv(Map settings) throws UserException { - return env; - } - - @Override - protected boolean addShutdownHook() { - return false; - } - - } - - static String buildMultiline(String... args){ - return Arrays.asList(args).stream().collect(Collectors.joining("\n", "", "\n")); - } - - static void buildFakeExtension(Environment env, String description, String name, String className) throws IOException { - XPackExtensionTestUtil.writeProperties(extensionsFile(env).resolve(name), - "description", description, - "name", name, - "version", "1.0", - "xpack.version", Version.CURRENT.toString(), - "java.version", System.getProperty("java.specification.version"), - "classname", className); - } - - static Path extensionsFile(final Environment env) throws IOException { - return env.pluginsFile().resolve("x-pack").resolve("x-pack-security").resolve("extensions"); - } - - static MockTerminal listExtensions(Path home, Environment env) throws Exception { - MockTerminal terminal = new MockTerminal(); - int status = new MockListXPackExtensionCommand(env).main(new String[] { "-Epath.home=" + home }, terminal); - assertEquals(ExitCodes.OK, status); - return terminal; - } - - static MockTerminal listExtensions(Path home, Environment env, String[] args) throws Exception { - String[] argsAndHome = new String[args.length + 1]; - System.arraycopy(args, 0, argsAndHome, 0, args.length); - argsAndHome[args.length] = "-Epath.home=" + home; - MockTerminal terminal = new MockTerminal(); - int status = new MockListXPackExtensionCommand(env).main(argsAndHome, terminal); - assertEquals(ExitCodes.OK, status); - return terminal; - } - - public void testExtensionsDirMissing() throws Exception { - Files.delete(extensionsFile(env)); - IOException e = expectThrows(IOException.class, () -> listExtensions(home, env)); - assertTrue(e.getMessage(), e.getMessage().contains("Extensions directory missing")); - } - - public void testNoExtensions() throws Exception { - MockTerminal terminal = listExtensions(home, env); - assertTrue(terminal.getOutput(), terminal.getOutput().isEmpty()); - } - - public void testNoExtensionsVerbose() throws Exception { - String[] params = { "-v" 
}; - MockTerminal terminal = listExtensions(home, env, params); - assertEquals(terminal.getOutput(), buildMultiline("XPack Extensions directory: " + extensionsFile(env))); - } - - public void testOneExtension() throws Exception { - buildFakeExtension(env, "", "fake", "org.fake"); - MockTerminal terminal = listExtensions(home, env); - assertEquals(terminal.getOutput(), buildMultiline("fake")); - } - - public void testTwoExtensions() throws Exception { - buildFakeExtension(env, "", "fake1", "org.fake1"); - buildFakeExtension(env, "", "fake2", "org.fake2"); - MockTerminal terminal = listExtensions(home, env); - assertEquals(terminal.getOutput(), buildMultiline("fake1", "fake2")); - } - - public void testExtensionWithVerbose() throws Exception { - buildFakeExtension(env, "fake desc", "fake_extension", "org.fake"); - String[] params = { "-v" }; - MockTerminal terminal = listExtensions(home, env, params); - assertEquals(terminal.getOutput(), buildMultiline("XPack Extensions directory: " + extensionsFile(env), - "fake_extension", "- XPack Extension information:", "Name: fake_extension", - "Description: fake desc", "Version: 1.0", " * Classname: org.fake")); - } - - public void testExtensionWithVerboseMultipleExtensions() throws Exception { - buildFakeExtension(env, "fake desc 1", "fake_extension1", "org.fake"); - buildFakeExtension(env, "fake desc 2", "fake_extension2", "org.fake2"); - String[] params = { "-v" }; - MockTerminal terminal = listExtensions(home, env, params); - assertEquals(terminal.getOutput(), buildMultiline("XPack Extensions directory: " + extensionsFile(env), - "fake_extension1", "- XPack Extension information:", "Name: fake_extension1", - "Description: fake desc 1", "Version: 1.0", " * Classname: org.fake", - "fake_extension2", "- XPack Extension information:", "Name: fake_extension2", - "Description: fake desc 2", "Version: 1.0", " * Classname: org.fake2")); - } - - public void testExtensionWithoutVerboseMultipleExtensions() throws Exception { - buildFakeExtension(env, "fake desc 1", "fake_extension1", "org.fake"); - buildFakeExtension(env, "fake desc 2", "fake_extension2", "org.fake2"); - MockTerminal terminal = listExtensions(home, env, new String[0]); - String output = terminal.getOutput(); - assertEquals(output, buildMultiline("fake_extension1", "fake_extension2")); - } - - public void testExtensionWithoutDescriptorFile() throws Exception{ - Files.createDirectories(extensionsFile(env).resolve("fake1")); - NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> listExtensions(home, env)); - assertEquals(e.getFile(), - extensionsFile(env).resolve("fake1").resolve(XPackExtensionInfo.XPACK_EXTENSION_PROPERTIES).toString()); - } - - public void testExtensionWithWrongDescriptorFile() throws Exception{ - XPackExtensionTestUtil.writeProperties(extensionsFile(env).resolve("fake1"), - "description", "fake desc"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> listExtensions(home, env)); - assertEquals(e.getMessage(), "Property [name] is missing in [" + - extensionsFile(env).resolve("fake1").resolve(XPackExtensionInfo.XPACK_EXTENSION_PROPERTIES).toString() + "]"); - } -} diff --git a/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/RemoveXPackExtensionCommandTests.java b/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/RemoveXPackExtensionCommandTests.java deleted file mode 100644 index aeb23d15a78..00000000000 --- 
a/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/RemoveXPackExtensionCommandTests.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.extensions; - -import org.apache.lucene.util.LuceneTestCase; -import org.elasticsearch.cli.MockTerminal; -import org.elasticsearch.cli.UserException; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; - -import java.io.IOException; -import java.nio.file.DirectoryStream; -import java.nio.file.Files; -import java.nio.file.Path; - -@LuceneTestCase.SuppressFileSystems("*") -public class RemoveXPackExtensionCommandTests extends ESTestCase { - - private Path home; - private Environment env; - - @Before - public void setUp() throws Exception { - super.setUp(); - home = createTempDir(); - env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", home.toString()).build()); - } - - Path createExtensionDir(Environment env) throws IOException { - Path path = env.pluginsFile().resolve("x-pack").resolve("x-pack-security").resolve("extensions"); - return Files.createDirectories(path); - } - - static MockTerminal removeExtension(String name, Path home) throws Exception { - Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", home).build()); - MockTerminal terminal = new MockTerminal(); - new RemoveXPackExtensionCommand().execute(terminal, name, env); - return terminal; - } - - static void assertRemoveCleaned(Path extDir) throws IOException { - try (DirectoryStream<Path> stream = Files.newDirectoryStream(extDir)) { - for (Path file : stream) { - if (file.getFileName().toString().startsWith(".removing")) { - fail("Removal dir still exists, " + file); - } - } - } - } - - public void testMissing() throws Exception { - Path extDir = createExtensionDir(env); - UserException e = expectThrows(UserException.class, () -> removeExtension("dne", home)); - assertTrue(e.getMessage(), e.getMessage().contains("Extension dne not found")); - assertRemoveCleaned(extDir); - } - - public void testBasic() throws Exception { - Path extDir = createExtensionDir(env); - Files.createDirectory(extDir.resolve("fake")); - Files.createFile(extDir.resolve("fake").resolve("extension.jar")); - Files.createDirectory(extDir.resolve("fake").resolve("subdir")); - Files.createDirectory(extDir.resolve("other")); - removeExtension("fake", home); - assertFalse(Files.exists(extDir.resolve("fake"))); - assertTrue(Files.exists(extDir.resolve("other"))); - assertRemoveCleaned(extDir); - } - -} diff --git a/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/XPackExtensionInfoTests.java b/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/XPackExtensionInfoTests.java deleted file mode 100644 index ec960468879..00000000000 --- a/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/XPackExtensionInfoTests.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License.
- */ -package org.elasticsearch.xpack.core.extensions; - -import org.elasticsearch.Version; -import org.elasticsearch.test.ESTestCase; - -import java.nio.file.Path; - -public class XPackExtensionInfoTests extends ESTestCase { - - public void testReadFromProperties() throws Exception { - Path extensionDir = createTempDir().resolve("fake-extension"); - XPackExtensionTestUtil.writeProperties(extensionDir, - "description", "fake desc", - "name", "my_extension", - "version", "1.0", - "xpack.version", Version.CURRENT.toString(), - "java.version", System.getProperty("java.specification.version"), - "classname", "FakeExtension"); - XPackExtensionInfo info = XPackExtensionInfo.readFromProperties(extensionDir); - assertEquals("my_extension", info.getName()); - assertEquals("fake desc", info.getDescription()); - assertEquals("1.0", info.getVersion()); - assertEquals("FakeExtension", info.getClassname()); - } - - public void testReadFromPropertiesNameMissing() throws Exception { - Path extensionDir = createTempDir().resolve("fake-extension"); - XPackExtensionTestUtil.writeProperties(extensionDir); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - XPackExtensionInfo.readFromProperties(extensionDir); - }); - assertTrue(e.getMessage().contains("Property [name] is missing in")); - XPackExtensionTestUtil.writeProperties(extensionDir, "name", ""); - IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, () -> { - XPackExtensionInfo.readFromProperties(extensionDir); - }); - assertTrue(e1.getMessage().contains("Property [name] is missing in")); - } - - public void testReadFromPropertiesDescriptionMissing() throws Exception { - Path extensionDir = createTempDir().resolve("fake-extension"); - XPackExtensionTestUtil.writeProperties(extensionDir, "name", "fake-extension"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - XPackExtensionInfo.readFromProperties(extensionDir); - }); - assertTrue(e.getMessage().contains("[description] is missing")); - } - - public void testReadFromPropertiesVersionMissing() throws Exception { - Path extensionDir = createTempDir().resolve("fake-extension"); - XPackExtensionTestUtil.writeProperties(extensionDir, "description", "fake desc", "name", "fake-extension"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - XPackExtensionInfo.readFromProperties(extensionDir); - }); - assertTrue(e.getMessage().contains("[version] is missing")); - } - - public void testReadFromPropertiesElasticsearchVersionMissing() throws Exception { - Path extensionDir = createTempDir().resolve("fake-extension"); - XPackExtensionTestUtil.writeProperties(extensionDir, - "description", "fake desc", - "name", "my_extension", - "version", "1.0"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - XPackExtensionInfo.readFromProperties(extensionDir); - }); - assertTrue(e.getMessage().contains("[xpack.version] is missing")); - } - - public void testReadFromPropertiesJavaVersionMissing() throws Exception { - Path extensionDir = createTempDir().resolve("fake-extension"); - XPackExtensionTestUtil.writeProperties(extensionDir, - "description", "fake desc", - "name", "my_extension", - "xpack.version", Version.CURRENT.toString(), - "version", "1.0"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - XPackExtensionInfo.readFromProperties(extensionDir); - }); - assertTrue(e.getMessage().contains("[java.version] is missing")); - } 
- - public void testReadFromPropertiesJavaVersionIncompatible() throws Exception { - String extensionName = "fake-extension"; - Path extensionDir = createTempDir().resolve(extensionName); - XPackExtensionTestUtil.writeProperties(extensionDir, - "description", "fake desc", - "name", extensionName, - "xpack.version", Version.CURRENT.toString(), - "java.version", "1000000.0", - "classname", "FakeExtension", - "version", "1.0"); - IllegalStateException e = expectThrows(IllegalStateException.class, () -> { - XPackExtensionInfo.readFromProperties(extensionDir); - }); - assertTrue(e.getMessage(), e.getMessage().contains(extensionName + " requires Java")); - } - - public void testReadFromPropertiesBadJavaVersionFormat() throws Exception { - String extensionName = "fake-extension"; - Path extensionDir = createTempDir().resolve(extensionName); - XPackExtensionTestUtil.writeProperties(extensionDir, - "description", "fake desc", - "name", extensionName, - "xpack.version", Version.CURRENT.toString(), - "java.version", "1.7.0_80", - "classname", "FakeExtension", - "version", "1.0"); - IllegalStateException e = expectThrows(IllegalStateException.class, () -> { - XPackExtensionInfo.readFromProperties(extensionDir); - }); - assertTrue(e.getMessage(), - e.getMessage().equals("version string must be a sequence of nonnegative decimal " + - "integers separated by \".\"'s and may have leading zeros but was 1.7.0_80")); - } - - public void testReadFromPropertiesBogusElasticsearchVersion() throws Exception { - Path extensionDir = createTempDir().resolve("fake-extension"); - XPackExtensionTestUtil.writeProperties(extensionDir, - "description", "fake desc", - "version", "1.0", - "name", "my_extension", - "xpack.version", "bogus"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - XPackExtensionInfo.readFromProperties(extensionDir); - }); - assertTrue(e.getMessage().contains("version needs to contain major, minor, and revision")); - } - - public void testReadFromPropertiesOldElasticsearchVersion() throws Exception { - Path extensionDir = createTempDir().resolve("fake-extension"); - XPackExtensionTestUtil.writeProperties(extensionDir, - "description", "fake desc", - "name", "my_extension", - "version", "1.0", - "xpack.version", Version.V_5_0_0.toString()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - XPackExtensionInfo.readFromProperties(extensionDir); - }); - assertTrue(e.getMessage().contains("Was designed for version [5.0.0]")); - } - - public void testReadFromPropertiesJvmMissingClassname() throws Exception { - Path extensionDir = createTempDir().resolve("fake-extension"); - XPackExtensionTestUtil.writeProperties(extensionDir, - "description", "fake desc", - "name", "my_extension", - "version", "1.0", - "xpack.version", Version.CURRENT.toString(), - "java.version", System.getProperty("java.specification.version")); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - XPackExtensionInfo.readFromProperties(extensionDir); - }); - assertTrue(e.getMessage().contains("Property [classname] is missing")); - } -} diff --git a/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/XPackExtensionSecurityTests.java b/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/XPackExtensionSecurityTests.java deleted file mode 100644 index 141c5aed97a..00000000000 --- a/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/XPackExtensionSecurityTests.java +++ /dev/null @@ -1,59 +0,0 @@ -/* 
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.extensions; - -import org.elasticsearch.test.ESTestCase; - -import java.nio.file.Path; -import java.security.Permission; -import java.security.PermissionCollection; -import java.security.Permissions; -import java.util.Collections; -import java.util.List; - -public class XPackExtensionSecurityTests extends ESTestCase { - /** Test that we can parse the set of permissions correctly for a simple policy */ - public void testParsePermissions() throws Exception { - Path scratch = createTempDir(); - Path testFile = this.getDataPath("security/simple-x-pack-extension-security.policy"); - Permissions expected = new Permissions(); - expected.add(new RuntimePermission("queuePrintJob")); - PermissionCollection actual = InstallXPackExtensionCommand.parsePermissions(testFile, scratch); - assertEquals(expected, actual); - } - - /** Test that we can parse the set of permissions correctly for a complex policy */ - public void testParseTwoPermissions() throws Exception { - Path scratch = createTempDir(); - Path testFile = this.getDataPath("security/complex-x-pack-extension-security.policy"); - Permissions expected = new Permissions(); - expected.add(new RuntimePermission("getClassLoader")); - expected.add(new RuntimePermission("closeClassLoader")); - PermissionCollection actual = InstallXPackExtensionCommand.parsePermissions(testFile, scratch); - assertEquals(expected, actual); - } - - /** Test that we can format some simple permissions properly */ - public void testFormatSimplePermission() throws Exception { - assertEquals("java.lang.RuntimePermission queuePrintJob", - InstallXPackExtensionCommand.formatPermission(new RuntimePermission("queuePrintJob"))); - } - - /** Test that we can format an unresolved permission properly */ - public void testFormatUnresolvedPermission() throws Exception { - Path scratch = createTempDir(); - Path testFile = this.getDataPath("security/unresolved-x-pack-extension-security.policy"); - PermissionCollection actual = InstallXPackExtensionCommand.parsePermissions(testFile, scratch); - List<Permission> permissions = Collections.list(actual.elements()); - assertEquals(1, permissions.size()); - assertEquals("org.fake.FakePermission fakeName", InstallXPackExtensionCommand.formatPermission(permissions.get(0))); - } - - /** no guaranteed equals on these classes, we assert they contain the same set */ - private void assertEquals(PermissionCollection expected, PermissionCollection actual) { - assertEquals(asSet(Collections.list(expected.elements())), asSet(Collections.list(actual.elements()))); - } -} \ No newline at end of file diff --git a/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/XPackExtensionTestUtil.java b/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/XPackExtensionTestUtil.java deleted file mode 100644 index 5091f2bd87d..00000000000 --- a/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/XPackExtensionTestUtil.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License.
- */ -package org.elasticsearch.xpack.core.extensions; - - -import java.io.IOException; -import java.io.OutputStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Properties; - -/** Utility methods for testing extensions */ -public class XPackExtensionTestUtil { - - /** convenience method to write a plugin properties file */ - public static void writeProperties(Path pluginDir, String... stringProps) throws IOException { - assert stringProps.length % 2 == 0; - Files.createDirectories(pluginDir); - Path propertiesFile = pluginDir.resolve(XPackExtensionInfo.XPACK_EXTENSION_PROPERTIES); - Properties properties = new Properties(); - for (int i = 0; i < stringProps.length; i += 2) { - properties.put(stringProps[i], stringProps[i + 1]); - } - try (OutputStream out = Files.newOutputStream(propertiesFile)) { - properties.store(out, ""); - } - } -} diff --git a/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/XPackExtensionsServiceTests.java b/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/XPackExtensionsServiceTests.java deleted file mode 100644 index 68f90cfdf42..00000000000 --- a/plugin/core/src/test/java/org/elasticsearch/xpack/core/extensions/XPackExtensionsServiceTests.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.extensions; - -import org.elasticsearch.test.ESTestCase; - -import java.nio.file.Files; -import java.nio.file.Path; - -public class XPackExtensionsServiceTests extends ESTestCase { - public void testExistingPluginMissingDescriptor() throws Exception { - Path extensionsDir = createTempDir(); - Files.createDirectory(extensionsDir.resolve("extension-missing-descriptor")); - IllegalStateException e = expectThrows(IllegalStateException.class, () -> { - XPackExtensionsService.getExtensionBundles(extensionsDir); - }); - assertTrue(e.getMessage(), - e.getMessage().contains("Could not load extension descriptor for existing extension")); - } -} diff --git a/plugin/deprecation/build.gradle b/plugin/deprecation/build.gradle index 0ac50f495e3..8e40d35ca26 100644 --- a/plugin/deprecation/build.gradle +++ b/plugin/deprecation/build.gradle @@ -1,4 +1,4 @@ -evaluationDependsOn(':x-pack-elasticsearch:plugin:core') +evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.esplugin' esplugin { @@ -6,17 +6,11 @@ esplugin { description 'Elasticsearch Expanded Pack Plugin - Deprecation' classname 'org.elasticsearch.xpack.deprecation.Deprecation' hasNativeController false - requiresKeystore true + requiresKeystore false extendedPlugins = ['x-pack-core'] - licenseFile project(':x-pack-elasticsearch').file('LICENSE.txt') - noticeFile project(':x-pack-elasticsearch').file('NOTICE.txt') } archivesBaseName = 'x-pack-deprecation' -licenseHeaders.enabled = false - -integTest.enabled = false - dependencies { provided "org.elasticsearch:elasticsearch:${version}" @@ -28,5 +22,7 @@ dependencyLicenses { } run { - plugin ':x-pack-elasticsearch:plugin:core' + plugin xpackModule('core') } + +integTest.enabled = false diff --git a/plugin/graph/build.gradle b/plugin/graph/build.gradle index 30f36d50091..238558de24d 100644 --- a/plugin/graph/build.gradle +++ b/plugin/graph/build.gradle @@ -1,4 +1,4 @@ -evaluationDependsOn(':x-pack-elasticsearch:plugin:core') 
+evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.esplugin' esplugin { @@ -6,22 +6,16 @@ esplugin { description 'Elasticsearch Expanded Pack Plugin - Graph' classname 'org.elasticsearch.xpack.graph.Graph' hasNativeController false - requiresKeystore true + requiresKeystore false extendedPlugins = ['x-pack-core'] - licenseFile project(':x-pack-elasticsearch').file('LICENSE.txt') - noticeFile project(':x-pack-elasticsearch').file('NOTICE.txt') } archivesBaseName = 'x-pack-graph' -licenseHeaders.enabled = false - -integTest.enabled = false - dependencies { provided "org.elasticsearch:elasticsearch:${version}" provided "org.elasticsearch.plugin:x-pack-core:${version}" - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } dependencyLicenses { @@ -29,5 +23,7 @@ dependencyLicenses { } run { - plugin ':x-pack-elasticsearch:plugin:core' + plugin xpackModule('core') } + +integTest.enabled = false diff --git a/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/test/GraphTests.java b/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/test/GraphTests.java index 7bf23c0a769..67e05fbee29 100644 --- a/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/test/GraphTests.java +++ b/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/test/GraphTests.java @@ -128,8 +128,6 @@ public class GraphTests extends ESSingleNodeTestCase { checkVertexDepth(response, 0, "john", "paul", "george", "ringo"); checkVertexDepth(response, 1, "stevie", "yoko", "roy"); - checkVertexIsMoreImportant(response, "John's only collaboration is more relevant than one of Paul's many", "yoko", "stevie"); - checkVertexIsMoreImportant(response, "John's only collaboration is more relevant than George's with profligate Roy", "yoko", "roy"); assertNull("Elvis is a 3rd tier connection so should not be returned here", response.getVertex(Vertex.createId("people","elvis"))); } diff --git a/plugin/licenses/activation-1.1.1.jar.sha1 b/plugin/licenses/activation-1.1.1.jar.sha1 deleted file mode 100644 index 3bba66230e8..00000000000 --- a/plugin/licenses/activation-1.1.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -485de3a253e23f645037828c07f1d7f1af40763a \ No newline at end of file diff --git a/plugin/licenses/activation-LICENSE.txt b/plugin/licenses/activation-LICENSE.txt deleted file mode 100644 index 5f3844e85cb..00000000000 --- a/plugin/licenses/activation-LICENSE.txt +++ /dev/null @@ -1,119 +0,0 @@ -COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.0 - -1. Definitions. - -1.1. Contributor means each individual or entity that creates or contributes to the creation of Modifications. - -1.2. Contributor Version means the combination of the Original Software, prior Modifications used by a Contributor (if any), and the Modifications made by that particular Contributor. - -1.3. Covered Software means (a) the Original Software, or (b) Modifications, or (c) the combination of files containing Original Software with files containing Modifications, in each case including portions thereof. - -1.4. Executable means the Covered Software in any form other than Source Code. - -1.5. Initial Developer means the individual or entity that first makes Original Software available under this License. - -1.6. Larger Work means a work which combines Covered Software or portions thereof with code not governed by the terms of this License. - -1.7. License means this document. - -1.8. 
Licensable means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently acquired, any and all of the rights conveyed herein. - -1.9. Modifications means the Source Code and Executable form of any of the following: - -A. Any file that results from an addition to, deletion from or modification of the contents of a file containing Original Software or previous Modifications; - -B. Any new file that contains any part of the Original Software or previous Modification; or - -C. Any new file that is contributed or otherwise made available under the terms of this License. - -1.10. Original Software means the Source Code and Executable form of computer software code that is originally released under this License. - -1.11. Patent Claims means any patent claim(s), now owned or hereafter acquired, including without limitation, method, process, and apparatus claims, in any patent Licensable by grantor. - -1.12. Source Code means (a) the common form of computer software code in which modifications are made and (b) associated documentation included in or with such code. - -1.13. You (or Your) means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, You includes any entity which controls, is controlled by, or is under common control with You. For purposes of this definition, control means (a)the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b)ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. - -2. License Grants. - -2.1. The Initial Developer Grant. -Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, the Initial Developer hereby grants You a world-wide, royalty-free, non-exclusive license: -(a) under intellectual property rights (other than patent or trademark) Licensable by Initial Developer, to use, reproduce, modify, display, perform, sublicense and distribute the Original Software (or portions thereof), with or without Modifications, and/or as part of a Larger Work; and -(b) under Patent Claims infringed by the making, using or selling of Original Software, to make, have made, use, practice, sell, and offer for sale, and/or otherwise dispose of the Original Software (or portions thereof). -(c) The licenses granted in Sections2.1(a) and (b) are effective on the date Initial Developer first distributes or otherwise makes the Original Software available to a third party under the terms of this License. -(d) Notwithstanding Section2.1(b) above, no patent license is granted: (1)for code that You delete from the Original Software, or (2)for infringements caused by: (i)the modification of the Original Software, or (ii)the combination of the Original Software with other software or devices. - -2.2. Contributor Grant. 
-Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: -(a) under intellectual property rights (other than patent or trademark) Licensable by Contributor to use, reproduce, modify, display, perform, sublicense and distribute the Modifications created by such Contributor (or portions thereof), either on an unmodified basis, with other Modifications, as Covered Software and/or as part of a Larger Work; and -(b) under Patent Claims infringed by the making, using, or selling of Modifications made by that Contributor either alone and/or in combination with its Contributor Version (or portions of such combination), to make, use, sell, offer for sale, have made, and/or otherwise dispose of: (1)Modifications made by that Contributor (or portions thereof); and (2)the combination of Modifications made by that Contributor with its Contributor Version (or portions of such combination). -(c) The licenses granted in Sections2.2(a) and 2.2(b) are effective on the date Contributor first distributes or otherwise makes the Modifications available to a third party. -(d) Notwithstanding Section2.2(b) above, no patent license is granted: (1)for any code that Contributor has deleted from the Contributor Version; (2)for infringements caused by: (i)third party modifications of Contributor Version, or (ii)the combination of Modifications made by that Contributor with other software (except as part of the Contributor Version) or other devices; or (3)under Patent Claims infringed by Covered Software in the absence of Modifications made by that Contributor. - -3. Distribution Obligations. - -3.1. Availability of Source Code. - -Any Covered Software that You distribute or otherwise make available in Executable form must also be made available in Source Code form and that Source Code form must be distributed only under the terms of this License. You must include a copy of this License with every copy of the Source Code form of the Covered Software You distribute or otherwise make available. You must inform recipients of any such Covered Software in Executable form as to how they can obtain such Covered Software in Source Code form in a reasonable manner on or through a medium customarily used for software exchange. - -3.2. Modifications. - -The Modifications that You create or to which You contribute are governed by the terms of this License. You represent that You believe Your Modifications are Your original creation(s) and/or You have sufficient rights to grant the rights conveyed by this License. - -3.3. Required Notices. -You must include a notice in each of Your Modifications that identifies You as the Contributor of the Modification. You may not remove or alter any copyright, patent or trademark notices contained within the Covered Software, or any notices of licensing or any descriptive text giving attribution to any Contributor or the Initial Developer. - -3.4. Application of Additional Terms. -You may not offer or impose any terms on any Covered Software in Source Code form that alters or restricts the applicable version of this License or the recipients rights hereunder. You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, you may do so only on Your own behalf, and not on behalf of the Initial Developer or any Contributor. 
You must make it absolutely clear that any such warranty, support, indemnity or liability obligation is offered by You alone, and You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of warranty, support, indemnity or liability terms You offer. - -3.5. Distribution of Executable Versions. -You may distribute the Executable form of the Covered Software under the terms of this License or under the terms of a license of Your choice, which may contain terms different from this License, provided that You are in compliance with the terms of this License and that the license for the Executable form does not attempt to limit or alter the recipients rights in the Source Code form from the rights set forth in this License. If You distribute the Covered Software in Executable form under a different license, You must make it absolutely clear that any terms which differ from this License are offered by You alone, not by the Initial Developer or Contributor. You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of any such terms You offer. - -3.6. Larger Works. -You may create a Larger Work by combining Covered Software with other code not governed by the terms of this License and distribute the Larger Work as a single product. In such a case, You must make sure the requirements of this License are fulfilled for the Covered Software. - -4. Versions of the License. - -4.1. New Versions. -Sun Microsystems, Inc. is the initial license steward and may publish revised and/or new versions of this License from time to time. Each version will be given a distinguishing version number. Except as provided in Section 4.3, no one other than the license steward has the right to modify this License. - -4.2. Effect of New Versions. - -You may always continue to use, distribute or otherwise make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. If the Initial Developer includes a notice in the Original Software prohibiting it from being distributed or otherwise made available under any subsequent version of the License, You must distribute and make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. Otherwise, You may also choose to use, distribute or otherwise make the Covered Software available under the terms of any subsequent version of the License published by the license steward. -4.3. Modified Versions. - -When You are an Initial Developer and You want to create a new license for Your Original Software, You may create and use a modified version of this License if You: (a)rename the license and remove any references to the name of the license steward (except to note that the license differs from this License); and (b)otherwise make it clear that the license contains terms which differ from this License. - -5. DISCLAIMER OF WARRANTY. - -COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN AS IS BASIS, WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED SOFTWARE IS WITH YOU. 
SHOULD ANY COVERED SOFTWARE PROVE DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF ANY COVERED SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. - -6. TERMINATION. - -6.1. This License and the rights granted hereunder will terminate automatically if You fail to comply with terms herein and fail to cure such breach within 30 days of becoming aware of the breach. Provisions which, by their nature, must remain in effect beyond the termination of this License shall survive. - -6.2. If You assert a patent infringement claim (excluding declaratory judgment actions) against Initial Developer or a Contributor (the Initial Developer or Contributor against whom You assert such claim is referred to as Participant) alleging that the Participant Software (meaning the Contributor Version where the Participant is a Contributor or the Original Software where the Participant is the Initial Developer) directly or indirectly infringes any patent, then any and all rights granted directly or indirectly to You by such Participant, the Initial Developer (if the Initial Developer is not the Participant) and all Contributors under Sections2.1 and/or 2.2 of this License shall, upon 60 days notice from Participant terminate prospectively and automatically at the expiration of such 60 day notice period, unless if within such 60 day period You withdraw Your claim with respect to the Participant Software against such Participant either unilaterally or pursuant to a written agreement with Participant. - -6.3. In the event of termination under Sections6.1 or 6.2 above, all end user licenses that have been validly granted by You or any distributor hereunder prior to termination (excluding licenses granted to You by any distributor) shall survive termination. - -7. LIMITATION OF LIABILITY. - -UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED SOFTWARE, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOST PROFITS, LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY RESULTING FROM SUCH PARTYS NEGLIGENCE TO THE EXTENT APPLICABLE LAW PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU. - -8. U.S. GOVERNMENT END USERS. - -The Covered Software is a commercial item, as that term is defined in 48C.F.R.2.101 (Oct. 1995), consisting of commercial computer software (as that term is defined at 48 C.F.R. 252.227-7014(a)(1)) and commercial computer software documentation as such terms are used in 48C.F.R.12.212 (Sept. 1995). Consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995), all U.S. Government End Users acquire Covered Software with only those rights set forth herein. This U.S. 
Government Rights clause is in lieu of, and supersedes, any other FAR, DFAR, or other clause or provision that addresses Government rights in computer software under this License. - -9. MISCELLANEOUS. - -This License represents the complete agreement concerning subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. This License shall be governed by the law of the jurisdiction specified in a notice contained within the Original Software (except to the extent applicable law, if any, provides otherwise), excluding such jurisdictions conflict-of-law provisions. Any litigation relating to this License shall be subject to the jurisdiction of the courts located in the jurisdiction and venue specified in a notice contained within the Original Software, with the losing party responsible for costs, including, without limitation, court costs and reasonable attorneys fees and expenses. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not apply to this License. You agree that You alone are responsible for compliance with the United States export administration regulations (and the export control laws and regulation of any other countries) when You use, distribute or otherwise make available any Covered Software. - -10. RESPONSIBILITY FOR CLAIMS. - -As between Initial Developer and the Contributors, each party is responsible for claims and damages arising, directly or indirectly, out of its utilization of rights under this License and You agree to work with Initial Developer and Contributors to distribute such responsibility on an equitable basis. Nothing herein is intended or shall be deemed to constitute any admission of liability. - -NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) -The GlassFish code released under the CDDL shall be governed by the laws of the State of California (excluding conflict-of-law provisions). Any litigation relating to this License shall be subject to the jurisdiction of the Federal Courts of the Northern District of California and the state courts of the State of California, with venue lying in Santa Clara County, California. - - - diff --git a/plugin/licenses/activation-NOTICE.txt b/plugin/licenses/activation-NOTICE.txt deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/plugin/licenses/bcpkix-jdk15on-1.58.jar.sha1 b/plugin/licenses/bcpkix-jdk15on-1.58.jar.sha1 deleted file mode 100644 index 1fbdc7fcc1f..00000000000 --- a/plugin/licenses/bcpkix-jdk15on-1.58.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -15a760a039b040e767a75c77ffcc4ff62558f903 \ No newline at end of file diff --git a/plugin/licenses/bcprov-jdk15on-1.58.jar.sha1 b/plugin/licenses/bcprov-jdk15on-1.58.jar.sha1 deleted file mode 100644 index 95bc28eb146..00000000000 --- a/plugin/licenses/bcprov-jdk15on-1.58.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2c9aa1c4e3372b447ba5daabade4adf2a2264b12 \ No newline at end of file diff --git a/plugin/licenses/bouncycastle-LICENSE.txt b/plugin/licenses/bouncycastle-LICENSE.txt deleted file mode 100644 index 1bd35a7a35c..00000000000 --- a/plugin/licenses/bouncycastle-LICENSE.txt +++ /dev/null @@ -1,17 +0,0 @@ -Copyright (c) 2000-2015 The Legion of the Bouncy Castle Inc. 
(http://www.bouncycastle.org) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software -and associated documentation files (the "Software"), to deal in the Software without restriction, -including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial -portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, -INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR -PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/plugin/licenses/bouncycastle-NOTICE.txt b/plugin/licenses/bouncycastle-NOTICE.txt deleted file mode 100644 index 8b137891791..00000000000 --- a/plugin/licenses/bouncycastle-NOTICE.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/plugin/licenses/guava-16.0.1.jar.sha1 b/plugin/licenses/guava-16.0.1.jar.sha1 deleted file mode 100644 index 68f2b233a00..00000000000 --- a/plugin/licenses/guava-16.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5fa98cd1a63c99a44dd8d3b77e4762b066a5d0c5 \ No newline at end of file diff --git a/plugin/licenses/guava-LICENSE.txt b/plugin/licenses/guava-LICENSE.txt deleted file mode 100644 index d6456956733..00000000000 --- a/plugin/licenses/guava-LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/plugin/licenses/guava-NOTICE.txt b/plugin/licenses/guava-NOTICE.txt deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/plugin/licenses/javax.mail-1.5.6.jar.sha1 b/plugin/licenses/javax.mail-1.5.6.jar.sha1 deleted file mode 100644 index c9d823f6a53..00000000000 --- a/plugin/licenses/javax.mail-1.5.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ab5daef2f881c42c8e280cbe918ec4d7fdfd7efe \ No newline at end of file diff --git a/plugin/licenses/javax.mail-LICENSE.txt b/plugin/licenses/javax.mail-LICENSE.txt deleted file mode 100644 index 5ad62c442b3..00000000000 --- a/plugin/licenses/javax.mail-LICENSE.txt +++ /dev/null @@ -1,759 +0,0 @@ -COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.1 - -1. Definitions. - - 1.1. "Contributor" means each individual or entity that creates or - contributes to the creation of Modifications. - - 1.2. 
"Contributor Version" means the combination of the Original - Software, prior Modifications used by a Contributor (if any), and - the Modifications made by that particular Contributor. - - 1.3. "Covered Software" means (a) the Original Software, or (b) - Modifications, or (c) the combination of files containing Original - Software with files containing Modifications, in each case including - portions thereof. - - 1.4. "Executable" means the Covered Software in any form other than - Source Code. - - 1.5. "Initial Developer" means the individual or entity that first - makes Original Software available under this License. - - 1.6. "Larger Work" means a work which combines Covered Software or - portions thereof with code not governed by the terms of this License. - - 1.7. "License" means this document. - - 1.8. "Licensable" means having the right to grant, to the maximum - extent possible, whether at the time of the initial grant or - subsequently acquired, any and all of the rights conveyed herein. - - 1.9. "Modifications" means the Source Code and Executable form of - any of the following: - - A. Any file that results from an addition to, deletion from or - modification of the contents of a file containing Original Software - or previous Modifications; - - B. Any new file that contains any part of the Original Software or - previous Modification; or - - C. Any new file that is contributed or otherwise made available - under the terms of this License. - - 1.10. "Original Software" means the Source Code and Executable form - of computer software code that is originally released under this - License. - - 1.11. "Patent Claims" means any patent claim(s), now owned or - hereafter acquired, including without limitation, method, process, - and apparatus claims, in any patent Licensable by grantor. - - 1.12. "Source Code" means (a) the common form of computer software - code in which modifications are made and (b) associated - documentation included in or with such code. - - 1.13. "You" (or "Your") means an individual or a legal entity - exercising rights under, and complying with all of the terms of, - this License. For legal entities, "You" includes any entity which - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants. - - 2.1. The Initial Developer Grant. - - Conditioned upon Your compliance with Section 3.1 below and subject - to third party intellectual property claims, the Initial Developer - hereby grants You a world-wide, royalty-free, non-exclusive license: - - (a) under intellectual property rights (other than patent or - trademark) Licensable by Initial Developer, to use, reproduce, - modify, display, perform, sublicense and distribute the Original - Software (or portions thereof), with or without Modifications, - and/or as part of a Larger Work; and - - (b) under Patent Claims infringed by the making, using or selling of - Original Software, to make, have made, use, practice, sell, and - offer for sale, and/or otherwise dispose of the Original Software - (or portions thereof). 
- - (c) The licenses granted in Sections 2.1(a) and (b) are effective on - the date Initial Developer first distributes or otherwise makes the - Original Software available to a third party under the terms of this - License. - - (d) Notwithstanding Section 2.1(b) above, no patent license is - granted: (1) for code that You delete from the Original Software, or - (2) for infringements caused by: (i) the modification of the - Original Software, or (ii) the combination of the Original Software - with other software or devices. - - 2.2. Contributor Grant. - - Conditioned upon Your compliance with Section 3.1 below and subject - to third party intellectual property claims, each Contributor hereby - grants You a world-wide, royalty-free, non-exclusive license: - - (a) under intellectual property rights (other than patent or - trademark) Licensable by Contributor to use, reproduce, modify, - display, perform, sublicense and distribute the Modifications - created by such Contributor (or portions thereof), either on an - unmodified basis, with other Modifications, as Covered Software - and/or as part of a Larger Work; and - - (b) under Patent Claims infringed by the making, using, or selling - of Modifications made by that Contributor either alone and/or in - combination with its Contributor Version (or portions of such - combination), to make, use, sell, offer for sale, have made, and/or - otherwise dispose of: (1) Modifications made by that Contributor (or - portions thereof); and (2) the combination of Modifications made by - that Contributor with its Contributor Version (or portions of such - combination). - - (c) The licenses granted in Sections 2.2(a) and 2.2(b) are effective - on the date Contributor first distributes or otherwise makes the - Modifications available to a third party. - - (d) Notwithstanding Section 2.2(b) above, no patent license is - granted: (1) for any code that Contributor has deleted from the - Contributor Version; (2) for infringements caused by: (i) third - party modifications of Contributor Version, or (ii) the combination - of Modifications made by that Contributor with other software - (except as part of the Contributor Version) or other devices; or (3) - under Patent Claims infringed by Covered Software in the absence of - Modifications made by that Contributor. - -3. Distribution Obligations. - - 3.1. Availability of Source Code. - - Any Covered Software that You distribute or otherwise make available - in Executable form must also be made available in Source Code form - and that Source Code form must be distributed only under the terms - of this License. You must include a copy of this License with every - copy of the Source Code form of the Covered Software You distribute - or otherwise make available. You must inform recipients of any such - Covered Software in Executable form as to how they can obtain such - Covered Software in Source Code form in a reasonable manner on or - through a medium customarily used for software exchange. - - 3.2. Modifications. - - The Modifications that You create or to which You contribute are - governed by the terms of this License. You represent that You - believe Your Modifications are Your original creation(s) and/or You - have sufficient rights to grant the rights conveyed by this License. - - 3.3. Required Notices. - - You must include a notice in each of Your Modifications that - identifies You as the Contributor of the Modification. 
You may not - remove or alter any copyright, patent or trademark notices contained - within the Covered Software, or any notices of licensing or any - descriptive text giving attribution to any Contributor or the - Initial Developer. - - 3.4. Application of Additional Terms. - - You may not offer or impose any terms on any Covered Software in - Source Code form that alters or restricts the applicable version of - this License or the recipients' rights hereunder. You may choose to - offer, and to charge a fee for, warranty, support, indemnity or - liability obligations to one or more recipients of Covered Software. - However, you may do so only on Your own behalf, and not on behalf of - the Initial Developer or any Contributor. You must make it - absolutely clear that any such warranty, support, indemnity or - liability obligation is offered by You alone, and You hereby agree - to indemnify the Initial Developer and every Contributor for any - liability incurred by the Initial Developer or such Contributor as a - result of warranty, support, indemnity or liability terms You offer. - - 3.5. Distribution of Executable Versions. - - You may distribute the Executable form of the Covered Software under - the terms of this License or under the terms of a license of Your - choice, which may contain terms different from this License, - provided that You are in compliance with the terms of this License - and that the license for the Executable form does not attempt to - limit or alter the recipient's rights in the Source Code form from - the rights set forth in this License. If You distribute the Covered - Software in Executable form under a different license, You must make - it absolutely clear that any terms which differ from this License - are offered by You alone, not by the Initial Developer or - Contributor. You hereby agree to indemnify the Initial Developer and - every Contributor for any liability incurred by the Initial - Developer or such Contributor as a result of any such terms You offer. - - 3.6. Larger Works. - - You may create a Larger Work by combining Covered Software with - other code not governed by the terms of this License and distribute - the Larger Work as a single product. In such a case, You must make - sure the requirements of this License are fulfilled for the Covered - Software. - -4. Versions of the License. - - 4.1. New Versions. - - Oracle is the initial license steward and may publish revised and/or - new versions of this License from time to time. Each version will be - given a distinguishing version number. Except as provided in Section - 4.3, no one other than the license steward has the right to modify - this License. - - 4.2. Effect of New Versions. - - You may always continue to use, distribute or otherwise make the - Covered Software available under the terms of the version of the - License under which You originally received the Covered Software. If - the Initial Developer includes a notice in the Original Software - prohibiting it from being distributed or otherwise made available - under any subsequent version of the License, You must distribute and - make the Covered Software available under the terms of the version - of the License under which You originally received the Covered - Software. Otherwise, You may also choose to use, distribute or - otherwise make the Covered Software available under the terms of any - subsequent version of the License published by the license steward. - - 4.3. Modified Versions. 
- - When You are an Initial Developer and You want to create a new - license for Your Original Software, You may create and use a - modified version of this License if You: (a) rename the license and - remove any references to the name of the license steward (except to - note that the license differs from this License); and (b) otherwise - make it clear that the license contains terms which differ from this - License. - -5. DISCLAIMER OF WARRANTY. - - COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS, - WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, - INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE - IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR - NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF - THE COVERED SOFTWARE IS WITH YOU. SHOULD ANY COVERED SOFTWARE PROVE - DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY - OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, - REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN - ESSENTIAL PART OF THIS LICENSE. NO USE OF ANY COVERED SOFTWARE IS - AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. - -6. TERMINATION. - - 6.1. This License and the rights granted hereunder will terminate - automatically if You fail to comply with terms herein and fail to - cure such breach within 30 days of becoming aware of the breach. - Provisions which, by their nature, must remain in effect beyond the - termination of this License shall survive. - - 6.2. If You assert a patent infringement claim (excluding - declaratory judgment actions) against Initial Developer or a - Contributor (the Initial Developer or Contributor against whom You - assert such claim is referred to as "Participant") alleging that the - Participant Software (meaning the Contributor Version where the - Participant is a Contributor or the Original Software where the - Participant is the Initial Developer) directly or indirectly - infringes any patent, then any and all rights granted directly or - indirectly to You by such Participant, the Initial Developer (if the - Initial Developer is not the Participant) and all Contributors under - Sections 2.1 and/or 2.2 of this License shall, upon 60 days notice - from Participant terminate prospectively and automatically at the - expiration of such 60 day notice period, unless if within such 60 - day period You withdraw Your claim with respect to the Participant - Software against such Participant either unilaterally or pursuant to - a written agreement with Participant. - - 6.3. If You assert a patent infringement claim against Participant - alleging that the Participant Software directly or indirectly - infringes any patent where such claim is resolved (such as by - license or settlement) prior to the initiation of patent - infringement litigation, then the reasonable value of the licenses - granted by such Participant under Sections 2.1 or 2.2 shall be taken - into account in determining the amount or value of any payment or - license. - - 6.4. In the event of termination under Sections 6.1 or 6.2 above, - all end user licenses that have been validly granted by You or any - distributor hereunder prior to termination (excluding licenses - granted to You by any distributor) shall survive termination. - -7. LIMITATION OF LIABILITY. 
- - UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT - (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE - INITIAL DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF - COVERED SOFTWARE, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE - TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR - CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT - LIMITATION, DAMAGES FOR LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER - FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR - LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE - POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF LIABILITY SHALL NOT - APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY RESULTING FROM SUCH - PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW PROHIBITS SUCH - LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR - LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION - AND LIMITATION MAY NOT APPLY TO YOU. - -8. U.S. GOVERNMENT END USERS. - - The Covered Software is a "commercial item," as that term is defined - in 48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer - software" (as that term is defined at 48 C.F.R. - 252.227-7014(a)(1)) and "commercial computer software documentation" - as such terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent - with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 - (June 1995), all U.S. Government End Users acquire Covered Software - with only those rights set forth herein. This U.S. Government Rights - clause is in lieu of, and supersedes, any other FAR, DFAR, or other - clause or provision that addresses Government rights in computer - software under this License. - -9. MISCELLANEOUS. - - This License represents the complete agreement concerning subject - matter hereof. If any provision of this License is held to be - unenforceable, such provision shall be reformed only to the extent - necessary to make it enforceable. This License shall be governed by - the law of the jurisdiction specified in a notice contained within - the Original Software (except to the extent applicable law, if any, - provides otherwise), excluding such jurisdiction's conflict-of-law - provisions. Any litigation relating to this License shall be subject - to the jurisdiction of the courts located in the jurisdiction and - venue specified in a notice contained within the Original Software, - with the losing party responsible for costs, including, without - limitation, court costs and reasonable attorneys' fees and expenses. - The application of the United Nations Convention on Contracts for - the International Sale of Goods is expressly excluded. Any law or - regulation which provides that the language of a contract shall be - construed against the drafter shall not apply to this License. You - agree that You alone are responsible for compliance with the United - States export administration regulations (and the export control - laws and regulation of any other countries) when You use, distribute - or otherwise make available any Covered Software. - -10. RESPONSIBILITY FOR CLAIMS. - - As between Initial Developer and the Contributors, each party is - responsible for claims and damages arising, directly or indirectly, - out of its utilization of rights under this License and You agree to - work with Initial Developer and Contributors to distribute such - responsibility on an equitable basis. Nothing herein is intended or - shall be deemed to constitute any admission of liability. 
- ------------------------------------------------------------------------- - -NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND DISTRIBUTION -LICENSE (CDDL) - -The code released under the CDDL shall be governed by the laws of the -State of California (excluding conflict-of-law provisions). Any -litigation relating to this License shall be subject to the jurisdiction -of the Federal Courts of the Northern District of California and the -state courts of the State of California, with venue lying in Santa Clara -County, California. - - - - The GNU General Public License (GPL) Version 2, June 1991 - -Copyright (C) 1989, 1991 Free Software Foundation, Inc. -51 Franklin Street, Fifth Floor -Boston, MA 02110-1335 -USA - -Everyone is permitted to copy and distribute verbatim copies -of this license document, but changing it is not allowed. - -Preamble - -The licenses for most software are designed to take away your freedom to -share and change it. By contrast, the GNU General Public License is -intended to guarantee your freedom to share and change free software--to -make sure the software is free for all its users. This General Public -License applies to most of the Free Software Foundation's software and -to any other program whose authors commit to using it. (Some other Free -Software Foundation software is covered by the GNU Library General -Public License instead.) You can apply it to your programs, too. - -When we speak of free software, we are referring to freedom, not price. -Our General Public Licenses are designed to make sure that you have the -freedom to distribute copies of free software (and charge for this -service if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs; and that you know you can do these things. - -To protect your rights, we need to make restrictions that forbid anyone -to deny you these rights or to ask you to surrender the rights. These -restrictions translate to certain responsibilities for you if you -distribute copies of the software, or if you modify it. - -For example, if you distribute copies of such a program, whether gratis -or for a fee, you must give the recipients all the rights that you have. -You must make sure that they, too, receive or can get the source code. -And you must show them these terms so they know their rights. - -We protect your rights with two steps: (1) copyright the software, and -(2) offer you this license which gives you legal permission to copy, -distribute and/or modify the software. - -Also, for each author's protection and ours, we want to make certain -that everyone understands that there is no warranty for this free -software. If the software is modified by someone else and passed on, we -want its recipients to know that what they have is not the original, so -that any problems introduced by others will not reflect on the original -authors' reputations. - -Finally, any free program is threatened constantly by software patents. -We wish to avoid the danger that redistributors of a free program will -individually obtain patent licenses, in effect making the program -proprietary. To prevent this, we have made it clear that any patent must -be licensed for everyone's free use or not licensed at all. - -The precise terms and conditions for copying, distribution and -modification follow. - -TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - -0. 
This License applies to any program or other work which contains a -notice placed by the copyright holder saying it may be distributed under -the terms of this General Public License. The "Program", below, refers -to any such program or work, and a "work based on the Program" means -either the Program or any derivative work under copyright law: that is -to say, a work containing the Program or a portion of it, either -verbatim or with modifications and/or translated into another language. -(Hereinafter, translation is included without limitation in the term -"modification".) Each licensee is addressed as "you". - -Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of running -the Program is not restricted, and the output from the Program is -covered only if its contents constitute a work based on the Program -(independent of having been made by running the Program). Whether that -is true depends on what the Program does. - -1. You may copy and distribute verbatim copies of the Program's source -code as you receive it, in any medium, provided that you conspicuously -and appropriately publish on each copy an appropriate copyright notice -and disclaimer of warranty; keep intact all the notices that refer to -this License and to the absence of any warranty; and give any other -recipients of the Program a copy of this License along with the Program. - -You may charge a fee for the physical act of transferring a copy, and -you may at your option offer warranty protection in exchange for a fee. - -2. You may modify your copy or copies of the Program or any portion of -it, thus forming a work based on the Program, and copy and distribute -such modifications or work under the terms of Section 1 above, provided -that you also meet all of these conditions: - - a) You must cause the modified files to carry prominent notices - stating that you changed the files and the date of any change. - - b) You must cause any work that you distribute or publish, that in - whole or in part contains or is derived from the Program or any part - thereof, to be licensed as a whole at no charge to all third parties - under the terms of this License. - - c) If the modified program normally reads commands interactively - when run, you must cause it, when started running for such - interactive use in the most ordinary way, to print or display an - announcement including an appropriate copyright notice and a notice - that there is no warranty (or else, saying that you provide a - warranty) and that users may redistribute the program under these - conditions, and telling the user how to view a copy of this License. - (Exception: if the Program itself is interactive but does not - normally print such an announcement, your work based on the Program - is not required to print an announcement.) - -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Program, and -can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. But when you -distribute the same sections as part of a whole which is a work based on -the Program, the distribution of the whole must be on the terms of this -License, whose permissions for other licensees extend to the entire -whole, and thus to each and every part regardless of who wrote it. 
- -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Program. - -In addition, mere aggregation of another work not based on the Program -with the Program (or with a work based on the Program) on a volume of a -storage or distribution medium does not bring the other work under the -scope of this License. - -3. You may copy and distribute the Program (or a work based on it, -under Section 2) in object code or executable form under the terms of -Sections 1 and 2 above provided that you also do one of the following: - - a) Accompany it with the complete corresponding machine-readable - source code, which must be distributed under the terms of Sections 1 - and 2 above on a medium customarily used for software interchange; or, - - b) Accompany it with a written offer, valid for at least three - years, to give any third party, for a charge no more than your cost - of physically performing source distribution, a complete - machine-readable copy of the corresponding source code, to be - distributed under the terms of Sections 1 and 2 above on a medium - customarily used for software interchange; or, - - c) Accompany it with the information you received as to the offer to - distribute corresponding source code. (This alternative is allowed - only for noncommercial distribution and only if you received the - program in object code or executable form with such an offer, in - accord with Subsection b above.) - -The source code for a work means the preferred form of the work for -making modifications to it. For an executable work, complete source code -means all the source code for all modules it contains, plus any -associated interface definition files, plus the scripts used to control -compilation and installation of the executable. However, as a special -exception, the source code distributed need not include anything that is -normally distributed (in either source or binary form) with the major -components (compiler, kernel, and so on) of the operating system on -which the executable runs, unless that component itself accompanies the -executable. - -If distribution of executable or object code is made by offering access -to copy from a designated place, then offering equivalent access to copy -the source code from the same place counts as distribution of the source -code, even though third parties are not compelled to copy the source -along with the object code. - -4. You may not copy, modify, sublicense, or distribute the Program -except as expressly provided under this License. Any attempt otherwise -to copy, modify, sublicense or distribute the Program is void, and will -automatically terminate your rights under this License. However, parties -who have received copies, or rights, from you under this License will -not have their licenses terminated so long as such parties remain in -full compliance. - -5. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Program or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Program (or any work based on the -Program), you indicate your acceptance of this License to do so, and all -its terms and conditions for copying, distributing or modifying the -Program or works based on it. - -6. 
Each time you redistribute the Program (or any work based on the -Program), the recipient automatically receives a license from the -original licensor to copy, distribute or modify the Program subject to -these terms and conditions. You may not impose any further restrictions -on the recipients' exercise of the rights granted herein. You are not -responsible for enforcing compliance by third parties to this License. - -7. If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot distribute -so as to satisfy simultaneously your obligations under this License and -any other pertinent obligations, then as a consequence you may not -distribute the Program at all. For example, if a patent license would -not permit royalty-free redistribution of the Program by all those who -receive copies directly or indirectly through you, then the only way you -could satisfy both it and this License would be to refrain entirely from -distribution of the Program. - -If any portion of this section is held invalid or unenforceable under -any particular circumstance, the balance of the section is intended to -apply and the section as a whole is intended to apply in other -circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system, which is implemented -by public license practices. Many people have made generous -contributions to the wide range of software distributed through that -system in reliance on consistent application of that system; it is up to -the author/donor to decide if he or she is willing to distribute -software through any other system and a licensee cannot impose that choice. - -This section is intended to make thoroughly clear what is believed to be -a consequence of the rest of this License. - -8. If the distribution and/or use of the Program is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Program under this License may -add an explicit geographical distribution limitation excluding those -countries, so that distribution is permitted only in or among countries -not thus excluded. In such case, this License incorporates the -limitation as if written in the body of this License. - -9. The Free Software Foundation may publish revised and/or new -versions of the General Public License from time to time. Such new -versions will be similar in spirit to the present version, but may -differ in detail to address new problems or concerns. - -Each version is given a distinguishing version number. If the Program -specifies a version number of this License which applies to it and "any -later version", you have the option of following the terms and -conditions either of that version or of any later version published by -the Free Software Foundation. If the Program does not specify a version -number of this License, you may choose any version ever published by the -Free Software Foundation. - -10. 
If you wish to incorporate parts of the Program into other free -programs whose distribution conditions are different, write to the -author to ask for permission. For software which is copyrighted by the -Free Software Foundation, write to the Free Software Foundation; we -sometimes make exceptions for this. Our decision will be guided by the -two goals of preserving the free status of all derivatives of our free -software and of promoting the sharing and reuse of software generally. - -NO WARRANTY - -11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO -WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. -EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR -OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, -EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE -ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH -YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL -NECESSARY SERVICING, REPAIR OR CORRECTION. - -12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN -WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY -AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR -DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL -DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM -(INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED -INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF -THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR -OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -END OF TERMS AND CONDITIONS - -How to Apply These Terms to Your New Programs - -If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - -To do so, attach the following notices to the program. It is safest to -attach them to the start of each source file to most effectively convey -the exclusion of warranty; and each file should have at least the -"copyright" line and a pointer to where the full notice is found. - - One line to give the program's name and a brief idea of what it does. - Copyright (C) - - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335 USA - -Also add information on how to contact you by electronic and paper mail. - -If the program is interactive, make it output a short notice like this -when it starts in an interactive mode: - - Gnomovision version 69, Copyright (C) year name of author - Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type - `show w'. 
This is free software, and you are welcome to redistribute - it under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the -appropriate parts of the General Public License. Of course, the commands -you use may be called something other than `show w' and `show c'; they -could even be mouse-clicks or menu items--whatever suits your program. - -You should also get your employer (if you work as a programmer) or your -school, if any, to sign a "copyright disclaimer" for the program, if -necessary. Here is a sample; alter the names: - - Yoyodyne, Inc., hereby disclaims all copyright interest in the - program `Gnomovision' (which makes passes at compilers) written by - James Hacker. - - signature of Ty Coon, 1 April 1989 - Ty Coon, President of Vice - -This General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications -with the library. If this is what you want to do, use the GNU Library -General Public License instead of this License. - -# - -Certain source files distributed by Oracle America, Inc. and/or its -affiliates are subject to the following clarification and special -exception to the GPLv2, based on the GNU Project exception for its -Classpath libraries, known as the GNU Classpath Exception, but only -where Oracle has expressly included in the particular source file's -header the words "Oracle designates this particular file as subject to -the "Classpath" exception as provided by Oracle in the LICENSE file -that accompanied this code." - -You should also note that Oracle includes multiple, independent -programs in this software package. Some of those programs are provided -under licenses deemed incompatible with the GPLv2 by the Free Software -Foundation and others. For example, the package includes programs -licensed under the Apache License, Version 2.0. Such programs are -licensed to you under their original licenses. - -Oracle facilitates your further distribution of this package by adding -the Classpath Exception to the necessary parts of its GPLv2 code, which -permits you to use that code in combination with other independent -modules not licensed under the GPLv2. However, note that this would -not permit you to commingle code under an incompatible license with -Oracle's GPLv2 licensed code by, for example, cutting and pasting such -code into a file also containing Oracle's GPLv2 licensed code and then -distributing the result. Additionally, if you were to remove the -Classpath Exception from any of the files to which it applies and -distribute the result, you would likely be required to license some or -all of the other code in that distribution under the GPLv2 as well, and -since the GPLv2 is incompatible with the license terms of some items -included in the distribution by Oracle, removing the Classpath -Exception could therefore effectively compromise your ability to -further distribute the package. - -Proceed with caution and we recommend that you obtain the advice of a -lawyer skilled in open source matters before removing the Classpath -Exception or making modifications to this package which may -subsequently be redistributed and/or involve the use of third party -software. - -CLASSPATH EXCEPTION -Linking this library statically or dynamically with other modules is -making a combined work based on this library. 
Thus, the terms and -conditions of the GNU General Public License version 2 cover the whole -combination. - -As a special exception, the copyright holders of this library give you -permission to link this library with independent modules to produce an -executable, regardless of the license terms of these independent -modules, and to copy and distribute the resulting executable under -terms of your choice, provided that you also meet, for each linked -independent module, the terms and conditions of the license of that -module. An independent module is a module which is not derived from or -based on this library. If you modify this library, you may extend this -exception to your version of the library, but you are not obligated to -do so. If you do not wish to do so, delete this exception statement -from your version. diff --git a/plugin/licenses/javax.mail-NOTICE.txt b/plugin/licenses/javax.mail-NOTICE.txt deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/plugin/licenses/owasp-java-html-sanitizer-LICENSE.txt b/plugin/licenses/owasp-java-html-sanitizer-LICENSE.txt deleted file mode 100644 index 379d6e417da..00000000000 --- a/plugin/licenses/owasp-java-html-sanitizer-LICENSE.txt +++ /dev/null @@ -1,234 +0,0 @@ -You may use under either the Apache License Version 2.0 or the BSD -3-Clause License. - ------------------------------------------------------------------- - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - ------------------------------------------------------------------- - -Copyright (c) 2011, Mike Samuel -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/plugin/licenses/owasp-java-html-sanitizer-NOTICE.txt b/plugin/licenses/owasp-java-html-sanitizer-NOTICE.txt deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/plugin/licenses/owasp-java-html-sanitizer-r239.jar.sha1 b/plugin/licenses/owasp-java-html-sanitizer-r239.jar.sha1 deleted file mode 100644 index 71eaba394da..00000000000 --- a/plugin/licenses/owasp-java-html-sanitizer-r239.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ea8dd89a9e8fcf90c1b666ac0585e7769224da5e \ No newline at end of file diff --git a/plugin/licenses/super-csv-2.4.0.jar.sha1 b/plugin/licenses/super-csv-2.4.0.jar.sha1 deleted file mode 100644 index a0b40213309..00000000000 --- a/plugin/licenses/super-csv-2.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -017f8708c929029dde48bc298deaf3c7ae2452d3 \ No newline at end of file diff --git a/plugin/licenses/super-csv-LICENSE.txt b/plugin/licenses/super-csv-LICENSE.txt deleted file mode 100644 index 9e0ad072b25..00000000000 --- a/plugin/licenses/super-csv-LICENSE.txt +++ /dev/null @@ -1,203 +0,0 @@ -/* - * Apache License - * Version 2.0, January 2004 - * http://www.apache.org/licenses/ - * - * TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - * - * 1. Definitions. - * - * "License" shall mean the terms and conditions for use, reproduction, - * and distribution as defined by Sections 1 through 9 of this document. - * - * "Licensor" shall mean the copyright owner or entity authorized by - * the copyright owner that is granting the License. - * - * "Legal Entity" shall mean the union of the acting entity and all - * other entities that control, are controlled by, or are under common - * control with that entity. For the purposes of this definition, - * "control" means (i) the power, direct or indirect, to cause the - * direction or management of such entity, whether by contract or - * otherwise, or (ii) ownership of fifty percent (50%) or more of the - * outstanding shares, or (iii) beneficial ownership of such entity. - * - * "You" (or "Your") shall mean an individual or Legal Entity - * exercising permissions granted by this License. - * - * "Source" form shall mean the preferred form for making modifications, - * including but not limited to software source code, documentation - * source, and configuration files. - * - * "Object" form shall mean any form resulting from mechanical - * transformation or translation of a Source form, including but - * not limited to compiled object code, generated documentation, - * and conversions to other media types. - * - * "Work" shall mean the work of authorship, whether in Source or - * Object form, made available under the License, as indicated by a - * copyright notice that is included in or attached to the work - * (an example is provided in the Appendix below). 
- * - * "Derivative Works" shall mean any work, whether in Source or Object - * form, that is based on (or derived from) the Work and for which the - * editorial revisions, annotations, elaborations, or other modifications - * represent, as a whole, an original work of authorship. For the purposes - * of this License, Derivative Works shall not include works that remain - * separable from, or merely link (or bind by name) to the interfaces of, - * the Work and Derivative Works thereof. - * - * "Contribution" shall mean any work of authorship, including - * the original version of the Work and any modifications or additions - * to that Work or Derivative Works thereof, that is intentionally - * submitted to Licensor for inclusion in the Work by the copyright owner - * or by an individual or Legal Entity authorized to submit on behalf of - * the copyright owner. For the purposes of this definition, "submitted" - * means any form of electronic, verbal, or written communication sent - * to the Licensor or its representatives, including but not limited to - * communication on electronic mailing lists, source code control systems, - * and issue tracking systems that are managed by, or on behalf of, the - * Licensor for the purpose of discussing and improving the Work, but - * excluding communication that is conspicuously marked or otherwise - * designated in writing by the copyright owner as "Not a Contribution." - * - * "Contributor" shall mean Licensor and any individual or Legal Entity - * on behalf of whom a Contribution has been received by Licensor and - * subsequently incorporated within the Work. - * - * 2. Grant of Copyright License. Subject to the terms and conditions of - * this License, each Contributor hereby grants to You a perpetual, - * worldwide, non-exclusive, no-charge, royalty-free, irrevocable - * copyright license to reproduce, prepare Derivative Works of, - * publicly display, publicly perform, sublicense, and distribute the - * Work and such Derivative Works in Source or Object form. - * - * 3. Grant of Patent License. Subject to the terms and conditions of - * this License, each Contributor hereby grants to You a perpetual, - * worldwide, non-exclusive, no-charge, royalty-free, irrevocable - * (except as stated in this section) patent license to make, have made, - * use, offer to sell, sell, import, and otherwise transfer the Work, - * where such license applies only to those patent claims licensable - * by such Contributor that are necessarily infringed by their - * Contribution(s) alone or by combination of their Contribution(s) - * with the Work to which such Contribution(s) was submitted. If You - * institute patent litigation against any entity (including a - * cross-claim or counterclaim in a lawsuit) alleging that the Work - * or a Contribution incorporated within the Work constitutes direct - * or contributory patent infringement, then any patent licenses - * granted to You under this License for that Work shall terminate - * as of the date such litigation is filed. - * - * 4. Redistribution. 
You may reproduce and distribute copies of the - * Work or Derivative Works thereof in any medium, with or without - * modifications, and in Source or Object form, provided that You - * meet the following conditions: - * - * (a) You must give any other recipients of the Work or - * Derivative Works a copy of this License; and - * - * (b) You must cause any modified files to carry prominent notices - * stating that You changed the files; and - * - * (c) You must retain, in the Source form of any Derivative Works - * that You distribute, all copyright, patent, trademark, and - * attribution notices from the Source form of the Work, - * excluding those notices that do not pertain to any part of - * the Derivative Works; and - * - * (d) If the Work includes a "NOTICE" text file as part of its - * distribution, then any Derivative Works that You distribute must - * include a readable copy of the attribution notices contained - * within such NOTICE file, excluding those notices that do not - * pertain to any part of the Derivative Works, in at least one - * of the following places: within a NOTICE text file distributed - * as part of the Derivative Works; within the Source form or - * documentation, if provided along with the Derivative Works; or, - * within a display generated by the Derivative Works, if and - * wherever such third-party notices normally appear. The contents - * of the NOTICE file are for informational purposes only and - * do not modify the License. You may add Your own attribution - * notices within Derivative Works that You distribute, alongside - * or as an addendum to the NOTICE text from the Work, provided - * that such additional attribution notices cannot be construed - * as modifying the License. - * - * You may add Your own copyright statement to Your modifications and - * may provide additional or different license terms and conditions - * for use, reproduction, or distribution of Your modifications, or - * for any such Derivative Works as a whole, provided Your use, - * reproduction, and distribution of the Work otherwise complies with - * the conditions stated in this License. - * - * 5. Submission of Contributions. Unless You explicitly state otherwise, - * any Contribution intentionally submitted for inclusion in the Work - * by You to the Licensor shall be under the terms and conditions of - * this License, without any additional terms or conditions. - * Notwithstanding the above, nothing herein shall supersede or modify - * the terms of any separate license agreement you may have executed - * with Licensor regarding such Contributions. - * - * 6. Trademarks. This License does not grant permission to use the trade - * names, trademarks, service marks, or product names of the Licensor, - * except as required for reasonable and customary use in describing the - * origin of the Work and reproducing the content of the NOTICE file. - * - * 7. Disclaimer of Warranty. Unless required by applicable law or - * agreed to in writing, Licensor provides the Work (and each - * Contributor provides its Contributions) on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - * implied, including, without limitation, any warranties or conditions - * of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - * PARTICULAR PURPOSE. You are solely responsible for determining the - * appropriateness of using or redistributing the Work and assume any - * risks associated with Your exercise of permissions under this License. - * - * 8. 
Limitation of Liability. In no event and under no legal theory, - * whether in tort (including negligence), contract, or otherwise, - * unless required by applicable law (such as deliberate and grossly - * negligent acts) or agreed to in writing, shall any Contributor be - * liable to You for damages, including any direct, indirect, special, - * incidental, or consequential damages of any character arising as a - * result of this License or out of the use or inability to use the - * Work (including but not limited to damages for loss of goodwill, - * work stoppage, computer failure or malfunction, or any and all - * other commercial damages or losses), even if such Contributor - * has been advised of the possibility of such damages. - * - * 9. Accepting Warranty or Additional Liability. While redistributing - * the Work or Derivative Works thereof, You may choose to offer, - * and charge a fee for, acceptance of support, warranty, indemnity, - * or other liability obligations and/or rights consistent with this - * License. However, in accepting such obligations, You may act only - * on Your own behalf and on Your sole responsibility, not on behalf - * of any other Contributor, and only if You agree to indemnify, - * defend, and hold each Contributor harmless for any liability - * incurred by, or claims asserted against, such Contributor by reason - * of your accepting any such warranty or additional liability. - * - * END OF TERMS AND CONDITIONS - * - * APPENDIX: How to apply the Apache License to your work. - * - * To apply the Apache License to your work, attach the following - * boilerplate notice, with the fields enclosed by brackets "[]" - * replaced with your own identifying information. (Don't include - * the brackets!) The text should be enclosed in the appropriate - * comment syntax for the file format. We also recommend that a - * file or class name and description of purpose be included on the - * same "printed page" as the copyright notice for easier - * identification within third-party archives. - * - * Copyright 2007 Kasper B. Graversen - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ diff --git a/plugin/licenses/super-csv-NOTICE.txt b/plugin/licenses/super-csv-NOTICE.txt deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/plugin/licenses/unboundid-ldapsdk-3.2.0.jar.sha1 b/plugin/licenses/unboundid-ldapsdk-3.2.0.jar.sha1 deleted file mode 100644 index 23697f364e9..00000000000 --- a/plugin/licenses/unboundid-ldapsdk-3.2.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f76725e5a215ea468ecda06a8d66a809281e685f \ No newline at end of file diff --git a/plugin/licenses/unboundid-ldapsdk-LICENSE.txt b/plugin/licenses/unboundid-ldapsdk-LICENSE.txt deleted file mode 100644 index e57554e5692..00000000000 --- a/plugin/licenses/unboundid-ldapsdk-LICENSE.txt +++ /dev/null @@ -1,91 +0,0 @@ - UnboundID LDAP SDK Free Use License - -THIS IS AN AGREEMENT BETWEEN YOU ("YOU") AND UNBOUNDID CORP. 
("UNBOUNDID") -REGARDING YOUR USE OF UNBOUNDID LDAP SDK FOR JAVA AND ANY ASSOCIATED -DOCUMENTATION, OBJECT CODE, COMPILED LIBRARIES, SOURCE CODE AND SOURCE FILES OR -OTHER MATERIALS MADE AVAILABLE BY UNBOUNDID (COLLECTIVELY REFERRED TO IN THIS -AGREEMENT AS THE ("SDK"). - -BY INSTALLING, ACCESSING OR OTHERWISE USING THE SDK, YOU ACCEPT THE TERMS OF -THIS AGREEMENT. IF YOU DO NOT AGREE TO THE TERMS OF THIS AGREEMENT, DO NOT -INSTALL, ACCESS OR USE THE SDK. - -USE OF THE SDK. Subject to your compliance with this Agreement, UnboundID -grants to You a non-exclusive, royalty-free license, under UnboundID's -intellectual property rights in the SDK, to use, reproduce, modify and -distribute this release of the SDK; provided that no license is granted herein -under any patents that may be infringed by your modifications, derivative works -or by other works in which the SDK may be incorporated (collectively, your -"Applications"). You may reproduce and redistribute the SDK with your -Applications provided that you (i) include this license file and an -unmodified copy of the unboundid-ldapsdk-se.jar file; and (ii) such -redistribution is subject to a license whose terms do not conflict with or -contradict the terms of this Agreement. You may also reproduce and redistribute -the SDK without your Applications provided that you redistribute the SDK -complete and unmodified (i.e., with all "read me" files, copyright notices, and -other legal notices and terms that UnboundID has included in the SDK). - -SCOPE OF LICENSES. This Agreement does not grant You the right to use any -UnboundID intellectual property which is not included as part of the SDK. The -SDK is licensed, not sold. This Agreement only gives You some rights to use -the SDK. UnboundID reserves all other rights. Unless applicable law gives You -more rights despite this limitation, You may use the SDK only as expressly -permitted in this Agreement. - -SUPPORT. UnboundID is not obligated to provide any technical or other support -("Support Services") for the SDK to You under this Agreement. However, if -UnboundID chooses to provide any Support Services to You, Your use of such -Support Services will be governed by then-current UnboundID support policies. - -TERMINATION. UnboundID reserves the right to discontinue offering the SDK and -to modify the SDK at any time in its sole discretion. Notwithstanding anything -contained in this Agreement to the contrary, UnboundID may also, in its sole -discretion, terminate or suspend access to the SDK to You or any end user at -any time. In addition, if you fail to comply with the terms of this Agreement, -then any rights granted herein will be automatically terminated if such failure -is not corrected within 30 days of the initial notification of such failure. -You acknowledge that termination and/or monetary damages may not be a -sufficient remedy if You breach this Agreement and that UnboundID will be -entitled, without waiving any other rights or remedies, to injunctive or -equitable relief as may be deemed proper by a court of competent jurisdiction -in the event of a breach. UnboundID may also terminate this Agreement if the -SDK becomes, or in UnboundID?s reasonable opinion is likely to become, the -subject of a claim of intellectual property infringement or trade secret -misappropriation. All rights and licenses granted herein will simultaneously -and automatically terminate upon termination of this Agreement for any reason. - -DISCLAIMER OF WARRANTY. 
THE SDK IS PROVIDED "AS IS" AND UNBOUNDID DOES NOT -WARRANT THAT THE SDK WILL BE ERROR-FREE, VIRUS-FREE, WILL PERFORM IN AN -UNINTERRUPTED, SECURE OR TIMELY MANNER, OR WILL INTEROPERATE WITH OTHER -HARDWARE, SOFTWARE, SYSTEMS OR DATA. TO THE MAXIMUM EXTENT ALLOWED BY LAW, ALL -CONDITIONS, REPRESENTATIONS AND WARRANTIES, WHETHER EXPRESS, IMPLIED, STATUTORY -OR OTHERWISE INCLUDING, WITHOUT LIMITATION, ANY IMPLIED WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE (EVEN IF UNBOUNDID HAD BEEN -INFORMED OF SUCH PURPOSE), OR NON-INFRINGEMENT OF THIRD PARTY RIGHTS ARE HEREBY -DISCLAIMED. - -LIMITATION OF LIABILITY. IN NO EVENT WILL UNBOUNDID OR ITS SUPPLIERS BE LIABLE -FOR ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, LOST PROFITS, -REVENUE, DATA OR DATA USE, BUSINESS INTERRUPTION, COST OF COVER, DIRECT, -INDIRECT, SPECIAL, PUNITIVE, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND) -ARISING OUT OF THE USE OF OR INABILITY TO USE THE SDK OR IN ANY WAY RELATED TO -THIS AGREEMENT, EVEN IF UNBOUNDID HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH -DAMAGES. - -ADDITIONAL RIGHTS. Certain states do not allow the exclusion of implied -warranties or limitation of liability for certain kinds of damages, so the -exclusion of limited warranties and limitation of liability set forth above may -not apply to You. - -EXPORT RESTRICTIONS. The SDK is subject to United States export control laws. -You acknowledge and agree that You are responsible for compliance with all -domestic and international export laws and regulations that apply to the SDK. - -MISCELLANEOUS. This Agreement constitutes the entire agreement with respect to -the SDK. If any provision of this Agreement shall be held to be invalid, -illegal or unenforceable, the validity, legality and enforceability of the -remaining provisions shall in no way be affected or impaired thereby. This -Agreement and performance hereunder shall be governed by and construed in -accordance with the laws of the State of Texas without regard to its conflict -of laws rules. Any disputes related to this Agreement shall be exclusively -litigated in the state or federal courts located in Travis County, Texas. 
diff --git a/plugin/licenses/unboundid-ldapsdk-NOTICE.txt b/plugin/licenses/unboundid-ldapsdk-NOTICE.txt deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/plugin/logstash/build.gradle b/plugin/logstash/build.gradle index 90906c70b30..7f6ed04fe78 100644 --- a/plugin/logstash/build.gradle +++ b/plugin/logstash/build.gradle @@ -1,4 +1,4 @@ -evaluationDependsOn(':x-pack-elasticsearch:plugin:core') +evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.esplugin' esplugin { @@ -6,22 +6,16 @@ esplugin { description 'Elasticsearch Expanded Pack Plugin - Logstash' classname 'org.elasticsearch.xpack.logstash.Logstash' hasNativeController false - requiresKeystore true + requiresKeystore false extendedPlugins = ['x-pack-core'] - licenseFile project(':x-pack-elasticsearch').file('LICENSE.txt') - noticeFile project(':x-pack-elasticsearch').file('NOTICE.txt') } archivesBaseName = 'x-pack-logstash' -licenseHeaders.enabled = false - -integTest.enabled = false - dependencies { provided "org.elasticsearch:elasticsearch:${version}" provided "org.elasticsearch.plugin:x-pack-core:${version}" - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } @@ -30,5 +24,7 @@ dependencyLicenses { } run { - plugin ':x-pack-elasticsearch:plugin:core' + plugin xpackModule('core') } + +integTest.enabled = false diff --git a/plugin/ml-cpp-snapshot/build.gradle b/plugin/ml-cpp-snapshot/build.gradle deleted file mode 100644 index 60500ce7b8b..00000000000 --- a/plugin/ml-cpp-snapshot/build.gradle +++ /dev/null @@ -1,170 +0,0 @@ -import com.amazonaws.AmazonServiceException -import com.amazonaws.ClientConfiguration -import com.amazonaws.auth.AWSCredentials -import com.amazonaws.auth.BasicAWSCredentials -import com.amazonaws.services.s3.AmazonS3Client -import com.amazonaws.services.s3.model.S3Object -import com.amazonaws.services.s3.model.ObjectMetadata -import com.bettercloud.vault.Vault -import com.bettercloud.vault.VaultConfig -import com.bettercloud.vault.response.LogicalResponse -import java.nio.file.Files -import java.nio.file.attribute.PosixFilePermission -import java.nio.file.attribute.PosixFilePermissions -import org.elasticsearch.gradle.VersionProperties - -apply plugin: 'distribution' - -buildscript { - repositories { - mavenCentral() - } - dependencies { - classpath group: 'com.bettercloud', name: 'vault-java-driver', version:"1.1.0" - classpath 'com.amazonaws:aws-java-sdk-s3:1.10.33' - if (JavaVersion.current() > JavaVersion.VERSION_1_8) { - classpath 'com.sun.xml.bind:jaxb-impl:2.2.3-1' // pulled in as external dependency to work on java 9 - } - } -} - -ext.version = VersionProperties.elasticsearch - -// This project pulls a snapshot version of the ML cpp artifacts and sets that as the artifact -// for this project so it can be used with dependency substitution. We do not use gradle's -// handling of S3 as a maven repo due to the dynamically generated creds being slow to propagate, -// necessitating retries. - -void checkJavaVersion() { - /** - * The Elastic Secrets vault is served via HTTPS with a Let's Encrypt certificate. The root certificates that cross-signed the Let's - * Encrypt certificates were not trusted by the JDK until 8u101. Therefore, we enforce that the JDK is at least 8u101 here. 
- */ - final String javaVersion = System.getProperty('java.version') - final String javaVendor = System.getProperty('java.vendor') - def matcher = javaVersion =~ /1\.8\.0(?:_(\d+))?/ - boolean matches = matcher.matches() - if (matches) { - final int update - if (matcher.group(1) == null) { - update = 0 - } else { - update = matcher.group(1).toInteger() - } - if (update < 101) { - throw new GradleException("JDK ${javaVendor} ${javaVersion} does not have necessary root certificates " + - "(https://bugs.openjdk.java.net/browse/JDK-8154757), update your JDK to at least JDK 8u101+") - } - } -} - -void setupVaultAuthMethod() { - String VAULT_BASE_URL = 'https://secrets.elastic.co:8200' - String VAULT_ROLE_ID = "8e90dd88-5a8e-9c12-0da9-5439f293ff97" - String VAULT_SECRET_ID = System.env.VAULT_SECRET_ID - // get an authentication token with vault - String homePath = System.properties['user.home'] - File githubToken = file("${homePath}/.elastic/github.token") - String vaultAuthBody = null - URL vaultUrl = null - if (githubToken.exists()) { - try { - Set perms = Files.getPosixFilePermissions(githubToken.toPath()) - if (perms.equals(PosixFilePermissions.fromString("rw-------")) == false) { - throw new GradleException('github.token must have 600 permissions') - } - } catch (UnsupportedOperationException e) { - // Assume this isn't a POSIX file system - } - vaultUrl = new URL(VAULT_BASE_URL + '/v1/auth/github/login') - vaultAuthBody = "{\"token\": \"${githubToken.getText('UTF-8').trim()}\"}" - } else if (VAULT_SECRET_ID != null) { - vaultUrl = new URL(VAULT_BASE_URL + '/v1/auth/approle/login') - vaultAuthBody = "{\"role_id\": \"${VAULT_ROLE_ID}\", \"secret_id\": \"${VAULT_SECRET_ID}\"}" - } else { - throw new GradleException('Missing ~/.elastic/github.token file or VAULT_SECRET_ID environment variable, needed to authenticate with vault for secrets') - } - project.ext.vaultAuthBody = vaultAuthBody - project.ext.vaultUrl = vaultUrl -} - -void getZip(File snapshotZip) { - HttpURLConnection vaultConn = (HttpURLConnection) vaultUrl.openConnection() - vaultConn.setRequestProperty('Content-Type', 'application/json') - vaultConn.setRequestMethod('PUT') - vaultConn.setDoOutput(true) - vaultConn.outputStream.withWriter('UTF-8') { writer -> - writer.write(vaultAuthBody) - } - vaultConn.connect() - Object authResponse = new groovy.json.JsonSlurper().parseText(vaultConn.content.text) - VaultConfig config = new VaultConfig('https://secrets.elastic.co:8200', authResponse.auth.client_token) - Vault vault = new Vault(config) - LogicalResponse secret = vault.logical().read("aws-dev/creds/prelertartifacts") - final AWSCredentials creds = new BasicAWSCredentials(secret.data.get('access_key'), secret.data.get('secret_key')) - - // the keys may take a while to propagate, so wait up to 60 seconds retrying - final AmazonS3Client client = new AmazonS3Client(creds) - final String key = "maven/org/elasticsearch/ml/ml-cpp/${version}/ml-cpp-${version}.zip" - int retries = 120 - while (retries > 0) { - try { - File snapshotMd5 = new File(snapshotZip.toString() + '.md5') - // do a HEAD first to check the zip hash against the local file - ObjectMetadata metadata = client.getObjectMetadata('prelert-artifacts', key) - String remoteMd5 = metadata.getETag() - if (snapshotZip.exists()) { - // do a HEAD first to check the zip hash against the local file - String localMd5 = snapshotMd5.getText('UTF-8') - if (remoteMd5.equals(localMd5)) { - logger.info('Using cached ML snapshot') - return - } - } - S3Object zip = 
client.getObject('prelert-artifacts', key) - InputStream zipStream = zip.getObjectContent() - try { - project.delete(snapshotZip, snapshotZip) - Files.copy(zipStream, snapshotZip.toPath()) - } finally { - zipStream.close() - } - snapshotMd5.setText(remoteMd5, 'UTF-8') - return - } catch (AmazonServiceException e) { - if (e.getStatusCode() != 403) { - throw new GradleException('Error while trying to get ml-cpp snapshot: ' + e.getMessage(), e) - } - sleep(500) - retries-- - } - } - throw new GradleException('Could not access ml-cpp artifacts. Timed out after 60 seconds') -} - -File snapshotZip = new File(projectDir, ".cache/ml-cpp-${version}.zip") -task downloadMachineLearningSnapshot { - onlyIf { - // skip if machine-learning-cpp is being built locally - findProject(':machine-learning-cpp') == null && - // skip for offline builds - just rely on the artifact already having been downloaded before here - project.gradle.startParameter.isOffline() == false - } - doFirst { - snapshotZip.parentFile.mkdirs() - getZip(snapshotZip) - } -} - -gradle.taskGraph.whenReady { taskGraph -> - // skip if machine-learning-cpp is being built locally and also for offline builds - if (findProject(':machine-learning-cpp') == null && project.gradle.startParameter.isOffline() == false) { - // do validation of token/java version up front, don't wait for the task to run - checkJavaVersion() - setupVaultAuthMethod() - } -} - -artifacts { - 'default' file: snapshotZip, name: 'ml-cpp', type: 'zip', builtBy: downloadMachineLearningSnapshot -} diff --git a/plugin/ml/build.gradle b/plugin/ml/build.gradle index 54bdc54a52d..adcd98b7a32 100644 --- a/plugin/ml/build.gradle +++ b/plugin/ml/build.gradle @@ -1,4 +1,7 @@ -evaluationDependsOn(':x-pack-elasticsearch:plugin:core') +import com.carrotsearch.gradle.junit4.RandomizedTestingTask +import org.elasticsearch.gradle.BuildPlugin + +evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.esplugin' esplugin { @@ -6,24 +9,18 @@ esplugin { description 'Elasticsearch Expanded Pack Plugin - Machine Learning' classname 'org.elasticsearch.xpack.ml.MachineLearning' hasNativeController true - requiresKeystore true + requiresKeystore false extendedPlugins = ['x-pack-core'] - licenseFile project(':x-pack-elasticsearch').file('LICENSE.txt') - noticeFile project(':x-pack-elasticsearch').file('NOTICE.txt') } archivesBaseName = 'x-pack-ml' -licenseHeaders.enabled = false - -integTest.enabled = false - configurations { nativeBundle { resolutionStrategy.dependencySubstitution { if (findProject(':machine-learning-cpp') != null) { substitute module("org.elasticsearch.ml:ml-cpp") with project(":machine-learning-cpp") } else { - substitute module("org.elasticsearch.ml:ml-cpp") with project("${project.parent.path}:ml-cpp-snapshot") + substitute module("org.elasticsearch.ml:ml-cpp") with project("${project.path}:cpp-snapshot") } } } @@ -47,9 +44,9 @@ dependencies { provided "org.elasticsearch:elasticsearch:${version}" provided "org.elasticsearch.plugin:x-pack-core:${version}" - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') // This should not be here - testCompile project(path: ':x-pack-elasticsearch:plugin:security', configuration: 'testArtifacts') + testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') // ml deps compile 'net.sf.supercsv:super-csv:2.4.0' @@ -57,10 +54,48 @@ dependencies { testCompile 'org.ini4j:ini4j:0.5.2' } 
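Editor's note: the `testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')` line in the hunk above pulls another project's test classes in through a named configuration, and the hunk that follows adds the matching producer side for the ml project itself. As a rough, self-contained sketch of the pattern (Gradle Groovy DSL of this vintage; it mirrors the additions below, and the producer/consumer project names are placeholders, not taken from this diff):

// producer side: expose this project's compiled test classes as a consumable configuration
configurations {
    testArtifacts.extendsFrom testRuntime
}
task testJar(type: Jar) {
    appendix 'test'              // yields e.g. my-plugin-test.jar alongside the main jar
    from sourceSets.test.output
}
artifacts {
    testArtifacts testJar
}

// consumer side, in another project's build.gradle (path is a placeholder):
dependencies {
    testCompile project(path: ':producer-project', configuration: 'testArtifacts')
}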
+configurations { + testArtifacts.extendsFrom testRuntime +} +task testJar(type: Jar) { + appendix 'test' + from sourceSets.test.output +} +artifacts { + // normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions + archives jar + testArtifacts testJar +} + dependencyLicenses { ignoreSha 'x-pack-core' } run { - plugin ':x-pack-elasticsearch:plugin:core' + plugin xpackModule('core') +} + +// xpack modules are installed in real clusters as the meta plugin, so +// installing them as individual plugins for integ tests doesn't make sense, +// so we disable integ tests +integTest.enabled = false + +// Instead we create a separate task to run the +// tests based on ESIntegTestCase +task internalClusterTest(type: RandomizedTestingTask, + group: JavaBasePlugin.VERIFICATION_GROUP, + description: 'Multi-node tests', + dependsOn: test.dependsOn) { + configure(BuildPlugin.commonTestConfig(project)) + classpath = project.test.classpath + testClassesDir = project.test.testClassesDir + include '**/*IT.class' + systemProperty 'es.set.netty.runtime.available.processors', 'false' +} +check.dependsOn internalClusterTest +internalClusterTest.mustRunAfter test + +// also add an "alias" task to make typing on the command line easier +task icTest { + dependsOn internalClusterTest } diff --git a/plugin/ml-cpp-snapshot/.gitignore b/plugin/ml/cpp-snapshot/.gitignore similarity index 100% rename from plugin/ml-cpp-snapshot/.gitignore rename to plugin/ml/cpp-snapshot/.gitignore diff --git a/plugin/ml/cpp-snapshot/build.gradle b/plugin/ml/cpp-snapshot/build.gradle new file mode 100644 index 00000000000..c2a396e72b2 --- /dev/null +++ b/plugin/ml/cpp-snapshot/build.gradle @@ -0,0 +1,52 @@ +import java.net.HttpURLConnection +import org.elasticsearch.gradle.VersionProperties + +apply plugin: 'distribution' + +ext.version = VersionProperties.elasticsearch + +// This project pulls a snapshot version of the ML cpp artifacts and sets that as the artifact +// for this project so it can be used with dependency substitution. 
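Editor's note: as the comment above says, the zip is wired in via dependency substitution. For orientation, this is what the consuming side looks like; it mirrors the `nativeBundle` hunk in the ml build earlier in this diff, while the trailing dependency declaration is an assumption shown only for illustration.

// in plugin/ml/build.gradle: redirect the external coordinate to a local project
configurations {
    nativeBundle {
        resolutionStrategy.dependencySubstitution {
            if (findProject(':machine-learning-cpp') != null) {
                // a locally checked-out C++ build takes precedence
                substitute module("org.elasticsearch.ml:ml-cpp") with project(":machine-learning-cpp")
            } else {
                // otherwise resolve to this cpp-snapshot subproject's default artifact (the downloaded zip)
                substitute module("org.elasticsearch.ml:ml-cpp") with project("${project.path}:cpp-snapshot")
            }
        }
    }
}

// assumed for illustration only; the actual dependency declaration is not part of this diff
dependencies {
    nativeBundle "org.elasticsearch.ml:ml-cpp:${version}@zip"
}

Resolving that configuration should then trigger this subproject's `downloadMachineLearningSnapshot` task (via the `builtBy` wiring in the `artifacts` block added further below) rather than a remote repository lookup.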
+ +void getZip(File snapshotZip) { + String zipUrl = "http://prelert-artifacts.s3.amazonaws.com/maven/org/elasticsearch/ml/ml-cpp/${version}/ml-cpp-${version}.zip" + File snapshotMd5 = new File(snapshotZip.toString() + '.md5') + HttpURLConnection conn = (HttpURLConnection) new URL(zipUrl).openConnection(); + + // do a HEAD first to check the zip hash against the local file + conn.setRequestMethod('HEAD'); + if (conn.getResponseCode() != HttpURLConnection.HTTP_OK) { + throw new GradleException('ML cpp snapshot does not exist') + } + + String remoteMd5 = conn.getHeaderField('ETag') + if (snapshotZip.exists()) { + // do a HEAD first to check the zip hash against the local file + String localMd5 = snapshotMd5.getText('UTF-8') + if (remoteMd5.equals(localMd5)) { + logger.info('Using cached ML snapshot') + return + } + } + + snapshotZip.bytes = new URL(zipUrl).bytes + snapshotMd5.setText(remoteMd5, 'UTF-8') +} + +File snapshotZip = new File(projectDir, ".cache/ml-cpp-${version}.zip") +task downloadMachineLearningSnapshot { + onlyIf { + // skip if machine-learning-cpp is being built locally + findProject(':machine-learning-cpp') == null && + // skip for offline builds - just rely on the artifact already having been downloaded before here + project.gradle.startParameter.isOffline() == false + } + doFirst { + snapshotZip.parentFile.mkdirs() + getZip(snapshotZip) + } +} + +artifacts { + 'default' file: snapshotZip, name: 'ml-cpp', type: 'zip', builtBy: downloadMachineLearningSnapshot +} diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index ea50b408cea..5cd61a6f48a 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -100,7 +100,6 @@ import org.elasticsearch.xpack.core.ml.action.ValidateDetectorAction; import org.elasticsearch.xpack.core.ml.action.ValidateJobConfigAction; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.notifications.AuditMessage; import org.elasticsearch.xpack.core.ml.notifications.AuditorField; import org.elasticsearch.xpack.core.persistent.CompletionPersistentTaskAction; @@ -111,6 +110,7 @@ import org.elasticsearch.xpack.core.persistent.PersistentTasksService; import org.elasticsearch.xpack.core.persistent.RemovePersistentTaskAction; import org.elasticsearch.xpack.core.persistent.StartPersistentTaskAction; import org.elasticsearch.xpack.core.persistent.UpdatePersistentTaskStatusAction; +import org.elasticsearch.xpack.core.template.TemplateUtils; import org.elasticsearch.xpack.ml.action.TransportCloseJobAction; import org.elasticsearch.xpack.ml.action.TransportDeleteCalendarAction; import org.elasticsearch.xpack.ml.action.TransportDeleteCalendarEventAction; @@ -162,6 +162,7 @@ import org.elasticsearch.xpack.ml.job.UpdateJobProcessNotifier; import org.elasticsearch.xpack.ml.job.categorization.MlClassicTokenizer; import org.elasticsearch.xpack.ml.job.categorization.MlClassicTokenizerFactory; import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import 
org.elasticsearch.xpack.ml.job.process.DataCountsReporter; import org.elasticsearch.xpack.ml.job.process.NativeController; @@ -217,7 +218,6 @@ import org.elasticsearch.xpack.ml.rest.results.RestGetOverallBucketsAction; import org.elasticsearch.xpack.ml.rest.results.RestGetRecordsAction; import org.elasticsearch.xpack.ml.rest.validate.RestValidateDetectorAction; import org.elasticsearch.xpack.ml.rest.validate.RestValidateJobConfigAction; -import org.elasticsearch.xpack.core.template.TemplateUtils; import java.io.IOException; import java.math.BigInteger; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java index 78f017f6c60..e421819f732 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java @@ -21,11 +21,11 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.action.DeleteCalendarAction; import org.elasticsearch.xpack.core.ml.MlMetaIndex; +import org.elasticsearch.xpack.core.ml.action.DeleteCalendarAction; import org.elasticsearch.xpack.core.ml.calendars.Calendar; import org.elasticsearch.xpack.ml.job.JobManager; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java index 2fdd447cf2b..91ae2c118c8 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java @@ -22,12 +22,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.action.DeleteCalendarEventAction; import org.elasticsearch.xpack.core.ml.MlMetaIndex; +import org.elasticsearch.xpack.core.ml.action.DeleteCalendarEventAction; import org.elasticsearch.xpack.core.ml.calendars.Calendar; -import org.elasticsearch.xpack.ml.job.JobManager; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import java.util.Map; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java index 553243a526c..36abf5f95d4 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java @@ -18,12 +18,12 @@ import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.DeleteModelSnapshotAction; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.job.persistence.JobDataDeleter; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.notifications.Auditor; import java.util.Collections; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java index 9d4a4160343..3b09377b477 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java @@ -19,13 +19,13 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.ForecastJobAction; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; -import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; -import org.elasticsearch.xpack.ml.job.process.autodetect.params.ForecastParams; import org.elasticsearch.xpack.core.ml.job.results.ForecastRequestStats; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; +import org.elasticsearch.xpack.ml.job.process.autodetect.params.ForecastParams; import java.io.IOException; import java.util.List; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java index f3a947f5bc0..6ef07232ad0 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetBucketsAction.java @@ -15,9 +15,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetBucketsAction; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.core.ml.job.persistence.BucketsQueryBuilder; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; public class TransportGetBucketsAction extends HandledTransportAction { diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java index 2d215f48660..7e3593a3f7d 
100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java @@ -15,16 +15,16 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.action.GetCalendarEventsAction; -import org.elasticsearch.xpack.core.ml.action.GetCalendarsAction; import org.elasticsearch.xpack.core.ml.MLMetadataField; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.action.GetCalendarEventsAction; +import org.elasticsearch.xpack.core.ml.action.GetCalendarsAction; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; -import org.elasticsearch.xpack.core.ml.job.persistence.ScheduledEventsQueryBuilder; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.ml.job.persistence.ScheduledEventsQueryBuilder; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import java.util.Collections; import java.util.List; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java index ed56731ca69..319a5e43b3b 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java @@ -18,12 +18,10 @@ import org.elasticsearch.xpack.core.ml.action.util.PageParams; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.calendars.Calendar; import org.elasticsearch.xpack.core.ml.job.persistence.CalendarQueryBuilder; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import java.util.Collections; -import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; - public class TransportGetCalendarsAction extends HandledTransportAction { private final JobProvider jobProvider; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java index 12988e442ed..25d0cc0cdf8 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java @@ -16,7 +16,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetCategoriesAction; import org.elasticsearch.xpack.ml.job.JobManager; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; public class TransportGetCategoriesAction extends HandledTransportAction { diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java 
index ed9f5e2dac5..b61b2762dde 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java @@ -27,12 +27,12 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.action.GetFiltersAction; import org.elasticsearch.xpack.core.ml.MlMetaIndex; +import org.elasticsearch.xpack.core.ml.action.GetFiltersAction; import org.elasticsearch.xpack.core.ml.action.util.PageParams; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.core.ml.utils.MlIndicesUtils; import java.io.IOException; import java.util.ArrayList; @@ -110,7 +110,7 @@ public class TransportGetFiltersAction extends HandledTransportAction() { diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java index b827a5c8cd9..4a54469a87a 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java @@ -15,9 +15,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetInfluencersAction; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.core.ml.job.persistence.InfluencersQueryBuilder; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; public class TransportGetInfluencersAction extends HandledTransportAction { diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java index ae308a59e68..8189d06e129 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java @@ -29,10 +29,10 @@ import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.core.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import java.io.IOException; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java 
index 27603025c38..17e367128a1 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetModelSnapshotsAction.java @@ -15,9 +15,9 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; -import org.elasticsearch.xpack.ml.job.JobManager; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import java.util.stream.Collectors; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java index a1fb4078709..989d74038b8 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java @@ -26,22 +26,22 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetOverallBucketsAction; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.BucketsQueryBuilder; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; -import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsAggregator; -import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsCollector; -import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsProcessor; -import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsProvider; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.OverallBucket; import org.elasticsearch.xpack.core.ml.job.results.Result; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.Intervals; +import org.elasticsearch.xpack.core.ml.utils.MlIndicesUtils; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsAggregator; +import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsCollector; +import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsProcessor; +import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsProvider; import java.util.HashSet; import java.util.List; @@ -257,7 +257,7 @@ public class TransportGetOverallBucketsAction extends HandledTransportAction { diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index e429dd1922c..087b82c56d9 
100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -55,13 +55,13 @@ import org.elasticsearch.xpack.core.ml.job.config.JobTaskStatus; import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.persistent.AllocatedPersistentTask; import org.elasticsearch.xpack.core.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.core.persistent.PersistentTasksExecutor; import org.elasticsearch.xpack.core.persistent.PersistentTasksService; import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import java.io.IOException; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java index 925d9966917..92916e18f68 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java @@ -22,13 +22,13 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.action.PostCalendarEventsAction; import org.elasticsearch.xpack.core.ml.MlMetaIndex; +import org.elasticsearch.xpack.core.ml.action.PostCalendarEventsAction; import org.elasticsearch.xpack.core.ml.calendars.Calendar; import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; -import org.elasticsearch.xpack.ml.job.JobManager; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import java.io.IOException; import java.util.Collections; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java index 41868da955a..b84d0cf651e 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java @@ -20,15 +20,15 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.RevertModelSnapshotAction; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.messages.Messages; -import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; import 
org.elasticsearch.xpack.core.ml.job.persistence.JobDataDeleter; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import java.util.Date; import java.util.function.Consumer; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java index 8aa06eadfa5..beee767f3a1 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java @@ -17,7 +17,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.PutCalendarAction; import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction; import org.elasticsearch.xpack.ml.job.JobManager; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import java.util.HashSet; import java.util.Set; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java index 9fedf7975c3..04ca454e5a0 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java @@ -26,9 +26,9 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.UpdateModelSnapshotAction; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.results.Result; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import java.io.IOException; import java.util.function.Consumer; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java index 622620e0fe4..0bf52a98e11 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java @@ -11,14 +11,14 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.persistence.BucketsQueryBuilder; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; 
import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.Result; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.notifications.Auditor; import java.util.Collections; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index c3c076f0e29..472574c6c0b 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -39,12 +39,12 @@ import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.job.messages.Messages; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.job.persistence.JobStorageDeletionTask; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import org.elasticsearch.xpack.ml.job.process.autodetect.UpdateParams; import org.elasticsearch.xpack.ml.notifications.Auditor; @@ -203,7 +203,7 @@ public class JobManager extends AbstractComponent { } @Override - public ClusterState execute(ClusterState currentState) throws Exception { + public ClusterState execute(ClusterState currentState) { return updateClusterState(job, false, currentState); } }); @@ -342,7 +342,7 @@ public class JobManager extends AbstractComponent { public void updateProcessOnCalendarChanged(List calendarJobIds) { ClusterState clusterState = clusterService.state(); Set expandedJobIds = new HashSet<>(); - calendarJobIds.stream().forEach(jobId -> expandedJobIds.addAll(expandJobIds(jobId, true, clusterState))); + calendarJobIds.forEach(jobId -> expandedJobIds.addAll(expandJobIds(jobId, true, clusterState))); for (String jobId : expandedJobIds) { if (isJobOpen(clusterState, jobId)) { updateJobProcessNotifier.submitJobUpdate(UpdateParams.scheduledEventsUpdate(jobId)); diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/BatchedBucketsIterator.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java similarity index 96% rename from plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/BatchedBucketsIterator.java rename to plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java index bb28cd911f8..2c8d440b4ee 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/BatchedBucketsIterator.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.core.ml.job.persistence; +package org.elasticsearch.xpack.ml.job.persistence; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.client.Client; diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/BatchedDocumentsIterator.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIterator.java similarity index 91% rename from plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/BatchedDocumentsIterator.java rename to plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIterator.java index d59fcaa5828..cf50579a0e5 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/BatchedDocumentsIterator.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIterator.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.ml.job.persistence; +package org.elasticsearch.xpack.ml.job.persistence; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.search.SearchRequest; @@ -15,9 +15,11 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortBuilders; +import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; +import org.elasticsearch.xpack.core.ml.utils.MlIndicesUtils; import java.util.ArrayDeque; -import java.util.Arrays; +import java.util.Collections; import java.util.Deque; import java.util.NoSuchElementException; import java.util.Objects; @@ -90,7 +92,7 @@ public abstract class BatchedDocumentsIterator { isScrollInitialised = true; SearchRequest searchRequest = new SearchRequest(index); - searchRequest.indicesOptions(JobProvider.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)); + searchRequest.indicesOptions(MlIndicesUtils.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)); searchRequest.scroll(CONTEXT_ALIVE_DURATION); searchRequest.source(new SearchSourceBuilder() .size(BATCH_SIZE) @@ -116,7 +118,7 @@ public abstract class BatchedDocumentsIterator { count += hits.length; if (!hasNext() && scrollId != null) { - client.prepareClearScroll().setScrollIds(Arrays.asList(scrollId)).get(); + client.prepareClearScroll().setScrollIds(Collections.singletonList(scrollId)).get(); } return results; } diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/BatchedInfluencersIterator.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java similarity index 96% rename from plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/BatchedInfluencersIterator.java rename to plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java index 605b0284f29..9a1a45212a4 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/BatchedInfluencersIterator.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.core.ml.job.persistence; +package org.elasticsearch.xpack.ml.job.persistence; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.client.Client; @@ -35,4 +35,4 @@ class BatchedInfluencersIterator extends BatchedResultsIterator { Influencer influencer = Influencer.PARSER.apply(parser, null); return new Result<>(hit.getIndex(), influencer); } -} \ No newline at end of file +} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/BatchedRecordsIterator.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java similarity index 96% rename from plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/BatchedRecordsIterator.java rename to plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java index ca7da5a3cba..50c1f523e65 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/BatchedRecordsIterator.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.ml.job.persistence; +package org.elasticsearch.xpack.ml.job.persistence; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.client.Client; @@ -35,4 +35,4 @@ class BatchedRecordsIterator extends BatchedResultsIterator { AnomalyRecord record = AnomalyRecord.PARSER.apply(parser, null); return new Result<>(hit.getIndex(), record); } -} \ No newline at end of file +} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/BatchedResultsIterator.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedResultsIterator.java similarity index 89% rename from plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/BatchedResultsIterator.java rename to plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedResultsIterator.java index daf92342771..c02dfd39574 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/BatchedResultsIterator.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedResultsIterator.java @@ -3,11 +3,13 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.core.ml.job.persistence; +package org.elasticsearch.xpack.ml.job.persistence; import org.elasticsearch.client.Client; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.core.ml.job.persistence.ResultsFilterBuilder; import org.elasticsearch.xpack.core.ml.job.results.Result; public abstract class BatchedResultsIterator extends BatchedDocumentsIterator> { diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobProvider.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java similarity index 98% rename from plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobProvider.java rename to plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java index 15d15054883..879c19e36b4 100644 --- a/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/JobProvider.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.ml.job.persistence; +package org.elasticsearch.xpack.ml.job.persistence; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; @@ -79,7 +79,14 @@ import org.elasticsearch.xpack.core.ml.calendars.Calendar; import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.core.ml.job.persistence.BucketsQueryBuilder; +import org.elasticsearch.xpack.core.ml.job.persistence.CalendarQueryBuilder; +import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.core.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery; +import org.elasticsearch.xpack.core.ml.job.persistence.RecordsQueryBuilder; +import org.elasticsearch.xpack.core.ml.job.persistence.ResultsFilterBuilder; +import org.elasticsearch.xpack.core.ml.job.persistence.ScheduledEventsQueryBuilder; import org.elasticsearch.xpack.core.ml.job.process.autodetect.params.AutodetectParams; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerState; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; @@ -95,6 +102,7 @@ import org.elasticsearch.xpack.core.ml.job.results.Influencer; import org.elasticsearch.xpack.core.ml.job.results.ModelPlot; import org.elasticsearch.xpack.core.ml.job.results.Result; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.core.ml.utils.MlIndicesUtils; import org.elasticsearch.xpack.core.security.support.Exceptions; import java.io.IOException; @@ -491,11 +499,6 @@ public class JobProvider { } } - public static IndicesOptions addIgnoreUnavailable(IndicesOptions indicesOptions) { - return IndicesOptions.fromOptions(true, indicesOptions.allowNoIndices(), indicesOptions.expandWildcardsOpen(), - indicesOptions.expandWildcardsClosed(), indicesOptions); - } - /** * Search for buckets with the parameters in the {@link BucketsQueryBuilder} * Uses the 
internal client, so runs as the _xpack user @@ -515,7 +518,7 @@ public class JobProvider { String indexName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); SearchRequest searchRequest = new SearchRequest(indexName); searchRequest.source(query.build()); - searchRequest.indicesOptions(addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)); + searchRequest.indicesOptions(MlIndicesUtils.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)); executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest, ActionListener.wrap(searchResponse -> { @@ -635,7 +638,7 @@ public class JobProvider { indexName, CategoryDefinition.CATEGORY_ID.getPreferredName(), from, size); SearchRequest searchRequest = new SearchRequest(indexName); - searchRequest.indicesOptions(addIgnoreUnavailable(searchRequest.indicesOptions())); + searchRequest.indicesOptions(MlIndicesUtils.addIgnoreUnavailable(searchRequest.indicesOptions())); SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); if (categoryId != null) { sourceBuilder.query(QueryBuilders.termQuery(CategoryDefinition.CATEGORY_ID.getPreferredName(), categoryId)); @@ -677,7 +680,7 @@ public class JobProvider { SearchSourceBuilder searchSourceBuilder = recordsQueryBuilder.build(); SearchRequest searchRequest = new SearchRequest(indexName); - searchRequest.indicesOptions(addIgnoreUnavailable(searchRequest.indicesOptions())); + searchRequest.indicesOptions(MlIndicesUtils.addIgnoreUnavailable(searchRequest.indicesOptions())); searchRequest.source(recordsQueryBuilder.build()); LOGGER.trace("ES API CALL: search all of records from index {} with query {}", indexName, searchSourceBuilder); @@ -723,7 +726,7 @@ public class JobProvider { .filter(new TermsQueryBuilder(Result.RESULT_TYPE.getPreferredName(), Influencer.RESULT_TYPE_VALUE)); SearchRequest searchRequest = new SearchRequest(indexName); - searchRequest.indicesOptions(addIgnoreUnavailable(searchRequest.indicesOptions())); + searchRequest.indicesOptions(MlIndicesUtils.addIgnoreUnavailable(searchRequest.indicesOptions())); FieldSortBuilder sb = query.getSortField() == null ? SortBuilders.fieldSort(ElasticsearchMappings.ES_DOC) : new FieldSortBuilder(query.getSortField()).order(query.isSortDescending() ? 
SortOrder.DESC : SortOrder.ASC); searchRequest.source(new SearchSourceBuilder().query(qb).from(query.getFrom()).size(query.getSize()).sort(sb)); @@ -839,7 +842,7 @@ public class JobProvider { indexName, sortField, from, size); SearchRequest searchRequest = new SearchRequest(indexName); - searchRequest.indicesOptions(addIgnoreUnavailable(searchRequest.indicesOptions())); + searchRequest.indicesOptions(MlIndicesUtils.addIgnoreUnavailable(searchRequest.indicesOptions())); SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); sourceBuilder.sort(sb); sourceBuilder.query(finalQuery); @@ -866,7 +869,7 @@ public class JobProvider { try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { searchResponse = client.prepareSearch(indexName) - .setIndicesOptions(addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)) + .setIndicesOptions(MlIndicesUtils.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)) .setQuery(new TermsQueryBuilder(Result.RESULT_TYPE.getPreferredName(), ModelPlot.RESULT_TYPE_VALUE)) .setFrom(from).setSize(size) .get(); diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/ProcessCtrl.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/ProcessCtrl.java index fd1ca460a7d..35033fcfe84 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/ProcessCtrl.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/ProcessCtrl.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ml.job.process; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -62,16 +61,10 @@ public class ProcessCtrl { public static final Setting MAX_ANOMALY_RECORDS_SETTING = Setting.intSetting("max.anomaly.records", DEFAULT_MAX_NUM_RECORDS, Property.NodeScope); - /** - * This must match the value defined in CLicenseValidator::validate() in the C++ code - */ - static final long VALIDATION_NUMBER = 926213; - /* * General arguments */ static final String JOB_ID_ARG = "--jobid="; - static final String LICENSE_VALIDATION_ARG = "--licenseValidation="; /* * Arguments used by both autodetect and normalize @@ -132,7 +125,6 @@ public class ProcessCtrl { } private ProcessCtrl() { - } /** @@ -150,15 +142,13 @@ public class ProcessCtrl { return rng.nextInt(SECONDS_IN_HOUR); } - public static List buildAutodetectCommand(Environment env, Settings settings, Job job, Logger logger, long controllerPid) { + public static List buildAutodetectCommand(Environment env, Settings settings, Job job, Logger logger) { List command = new ArrayList<>(); command.add(AUTODETECT_PATH); String jobId = JOB_ID_ARG + job.getId(); command.add(jobId); - command.add(makeLicenseArg(controllerPid)); - AnalysisConfig analysisConfig = job.getAnalysisConfig(); if (analysisConfig != null) { addIfNotNull(analysisConfig.getBucketSpan(), BUCKET_SPAN_ARG, command); @@ -232,7 +222,7 @@ public class ProcessCtrl { private static void addIfNotNull(List timeValues, String argKey, List command) { if (timeValues != null) { - addIfNotNull(timeValues.stream().map(v -> v.getSeconds()).collect(Collectors.toList()), argKey, command); + addIfNotNull(timeValues.stream().map(TimeValue::getSeconds).collect(Collectors.toList()), argKey, command); } } @@ -256,12 +246,11 @@ public class ProcessCtrl { * Build the command to start the normalizer process. 
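For readability: the static `addIgnoreUnavailable` helper deleted from `JobProvider` above is now pulled in from `MlIndicesUtils` in the core module (see the new import added to that file earlier in this diff). A minimal sketch of that shared utility, assuming it simply hosts the deleted method body; the class location is taken from the added import, everything else is reconstruction rather than content of this changeset.

[source,java]
----
package org.elasticsearch.xpack.core.ml.utils;

import org.elasticsearch.action.support.IndicesOptions;

public final class MlIndicesUtils {

    private MlIndicesUtils() {
    }

    // Same behaviour as the helper removed from JobProvider: force
    // ignore_unavailable=true while preserving the remaining wildcard options.
    public static IndicesOptions addIgnoreUnavailable(IndicesOptions indicesOptions) {
        return IndicesOptions.fromOptions(true, indicesOptions.allowNoIndices(),
                indicesOptions.expandWildcardsOpen(), indicesOptions.expandWildcardsClosed(),
                indicesOptions);
    }
}
----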
*/ public static List buildNormalizerCommand(Environment env, String jobId, String quantilesState, Integer bucketSpan, - boolean perPartitionNormalization, long controllerPid) throws IOException { + boolean perPartitionNormalization) throws IOException { List command = new ArrayList<>(); command.add(NORMALIZE_PATH); addIfNotNull(bucketSpan, BUCKET_SPAN_ARG, command); - command.add(makeLicenseArg(controllerPid)); command.add(LENGTH_ENCODED_INPUT_ARG); if (perPartitionNormalization) { command.add(PER_PARTITION_NORMALIZATION); @@ -300,14 +289,4 @@ public class ProcessCtrl { return stateFile; } - - /** - * The number must be equal to the daemon controller's PID modulo a magic number. - */ - private static String makeLicenseArg(long controllerPid) { - // Get a random int rather than long so we don't overflow when multiplying by VALIDATION_NUMBER - long rand = Randomness.get().nextInt(); - long val = controllerPid + (((rand < 0) ? -rand : rand) + 1) * VALIDATION_NUMBER; - return LICENSE_VALIDATION_ARG + val; - } } diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java index 407bed6f19f..89418f9d531 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java @@ -31,7 +31,6 @@ import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Set; -import java.util.concurrent.TimeoutException; /** * The autodetect process builder. @@ -97,9 +96,9 @@ public class AutodetectBuilder { /** * Requests that the controller daemon start an autodetect process. 
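With the embedded licence handshake removed, callers no longer thread the controller daemon's PID through `ProcessCtrl`, and `--licenseValidation=` disappears from the generated command lines. Below is a condensed, hypothetical call-site sketch based on the `NativeNormalizerProcessFactory` hunk later in this diff; the wrapper class is invented for illustration and is not part of the changeset.

[source,java]
----
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.ml.job.process.ProcessCtrl;

import java.io.IOException;
import java.util.List;

class NormalizerCommandExample {
    // Previously this call also took nativeController.getPid(), which is why the
    // factories had to handle TimeoutException; both go away with this change.
    static List<String> normalizerCommand(Environment env, String jobId, String quantilesState,
                                          Integer bucketSpan, boolean perPartitionNormalization) throws IOException {
        return ProcessCtrl.buildNormalizerCommand(env, jobId, quantilesState, bucketSpan, perPartitionNormalization);
    }
}
----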
*/ - public void build() throws IOException, TimeoutException { + public void build() throws IOException { - List command = ProcessCtrl.buildAutodetectCommand(env, settings, job, logger, controller.getPid()); + List command = ProcessCtrl.buildAutodetectCommand(env, settings, job, logger); buildLimits(command); buildModelPlotConfig(command); diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index 59c88093f2d..7d662820949 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -29,7 +29,6 @@ import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.config.JobTaskStatus; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.job.persistence.ScheduledEventsQueryBuilder; import org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement; import org.elasticsearch.xpack.core.ml.job.process.autodetect.params.AutodetectParams; @@ -42,6 +41,7 @@ import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.action.TransportOpenJobAction.JobTask; import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.persistence.JobRenormalizedResultsPersister; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import org.elasticsearch.xpack.ml.job.persistence.StateStreamer; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java index 1f869a184ad..5af8e721e1e 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java @@ -30,7 +30,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.concurrent.ExecutorService; -import java.util.concurrent.TimeoutException; public class NativeAutodetectProcessFactory implements AutodetectProcessFactory { @@ -99,7 +98,7 @@ public class NativeAutodetectProcessFactory implements AutodetectProcessFactory autodetectBuilder.build(); processPipes.connectStreams(PROCESS_STARTUP_TIMEOUT); - } catch (IOException | TimeoutException e) { + } catch (IOException e) { String msg = "Failed to launch autodetect for job " + job.getId(); LOGGER.error(msg); throw ExceptionsHelper.serverError(msg, e); diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java index 7113ca19d80..8f26a26f1b4 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java +++ 
b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java @@ -12,26 +12,26 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement; import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.action.PutJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; -import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; -import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcess; +import org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; -import org.elasticsearch.xpack.ml.job.process.normalizer.Renormalizer; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; -import org.elasticsearch.xpack.ml.job.results.AutodetectResult; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.CategoryDefinition; import org.elasticsearch.xpack.core.ml.job.results.Forecast; import org.elasticsearch.xpack.core.ml.job.results.ForecastRequestStats; import org.elasticsearch.xpack.core.ml.job.results.Influencer; import org.elasticsearch.xpack.core.ml.job.results.ModelPlot; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; +import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcess; +import org.elasticsearch.xpack.ml.job.process.normalizer.Renormalizer; +import org.elasticsearch.xpack.ml.job.results.AutodetectResult; import java.time.Duration; import java.util.Date; diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/NativeNormalizerProcessFactory.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/NativeNormalizerProcessFactory.java index 1125581d041..0c6bb407e78 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/NativeNormalizerProcessFactory.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/NativeNormalizerProcessFactory.java @@ -20,7 +20,6 @@ import java.time.Duration; import java.util.List; import java.util.Objects; import java.util.concurrent.ExecutorService; -import java.util.concurrent.TimeoutException; public class NativeNormalizerProcessFactory implements NormalizerProcessFactory { @@ -53,12 +52,11 @@ public class NativeNormalizerProcessFactory implements NormalizerProcessFactory boolean perPartitionNormalization) { try { - List command = ProcessCtrl.buildNormalizerCommand(env, jobId, quantilesState, bucketSpan, - perPartitionNormalization, nativeController.getPid()); + List command = ProcessCtrl.buildNormalizerCommand(env, jobId, quantilesState, bucketSpan, perPartitionNormalization); processPipes.addArgs(command); nativeController.startProcess(command); processPipes.connectStreams(PROCESS_STARTUP_TIMEOUT); - } catch (IOException | 
TimeoutException e) { + } catch (IOException e) { String msg = "Failed to launch normalizer for job " + jobId; LOGGER.error(msg); throw ExceptionsHelper.serverError(msg, e); diff --git a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdater.java b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdater.java index 39c38d9ba27..ccda2552064 100644 --- a/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdater.java +++ b/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdater.java @@ -8,13 +8,13 @@ package org.elasticsearch.xpack.ml.job.process.normalizer; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.job.persistence.BatchedDocumentsIterator; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; -import org.elasticsearch.xpack.ml.job.persistence.JobRenormalizedResultsPersister; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.Influencer; import org.elasticsearch.xpack.core.ml.job.results.Result; +import org.elasticsearch.xpack.ml.job.persistence.BatchedDocumentsIterator; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.JobRenormalizedResultsPersister; import java.util.ArrayList; import java.util.Deque; diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java index a224763e0ca..aaa392feff2 100644 --- a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java +++ b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java @@ -17,9 +17,9 @@ import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.results.Bucket; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.notifications.Auditor; import org.junit.Before; @@ -185,4 +185,4 @@ public class DatafeedJobBuilderTests extends ESTestCase { return null; }).when(jobProvider).bucketsViaInternalClient(any(), any(), any(), any()); } -} \ No newline at end of file +} diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java index 1cf181e5f29..fe47e01447b 100644 --- a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java +++ b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java @@ -24,7 +24,6 @@ import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobTests; import org.elasticsearch.xpack.core.ml.job.persistence.BucketsQueryBuilder; import 
org.elasticsearch.xpack.core.ml.job.persistence.InfluencersQueryBuilder; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.job.persistence.RecordsQueryBuilder; import org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; @@ -38,6 +37,7 @@ import org.elasticsearch.xpack.core.ml.job.results.ModelPlot; import org.elasticsearch.xpack.ml.LocalStateMachineLearning; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcess; import org.elasticsearch.xpack.ml.job.process.autodetect.output.AutoDetectResultProcessor; diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java index 7ee8df9bbdb..66cc8c7c271 100644 --- a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java +++ b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java @@ -9,10 +9,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.ml.action.PutJobAction; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; -import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.core.ml.job.results.Bucket; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; import org.junit.Before; diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobProviderIT.java b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobProviderIT.java index 6640e1a7ea3..eae38e6b5aa 100644 --- a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobProviderIT.java +++ b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobProviderIT.java @@ -22,12 +22,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.ml.LocalStateMachineLearning; import org.elasticsearch.xpack.core.ml.MLMetadataField; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.MlMetadata; -import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; import org.elasticsearch.xpack.core.ml.action.PutJobAction; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.calendars.Calendar; @@ -43,16 +40,19 @@ import org.elasticsearch.xpack.core.ml.job.config.RuleAction; import org.elasticsearch.xpack.core.ml.job.config.RuleCondition; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import 
org.elasticsearch.xpack.core.ml.job.persistence.CalendarQueryBuilder; -import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; -import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import org.elasticsearch.xpack.core.ml.job.persistence.ScheduledEventsQueryBuilder; import org.elasticsearch.xpack.core.ml.job.process.autodetect.params.AutodetectParams; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; -import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCountsTests; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; +import org.elasticsearch.xpack.ml.LocalStateMachineLearning; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; +import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; +import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCountsTests; import org.junit.Before; import java.io.IOException; diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java index a5c12d87724..f967cddadab 100644 --- a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java +++ b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java @@ -33,9 +33,9 @@ import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.job.config.RuleCondition; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzerTests; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.process.autodetect.UpdateParams; import org.elasticsearch.xpack.ml.notifications.Auditor; import org.junit.Before; diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java index 96337e1e7a2..47a168aefad 100644 --- a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java +++ b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.job.persistence.BatchedDocumentsIterator; import org.elasticsearch.xpack.ml.test.SearchHitBuilder; import org.junit.Before; import org.mockito.ArgumentCaptor; diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java index 674279dcdb0..251da796b12 100644 
--- a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java +++ b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java @@ -47,7 +47,6 @@ import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFiel import org.elasticsearch.xpack.core.ml.job.persistence.BucketsQueryBuilder; import org.elasticsearch.xpack.core.ml.job.persistence.InfluencersQueryBuilder; import org.elasticsearch.xpack.core.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.job.persistence.RecordsQueryBuilder; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockBatchedDocumentsIterator.java b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockBatchedDocumentsIterator.java index d483711c10a..c0d15cab49c 100644 --- a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockBatchedDocumentsIterator.java +++ b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockBatchedDocumentsIterator.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.job.persistence; import org.elasticsearch.client.Client; import org.elasticsearch.common.Nullable; import org.elasticsearch.search.SearchHit; -import org.elasticsearch.xpack.core.ml.job.persistence.BatchedResultsIterator; import org.elasticsearch.xpack.core.ml.job.results.Result; import java.util.Deque; @@ -74,4 +73,4 @@ public class MockBatchedDocumentsIterator extends BatchedResultsIterator { public void requireIncludeInterim(boolean value) { this.requireIncludeInterim = value; } -} \ No newline at end of file +} diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/ProcessCtrlTests.java b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/ProcessCtrlTests.java index 4690bde52e3..8091c2a02c3 100644 --- a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/ProcessCtrlTests.java +++ b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/ProcessCtrlTests.java @@ -26,9 +26,6 @@ import static org.elasticsearch.xpack.core.ml.job.config.JobTests.buildJobBuilde public class ProcessCtrlTests extends ESTestCase { private final Logger logger = Mockito.mock(Logger.class); - // 4194304 is the maximum possible PID on Linux according to - // http://web.archive.org/web/20111209081734/http://research.cs.wisc.edu/condor/condorg/linux_scalability.html - private final long pid = randomIntBetween(2, 4194304); public void testBuildAutodetectCommand() { Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); @@ -52,8 +49,8 @@ public class ProcessCtrlTests extends ESTestCase { dd.setTimeField("tf"); job.setDataDescription(dd); - List command = ProcessCtrl.buildAutodetectCommand(env, settings, job.build(), logger, pid); - assertEquals(14, command.size()); + List command = ProcessCtrl.buildAutodetectCommand(env, settings, job.build(), logger); + assertEquals(13, command.size()); assertTrue(command.contains(ProcessCtrl.AUTODETECT_PATH)); assertTrue(command.contains(ProcessCtrl.BUCKET_SPAN_ARG + "120")); assertTrue(command.contains(ProcessCtrl.LATENCY_ARG + "360")); @@ -65,7 +62,6 @@ public class ProcessCtrlTests extends ESTestCase { 
assertTrue(command.contains(ProcessCtrl.maxAnomalyRecordsArg(settings))); assertTrue(command.contains(ProcessCtrl.TIME_FIELD_ARG + "tf")); - assertTrue(hasValidLicense(command)); assertTrue(command.contains(ProcessCtrl.JOB_ID_ARG + "unit-test-job")); assertTrue(command.contains(ProcessCtrl.PER_PARTITION_NORMALIZATION)); @@ -81,7 +77,7 @@ public class ProcessCtrlTests extends ESTestCase { Environment env = TestEnvironment.newEnvironment(settings); Job.Builder job = buildJobBuilder("unit-test-job"); - List command = ProcessCtrl.buildAutodetectCommand(env, settings, job.build(), logger, pid); + List command = ProcessCtrl.buildAutodetectCommand(env, settings, job.build(), logger); assertTrue(command.contains(ProcessCtrl.TIME_FIELD_ARG + "time")); } @@ -94,13 +90,13 @@ public class ProcessCtrlTests extends ESTestCase { int expectedPersistInterval = 10800 + ProcessCtrl.calculateStaggeringInterval(job.getId()); - List command = ProcessCtrl.buildAutodetectCommand(env, settings, job.build(), logger, pid); + List command = ProcessCtrl.buildAutodetectCommand(env, settings, job.build(), logger); assertFalse(command.contains(ProcessCtrl.PERSIST_INTERVAL_ARG + expectedPersistInterval)); settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); env = TestEnvironment.newEnvironment(settings); - command = ProcessCtrl.buildAutodetectCommand(env, settings, job.build(), logger, pid); + command = ProcessCtrl.buildAutodetectCommand(env, settings, job.build(), logger); assertTrue(command.contains(ProcessCtrl.PERSIST_INTERVAL_ARG + expectedPersistInterval)); } @@ -109,30 +105,11 @@ public class ProcessCtrlTests extends ESTestCase { Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build()); String jobId = "unit-test-job"; - List command = ProcessCtrl.buildNormalizerCommand(env, jobId, null, 300, true, pid); - assertEquals(5, command.size()); + List command = ProcessCtrl.buildNormalizerCommand(env, jobId, null, 300, true); + assertEquals(4, command.size()); assertTrue(command.contains(ProcessCtrl.NORMALIZE_PATH)); assertTrue(command.contains(ProcessCtrl.BUCKET_SPAN_ARG + "300")); - assertTrue(hasValidLicense(command)); assertTrue(command.contains(ProcessCtrl.LENGTH_ENCODED_INPUT_ARG)); assertTrue(command.contains(ProcessCtrl.PER_PARTITION_NORMALIZATION)); } - - private boolean hasValidLicense(List command) throws NumberFormatException { - int matches = 0; - for (String arg : command) { - if (arg.startsWith(ProcessCtrl.LICENSE_VALIDATION_ARG)) { - ++matches; - String[] argAndVal = arg.split("="); - if (argAndVal.length != 2) { - return false; - } - long val = Long.parseLong(argAndVal[1]); - if ((val % ProcessCtrl.VALIDATION_NUMBER) != (pid % ProcessCtrl.VALIDATION_NUMBER)) { - return false; - } - } - } - return matches == 1; - } } diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java index a1afc619026..6b5582a2b6c 100644 --- a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java +++ b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java @@ -21,9 +21,6 @@ import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; import 
org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.ml.action.TransportOpenJobAction.JobTask; -import org.elasticsearch.xpack.ml.job.JobManager; -import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzerTests; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.DetectionRule; @@ -34,17 +31,20 @@ import org.elasticsearch.xpack.core.ml.job.config.JobTaskStatus; import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig; -import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; -import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import org.elasticsearch.xpack.core.ml.job.process.autodetect.params.AutodetectParams; -import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams; -import org.elasticsearch.xpack.ml.job.process.autodetect.params.FlushJobParams; -import org.elasticsearch.xpack.ml.job.process.autodetect.params.TimeRange; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; +import org.elasticsearch.xpack.ml.action.TransportOpenJobAction.JobTask; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzerTests; +import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; +import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams; +import org.elasticsearch.xpack.ml.job.process.autodetect.params.FlushJobParams; +import org.elasticsearch.xpack.ml.job.process.autodetect.params.TimeRange; import org.elasticsearch.xpack.ml.job.process.normalizer.NormalizerFactory; import org.elasticsearch.xpack.ml.notifications.Auditor; import org.junit.Before; diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java index 8f2c5887d7f..b593c7856e7 100644 --- a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java +++ b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java @@ -15,20 +15,20 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement; -import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; -import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcess; import 
org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; -import org.elasticsearch.xpack.ml.job.process.normalizer.Renormalizer; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; -import org.elasticsearch.xpack.ml.job.results.AutodetectResult; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.CategoryDefinition; import org.elasticsearch.xpack.core.ml.job.results.Influencer; import org.elasticsearch.xpack.core.ml.job.results.ModelPlot; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; +import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcess; +import org.elasticsearch.xpack.ml.job.process.normalizer.Renormalizer; +import org.elasticsearch.xpack.ml.job.results.AutodetectResult; import org.junit.Before; import org.mockito.InOrder; diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java index 2c91ffd94c3..eedc42148b1 100644 --- a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java +++ b/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java @@ -11,14 +11,14 @@ import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.job.persistence.JobProvider; -import org.elasticsearch.xpack.ml.job.persistence.JobRenormalizedResultsPersister; -import org.elasticsearch.xpack.ml.job.persistence.MockBatchedDocumentsIterator; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.BucketInfluencer; import org.elasticsearch.xpack.core.ml.job.results.Influencer; import org.elasticsearch.xpack.core.ml.job.results.Result; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.JobRenormalizedResultsPersister; +import org.elasticsearch.xpack.ml.job.persistence.MockBatchedDocumentsIterator; import org.junit.Before; import org.mockito.MockitoAnnotations; import org.mockito.invocation.InvocationOnMock; diff --git a/plugin/monitoring/build.gradle b/plugin/monitoring/build.gradle index 2563b39ec9b..039b6522040 100644 --- a/plugin/monitoring/build.gradle +++ b/plugin/monitoring/build.gradle @@ -1,4 +1,7 @@ -evaluationDependsOn(':x-pack-elasticsearch:plugin:core') +import com.carrotsearch.gradle.junit4.RandomizedTestingTask +import org.elasticsearch.gradle.BuildPlugin + +evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.esplugin' esplugin { @@ -6,22 +9,16 @@ esplugin { description 'Elasticsearch Expanded Pack Plugin - Monitoring' classname 'org.elasticsearch.xpack.monitoring.Monitoring' hasNativeController false - requiresKeystore true + requiresKeystore false extendedPlugins = ['x-pack-core'] - licenseFile project(':x-pack-elasticsearch').file('LICENSE.txt') - noticeFile 
project(':x-pack-elasticsearch').file('NOTICE.txt') } archivesBaseName = 'x-pack-monitoring' -licenseHeaders.enabled = false - -integTest.enabled = false - dependencies { provided "org.elasticsearch:elasticsearch:${version}" provided "org.elasticsearch.plugin:x-pack-core:${version}" - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') // monitoring deps compile "org.elasticsearch.client:elasticsearch-rest-client:${version}" @@ -34,8 +31,6 @@ dependencies { compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked" compileTestJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked" -// TODO: don't publish test artifacts just to run messy tests, fix the tests! -// https://github.com/elastic/x-plugins/issues/724 configurations { testArtifacts.extendsFrom testRuntime } @@ -60,5 +55,30 @@ dependencyLicenses { } run { - plugin ':x-pack-elasticsearch:plugin:core' + plugin xpackModule('core') +} + +// xpack modules are installed in real clusters as the meta plugin, so +// installing them as individual plugins for integ tests doesn't make sense, +// so we disable integ tests +integTest.enabled = false + +// Instead we create a separate task to run the +// tests based on ESIntegTestCase +task internalClusterTest(type: RandomizedTestingTask, + group: JavaBasePlugin.VERIFICATION_GROUP, + description: 'Multi-node tests', + dependsOn: test.dependsOn) { + configure(BuildPlugin.commonTestConfig(project)) + classpath = project.test.classpath + testClassesDir = project.test.testClassesDir + include '**/*IT.class' + systemProperty 'es.set.netty.runtime.available.processors', 'false' +} +check.dependsOn internalClusterTest +internalClusterTest.mustRunAfter test + +// also add an "alias" task to make typing on the command line easier +task icTest { + dependsOn internalClusterTest } diff --git a/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/local/LocalIndicesCleanerTests.java b/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/local/LocalIndicesCleanerTests.java index c749e318abf..b74701d35fe 100644 --- a/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/local/LocalIndicesCleanerTests.java +++ b/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/local/LocalIndicesCleanerTests.java @@ -50,7 +50,6 @@ public class LocalIndicesCleanerTests extends AbstractIndicesCleanerTestCase { protected void createIndex(String name, DateTime creationDate) { assertAcked(prepareCreate(name) .setSettings(Settings.builder().put(IndexMetaData.SETTING_CREATION_DATE, creationDate.getMillis()).build())); - ensureYellow(name); } @Override diff --git a/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java b/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java index 0250736ca4e..5075bb1066d 100644 --- a/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java +++ b/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java @@ -113,7 +113,7 @@ public class LocalExporterIntegTests extends LocalExporterIntegTestCase { refresh(); assertThat(client().admin().indices().prepareExists(".monitoring-*").get().isExists(), is(true)); -
ensureYellow(".monitoring-*"); + ensureYellowAndNoInitializingShards(".monitoring-*"); SearchResponse response = client().prepareSearch(".monitoring-*").get(); assertEquals(nbDocs, response.getHits().getTotalHits()); @@ -132,7 +132,7 @@ public class LocalExporterIntegTests extends LocalExporterIntegTestCase { final int numNodes = internalCluster().getNodeNames().length; assertBusy(() -> { assertThat(client().admin().indices().prepareExists(".monitoring-*").get().isExists(), is(true)); - ensureYellow(".monitoring-*"); + ensureYellowAndNoInitializingShards(".monitoring-*"); assertThat(client().prepareSearch(".monitoring-es-*") .setSize(0) @@ -193,7 +193,7 @@ public class LocalExporterIntegTests extends LocalExporterIntegTestCase { assertBusy(() -> { IndicesExistsResponse indicesExistsResponse = client().admin().indices().prepareExists(".monitoring-*").get(); if (indicesExistsResponse.isExists()) { - ensureYellow(".monitoring-*"); + ensureYellowAndNoInitializingShards(".monitoring-*"); refresh(".monitoring-es-*"); SearchResponse response = client().prepareSearch(".monitoring-es-*") diff --git a/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java b/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java index 69f8b0f626a..713d1d4637d 100644 --- a/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java +++ b/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java @@ -11,6 +11,7 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.nio.entity.NStringEntity; import org.apache.lucene.util.Constants; +import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.client.Response; @@ -65,6 +66,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/3728") public class MonitoringIT extends ESRestTestCase { private static final String BASIC_AUTH_VALUE = basicAuthHeaderValue("x_pack_rest_user", TEST_PASSWORD_SECURE_STRING); diff --git a/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/test/MonitoringIntegTestCase.java b/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/test/MonitoringIntegTestCase.java index 8951b33f319..9bf9e530af5 100644 --- a/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/test/MonitoringIntegTestCase.java +++ b/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/test/MonitoringIntegTestCase.java @@ -156,7 +156,7 @@ public abstract class MonitoringIntegTestCase extends ESIntegTestCase { } protected void ensureMonitoringIndicesYellow() { - ensureYellow(".monitoring-es-*"); + ensureYellowAndNoInitializingShards(".monitoring-es-*"); } protected List> monitoringTemplates() { diff --git a/plugin/security/build.gradle b/plugin/security/build.gradle index 72ee2a0a5d6..64d33fa2486 100644 --- a/plugin/security/build.gradle +++ b/plugin/security/build.gradle @@ -1,4 +1,4 @@ -evaluationDependsOn(':x-pack-elasticsearch:plugin:core') +evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.esplugin' esplugin { @@ -8,16 +8,10 @@ esplugin { hasNativeController false requiresKeystore true extendedPlugins = ['x-pack-core'] - 
licenseFile project(':x-pack-elasticsearch').file('LICENSE.txt') - noticeFile project(':x-pack-elasticsearch').file('NOTICE.txt') } archivesBaseName = 'x-pack-security' -licenseHeaders.enabled = false - -integTest.enabled = false - dependencies { provided "org.elasticsearch:elasticsearch:${version}" @@ -25,10 +19,11 @@ dependencies { compileOnly project(path: ':modules:transport-netty4', configuration: 'runtime') provided project(path: ':plugins:transport-nio', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:monitoring') + testCompile project(path: xpackModule('monitoring')) + // TODO: figure out how to use xpackModule for this testCompile project(path: ':x-pack-elasticsearch:plugin:sql:sql-proto') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') //compile project(path: ':modules:transport-netty4') compile 'com.unboundid:unboundid-ldapsdk:3.2.0' @@ -85,11 +80,6 @@ artifacts { archives jar testArtifacts testJar } -//testClasses { -// dependsOn project(":x-pack-elasticsearch:plugin:core").testJar -//} -// TestUtils creates things in core, and the cli thinks that it needs to read from the core testArtifacts -// jar, so it fails with a zip file resource not found in getResource(Path) sourceSets.test.resources { srcDir '../core/src/test/resources' } @@ -230,7 +220,7 @@ if (JavaVersion.current() > JavaVersion.VERSION_1_8) { } run { - plugin ':x-pack-elasticsearch:plugin:core' + plugin xpackModule('core') } test { @@ -241,10 +231,7 @@ test { systemProperty 'es.set.netty.runtime.available.processors', 'false' } -integTestRunner { - /* - * We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each - * other if we allow them to set the number of available processors as it's set-once in Netty. - */ - systemProperty 'es.set.netty.runtime.available.processors', 'false' -} +// xpack modules are installed in real clusters as the meta plugin, so +// installing them as individual plugins for integ tests doesn't make sense, +// so we disable integ tests +integTest.enabled = false diff --git a/plugin/security/src/main/bin/extension b/plugin/security/src/main/bin/extension deleted file mode 100755 index 3bed986920b..00000000000 --- a/plugin/security/src/main/bin/extension +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -# or more contributor license agreements. Licensed under the Elastic License; -# you may not use this file except in compliance with the Elastic License. 
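As the `Security.java` hunks just below show, the filesystem-based `XPackExtensionsService` is replaced by `SecurityExtension` instances handed to the plugin directly: the public constructor now delegates to a package-private one that accepts the extension list, which is what tests can use. A hedged sketch of that test-side wiring follows; the wrapper class and the extension instance are hypothetical, and the generic type of the constructor's `List` parameter is assumed to be `SecurityExtension`.

[source,java]
----
// Package-private constructor, so this sketch would live in the plugin's own package.
package org.elasticsearch.xpack.security;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.core.security.SecurityExtension;

import java.nio.file.Path;
import java.util.Collections;

class SecurityExtensionWiringExample {
    static Security securityWithExtension(Settings settings, Path configPath, SecurityExtension extension) {
        // Production code still calls the public constructor, which now delegates to
        // this(settings, configPath, Collections.emptyList()).
        return new Security(settings, configPath, Collections.singletonList(extension));
    }
}
----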
- -source "`dirname "$0"`"/../elasticsearch-env - -source "`dirname "$0"`"/x-pack-security-env - -exec \ - "$JAVA" \ - $ES_JAVA_OPTS \ - -Des.path.home="$ES_HOME" \ - -Des.path.conf="$ES_PATH_CONF" \ - -cp "$ES_CLASSPATH" \ - org.elasticsearch.xpack.core.extensions.XPackExtensionCli \ - "$@" diff --git a/plugin/security/src/main/bin/extension.bat b/plugin/security/src/main/bin/extension.bat deleted file mode 100644 index 4f6bb3a0515..00000000000 Binary files a/plugin/security/src/main/bin/extension.bat and /dev/null differ diff --git a/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index df7718b296b..a90d44b2bf6 100644 --- a/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -23,7 +23,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Booleans; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.util.Providers; @@ -79,9 +78,8 @@ import org.elasticsearch.transport.TransportRequestHandler; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.extensions.XPackExtension; -import org.elasticsearch.xpack.core.extensions.XPackExtensionsService; import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.core.security.SecurityExtension; import org.elasticsearch.xpack.core.security.SecurityField; import org.elasticsearch.xpack.core.security.SecuritySettings; import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheAction; @@ -228,12 +226,10 @@ import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.function.UnaryOperator; -import java.util.stream.Collectors; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_FORMAT_SETTING; -import static org.elasticsearch.xpack.core.XPackPlugin.resolveXPackExtensionsFile; import static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED; import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_TEMPLATE_NAME; import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; @@ -266,17 +262,16 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw private final SetOnce tokenService = new SetOnce<>(); private final SetOnce securityActionFilter = new SetOnce<>(); private final List bootstrapChecks; - private final XPackExtensionsService extensionsService; private final List securityExtensions = new ArrayList<>(); - public Security(Settings settings, final Path configPath) { + this(settings, configPath, Collections.emptyList()); + } + + Security(Settings settings, final Path configPath, List extensions) { this.settings = settings; this.transportClientMode = XPackPlugin.transportClientMode(settings); this.env = transportClientMode ? null : new Environment(settings, configPath); - this.extensionsService = transportClientMode ? 
null : new XPackExtensionsService(settings, resolveXPackExtensionsFile(env), - Collections.emptyList()); - this.enabled = XPackSettings.SECURITY_ENABLED.get(settings); if (enabled && transportClientMode == false) { validateAutoCreateIndex(settings); @@ -295,6 +290,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw } else { this.bootstrapChecks = Collections.emptyList(); } + this.securityExtensions.addAll(extensions); } @Override @@ -353,8 +349,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw NamedXContentRegistry xContentRegistry, Environment environment, NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { try { - return createComponents(client, threadPool, clusterService, resourceWatcherService, - extensionsService.getExtensions().stream().collect(Collectors.toList())); + return createComponents(client, threadPool, clusterService, resourceWatcherService); } catch (final Exception e) { throw new IllegalStateException("security initialization failed", e); } @@ -362,8 +357,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw // pkg private for testing - tests want to pass in their set of extensions hence we are not using the extension service directly Collection createComponents(Client client, ThreadPool threadPool, ClusterService clusterService, - ResourceWatcherService resourceWatcherService, - List extensions) throws Exception { + ResourceWatcherService resourceWatcherService) throws Exception { if (enabled == false) { return Collections.emptyList(); } @@ -417,9 +411,6 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw Map realmFactories = new HashMap<>(InternalRealms.getFactories(threadPool, resourceWatcherService, getSslService(), nativeUsersStore, nativeRoleMappingStore, securityLifecycleService)); for (SecurityExtension extension : securityExtensions) { - extensions.add(extension); - } - for (SecurityExtension extension : extensions) { Map newRealms = extension.getRealms(resourceWatcherService); for (Map.Entry entry : newRealms.entrySet()) { if (realmFactories.put(entry.getKey(), entry.getValue()) != null) { @@ -435,7 +426,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw AuthenticationFailureHandler failureHandler = null; String extensionName = null; - for (SecurityExtension extension : extensions) { + for (SecurityExtension extension : securityExtensions) { AuthenticationFailureHandler extensionFailureHandler = extension.getAuthenticationFailureHandler(); if (extensionFailureHandler != null && failureHandler != null) { throw new IllegalStateException("Extensions [" + extensionName + "] and [" + extension.toString() + "] " + @@ -459,7 +450,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw final NativeRolesStore nativeRolesStore = new NativeRolesStore(settings, client, getLicenseState(), securityLifecycleService); final ReservedRolesStore reservedRolesStore = new ReservedRolesStore(); List, ActionListener>>> rolesProviders = new ArrayList<>(); - for (SecurityExtension extension : extensions) { + for (SecurityExtension extension : securityExtensions) { rolesProviders.addAll(extension.getRolesProviders(settings, resourceWatcherService)); } final CompositeRolesStore allRolesStore = new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore, @@ -542,13 +533,13 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw 
@Override public List> getSettings() { - return getSettings(transportClientMode, extensionsService); + return getSettings(transportClientMode, securityExtensions); } /** * Get the {@link Setting setting configuration} for all security components, including those defined in extensions. */ - public static List> getSettings(boolean transportClientMode, @Nullable XPackExtensionsService extensionsService) { + public static List> getSettings(boolean transportClientMode, List securityExtensions) { List> settingsList = new ArrayList<>(); if (transportClientMode) { @@ -567,7 +558,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw // authentication settings AnonymousUser.addSettings(settingsList); - RealmSettings.addSettings(settingsList, extensionsService == null ? null : extensionsService.getExtensions()); + RealmSettings.addSettings(settingsList, securityExtensions); NativeRolesStore.addSettings(settingsList); ReservedRealm.addSettings(settingsList); AuthenticationService.addSettings(settingsList); @@ -596,8 +587,6 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw if (AuthenticationServiceField.RUN_AS_ENABLED.get(settings)) { headers.add(AuthenticationServiceField.RUN_AS_USER_HEADER); } - headers.addAll(extensionsService.getExtensions().stream() - .flatMap(e -> e.getRestHeaders().stream()).collect(Collectors.toList())); return headers; } @@ -605,12 +594,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw public List getSettingsFilter() { List asArray = settings.getAsList(SecurityField.setting("hide_settings")); ArrayList settingsFilter = new ArrayList<>(asArray); - if (transportClientMode == false) { - settingsFilter.addAll(RealmSettings.getSettingsFilter(extensionsService.getExtensions())); - for (XPackExtension extension : extensionsService.getExtensions()) { - settingsFilter.addAll(extension.getSettingsFilter()); - } - } + settingsFilter.addAll(RealmSettings.getSettingsFilter(securityExtensions)); // hide settings where we don't define them - they are part of a group... settingsFilter.add("transport.profiles.*." 
+ SecurityField.setting("*")); return settingsFilter; diff --git a/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java b/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java index 517b13933cf..143b3ffd64b 100644 --- a/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java +++ b/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.security.authc.saml.SamlRedirect; import org.elasticsearch.xpack.security.authc.saml.SamlUtils; import org.opensaml.saml.saml2.core.LogoutResponse; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; @@ -122,7 +123,7 @@ public final class TransportSamlInvalidateSessionAction })), listener::onFailure)); } - private List> filterTokens(List> tokens, Map requiredMetadata) { + private List> filterTokens(Collection> tokens, Map requiredMetadata) { return tokens.stream() .filter(tup -> { Map actualMetadata = tup.v1().getMetadata(); diff --git a/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 5ebd7933926..49f9b9c01a9 100644 --- a/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.update.UpdateRequest; @@ -62,8 +63,10 @@ import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.xpack.core.security.ScrollHelper; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.KeyAndTimestamp; import org.elasticsearch.xpack.core.security.authc.TokenMetaData; @@ -97,6 +100,7 @@ import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; +import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -106,8 +110,7 @@ import java.util.Optional; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; +import java.util.function.Supplier; import static org.elasticsearch.action.support.TransportActions.isShardNotAvailableException; import static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; @@ -815,7 +818,7 @@ public final class TokenService 
extends AbstractComponent { * Find all stored refresh and access tokens that have not been invalidated or expired, and were issued against * the specified realm. */ - public void findActiveTokensForRealm(String realmName, ActionListener>> listener) { + public void findActiveTokensForRealm(String realmName, ActionListener>> listener) { ensureEnabled(); if (Strings.isNullOrEmpty(realmName)) { @@ -835,32 +838,30 @@ public final class TokenService extends AbstractComponent { .should(QueryBuilders.termQuery("refresh_token.invalidated", false)) ); - SearchRequest request = client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) + final SearchRequest request = client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) + .setScroll(TimeValue.timeValueSeconds(10L)) .setQuery(boolQuery) .setVersion(false) + .setSize(1000) + .setFetchSource(true) .request(); + final Supplier supplier = client.threadPool().getThreadContext().newRestorableContext(false); lifecycleService.prepareIndexIfNeededThenExecute(listener::onFailure, () -> - executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, - ActionListener.wrap(searchResponse -> { - if (searchResponse.isTimedOut()) { - listener.onFailure(new ElasticsearchSecurityException("Failed to find user tokens")); - } else { - listener.onResponse(parseDocuments(searchResponse)); - } - }, listener::onFailure), - client::search)); + ScrollHelper.fetchAllByEntity(client, request, new ContextPreservingActionListener<>(supplier, listener), this::parseHit)); } - private List> parseDocuments(SearchResponse searchResponse) { - return StreamSupport.stream(searchResponse.getHits().spliterator(), false).map(hit -> { - final Map source = hit.getSourceAsMap(); - try { - return parseTokensFromDocument(source); - } catch (IOException e) { - throw invalidGrantException("cannot read token from document"); - } - }).collect(Collectors.toList()); + private Tuple parseHit(SearchHit hit) { + final Map source = hit.getSourceAsMap(); + if (source == null) { + throw new IllegalStateException("token document did not have source but source should have been fetched"); + } + + try { + return parseTokensFromDocument(source); + } catch (IOException e) { + throw invalidGrantException("cannot read token from document"); + } } /** diff --git a/plugin/security/src/main/plugin-metadata/plugin-security.policy b/plugin/security/src/main/plugin-metadata/plugin-security.policy index 84f4eb5ca10..857c2f6e472 100644 --- a/plugin/security/src/main/plugin-metadata/plugin-security.policy +++ b/plugin/security/src/main/plugin-metadata/plugin-security.policy @@ -5,6 +5,7 @@ grant { permission java.util.PropertyPermission "*", "read,write"; // needed because of SAML (cf. 
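`findActiveTokensForRealm` above switches from a single bounded search to a scroll search driven by `ScrollHelper.fetchAllByEntity`, wraps the listener in a `ContextPreservingActionListener` built from `newRestorableContext(false)` so the caller's thread context survives the async hops, and parses each `SearchHit` individually via `parseHit`. A simplified, hypothetical sketch of the scroll-and-collect idea in plain Java (not the actual `ScrollHelper`): drain the pages, map every hit, and bail out if more hits arrive than the reported total so memory stays bounded.

[source,java]
----
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.function.Function;

final class ScrollCollector {
    /** Drain all pages, mapping each hit; fail fast if more hits arrive than the server promised. */
    static <H, T> List<T> collect(Iterator<List<H>> pages, long expectedTotalHits, Function<H, T> mapHit) {
        List<T> results = new ArrayList<>();
        while (pages.hasNext()) {
            for (H hit : pages.next()) {
                if (results.size() >= expectedTotalHits) {
                    throw new IllegalStateException("scrolling returned more hits than expected ["
                            + expectedTotalHits + "] so bailing out to prevent unbounded memory consumption");
                }
                results.add(mapHit.apply(hit));
            }
        }
        return results;
    }
}
----

The test changes further down line up with this: the mock transport layer in `TransportSamlInvalidateSessionActionTests` now answers `ClearScrollAction` requests (a scroll consumer is expected to clear its scroll when it is done), and `ScrollHelperIntegTests` asserts against the updated hit counts and bail-out message.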
o.e.x.s.s.RestorableContextClassLoader) + permission java.lang.RuntimePermission "getClassLoader"; permission java.lang.RuntimePermission "setContextClassLoader"; // needed for multiple server implementations used in tests diff --git a/plugin/security/src/test/java/org/elasticsearch/license/LicenseServiceWithSecurityTests.java b/plugin/security/src/test/java/org/elasticsearch/license/LicenseServiceWithSecurityTests.java index 230e29c6b80..42bfd2df1d5 100644 --- a/plugin/security/src/test/java/org/elasticsearch/license/LicenseServiceWithSecurityTests.java +++ b/plugin/security/src/test/java/org/elasticsearch/license/LicenseServiceWithSecurityTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.transport.Netty4Plugin; import org.elasticsearch.xpack.security.LocalStateSecurity; -import org.elasticsearch.xpack.security.Security; import java.util.Arrays; import java.util.Collection; @@ -39,7 +38,7 @@ public class LicenseServiceWithSecurityTests extends SecurityIntegTestCase { License license = licensingClient.prepareGetLicense().get().license(); License prodLicense = TestUtils.generateSignedLicense("platinum", TimeValue.timeValueHours(24)); IllegalStateException ise = expectThrows(IllegalStateException.class, () -> licensingClient.preparePutLicense(prodLicense).get()); - assertEquals("Can not upgrade to a production license unless TLS is configured or security is disabled", ise.getMessage()); + assertEquals("Cannot install a [PLATINUM] license unless TLS is configured or security is disabled", ise.getMessage()); assertThat(licensingClient.prepareGetLicense().get().license(), equalTo(license)); } diff --git a/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java index d60d9f3a64e..37bea975eaa 100644 --- a/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java +++ b/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java @@ -31,11 +31,14 @@ import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.security.test.SecurityTestUtils; +import java.io.IOException; +import java.io.InputStream; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.FileSystemNotFoundException; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -43,6 +46,7 @@ import java.util.List; import java.util.function.Consumer; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; +import static org.apache.lucene.util.LuceneTestCase.createTempFile; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.elasticsearch.xpack.security.test.SecurityTestUtils.writeFile; @@ -331,15 +335,14 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas } private static Path resolveResourcePath(String resourcePathToStore) { - final URL url = SecuritySettingsSource.class.getResource(resourcePathToStore); try { - Path path = PathUtils.get(url.toURI()); - if (Files.notExists(path)) { - throw new ElasticsearchException("path does not exist: " + path); + Path path = createTempFile(); + try (InputStream 
resourceInput = SecuritySettingsSource.class.getResourceAsStream(resourcePathToStore)) { + Files.copy(resourceInput, path, StandardCopyOption.REPLACE_EXISTING); } return path; - } catch (URISyntaxException | FileSystemNotFoundException e) { - throw new ElasticsearchException("Failed to resolve resource (Path=[{}] URL=[{}])", e, resourcePathToStore, url); + } catch (IOException e) { + throw new ElasticsearchException("Failed to resolve resource (Path=[{}])", e, resourcePathToStore); } } diff --git a/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java b/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java index c79a369cd1b..7ab26b0c33f 100644 --- a/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java +++ b/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java @@ -81,8 +81,8 @@ public class ScrollHelperIntegTests extends ESSingleNodeTestCase { SearchRequest request = new SearchRequest(); String scrollId = randomAlphaOfLength(5); - SearchHit[] hits = new SearchHit[] {new SearchHit(1)}; - InternalSearchResponse internalResponse = new InternalSearchResponse(new SearchHits(hits, 1, 1), null, null, null, false, false, 1); + SearchHit[] hits = new SearchHit[] {new SearchHit(1), new SearchHit(2)}; + InternalSearchResponse internalResponse = new InternalSearchResponse(new SearchHits(hits, 3, 1), null, null, null, false, false, 1); SearchResponse response = new SearchResponse(internalResponse, scrollId, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); @@ -112,7 +112,7 @@ public class ScrollHelperIntegTests extends ESSingleNodeTestCase { }, Function.identity()); assertNotNull("onFailure wasn't called", failure.get()); - assertEquals("scrolling returned more hits [2] than expected [1] so bailing out to prevent unbounded memory consumption.", + assertEquals("scrolling returned more hits [4] than expected [3] so bailing out to prevent unbounded memory consumption.", failure.get().getMessage()); } } diff --git a/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 33ecbb9e3ad..768a2642e70 100644 --- a/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.security.SecurityExtension; import org.elasticsearch.xpack.core.security.SecurityField; import org.elasticsearch.xpack.core.security.authc.Realm; import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; @@ -94,7 +95,7 @@ public class SecurityTests extends ESTestCase { Environment env = TestEnvironment.newEnvironment(settings); licenseState = new TestUtils.UpdatableLicenseState(); SSLService sslService = new SSLService(settings, env); - security = new Security(settings, null) { + security = new Security(settings, null, Arrays.asList(extensions)) { @Override protected XPackLicenseState getLicenseState() { return licenseState; @@ -118,8 +119,7 @@ public class SecurityTests extends ESTestCase { Client client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); 
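`SecuritySettingsSource.resolveResourcePath` above no longer turns the resource URL into a filesystem path; it copies the classpath resource into a temp file instead. That also works when the resource is packaged inside a jar, where `PathUtils.get(url.toURI())` would fail with a `FileSystemNotFoundException`. A minimal sketch of the same idea using only the JDK (class and method names here are illustrative):

[source,java]
----
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

final class ResourcePaths {
    /** Copy a classpath resource to a temp file so it can be used as a Path even from inside a jar. */
    static Path toTempPath(Class<?> owner, String resource) throws IOException {
        Path path = Files.createTempFile("resource", null);
        try (InputStream in = owner.getResourceAsStream(resource)) {
            if (in == null) {
                throw new IOException("resource not found: " + resource);
            }
            Files.copy(in, path, StandardCopyOption.REPLACE_EXISTING);
        }
        return path;
    }
}
----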
when(client.settings()).thenReturn(settings); - return security.createComponents(client, threadPool, clusterService, mock(ResourceWatcherService.class), - Arrays.asList(extensions)); + return security.createComponents(client, threadPool, clusterService, mock(ResourceWatcherService.class)); } private static T findComponent(Class type, Collection components) { diff --git a/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 2bd300a694b..e8f7eaf877b 100644 --- a/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -14,6 +14,9 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.ClearScrollAction; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -144,6 +147,12 @@ public class TransportSamlInvalidateSessionActionTests extends SamlTestCase { new SearchResponseSections(new SearchHits(hits, hits.length, 0f), null, null, false, false, null, 1), "_scrollId1", 1, 1, 0, 1, null, null); listener.onResponse((Response) response); + } else if (ClearScrollAction.NAME.equals(action.name())) { + assertThat(request, instanceOf(ClearScrollRequest.class)); + ClearScrollRequest scrollRequest = (ClearScrollRequest) request; + assertEquals("_scrollId1", scrollRequest.getScrollIds().get(0)); + ClearScrollResponse response = new ClearScrollResponse(true, 1); + listener.onResponse((Response) response); } else { super.doExecute(action, request, listener); } diff --git a/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java b/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java index e4cc92ebe91..12654e2e23d 100644 --- a/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java +++ b/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java @@ -886,7 +886,7 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase { private SearchHit getIndexedAuditMessage(Message message) throws InterruptedException { assertNotNull("no audit message was enqueued", message); final String indexName = IndexNameResolver.resolve(IndexAuditTrailField.INDEX_NAME_PREFIX, message.timestamp, rollover); - ensureYellow(indexName); + ensureYellowAndNoInitializingShards(indexName); GetSettingsResponse settingsResponse = getClient().admin().indices().prepareGetSettings(indexName).get(); assertThat(settingsResponse.getSetting(indexName, "index.number_of_shards"), is(Integer.toString(numShards))); assertThat(settingsResponse.getSetting(indexName, "index.number_of_replicas"), is(Integer.toString(numReplicas))); @@ -916,14 +916,18 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase { } @Override - public ClusterHealthStatus 
ensureYellow(String... indices) { + public ClusterHealthStatus ensureYellowAndNoInitializingShards(String... indices) { if (remoteIndexing == false) { - return super.ensureYellow(indices); + return super.ensureYellowAndNoInitializingShards(indices); } - // pretty ugly but just a rip of ensureYellow that uses a different client + // pretty ugly but just a rip of ensureYellowAndNoInitializingShards that uses a different client ClusterHealthResponse actionGet = getClient().admin().cluster().health(Requests.clusterHealthRequest(indices) - .waitForNoRelocatingShards(true).waitForYellowStatus().waitForEvents(Priority.LANGUID)).actionGet(); + .waitForNoRelocatingShards(true) + .waitForYellowStatus() + .waitForEvents(Priority.LANGUID) + .waitForNoInitializingShards(true)) + .actionGet(); if (actionGet.isTimedOut()) { logger.info("ensureYellow timed out, cluster state:\n{}\n{}", getClient().admin().cluster().prepareState().get().getState(), diff --git a/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmSettingsTests.java b/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmSettingsTests.java index ecc66387b94..78d2f34bb4b 100644 --- a/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmSettingsTests.java +++ b/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmSettingsTests.java @@ -12,9 +12,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.extensions.XPackExtension; import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.support.Hasher; +import org.elasticsearch.xpack.core.security.SecurityExtension; import java.util.ArrayList; import java.util.Arrays; @@ -45,7 +45,7 @@ public class RealmSettingsTests extends ESTestCase { * This test exists because (in 5.x), we want to be backwards compatible and accept custom realms that * have not been updated to explicitly declare their settings. 
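The audit-trail test above replaces `ensureYellow` with `ensureYellowAndNoInitializingShards`, and its remote-indexing variant adds `waitForNoInitializingShards(true)` to the health request, so that subsequent searches do not race against shards that are still initializing. A small sketch of that health check using the same builder calls as the diff, assuming the Elasticsearch client classes of this branch are on the classpath:

[source,java]
----
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.Priority;

final class HealthChecks {
    /** Wait until the given indices are at least yellow with no relocating or initializing shards. */
    static ClusterHealthResponse ensureStableYellow(Client client, String... indices) {
        ClusterHealthRequest request = Requests.clusterHealthRequest(indices)
                .waitForNoRelocatingShards(true)
                .waitForYellowStatus()
                .waitForEvents(Priority.LANGUID)
                .waitForNoInitializingShards(true);
        return client.admin().cluster().health(request).actionGet();
    }
}
----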
* - * @see XPackExtension#getRealmSettings() + * @see org.elasticsearch.xpack.core.security.SecurityExtension#getRealmSettings() */ public void testRealmWithUnknownTypeAcceptsAllSettings() throws Exception { final Settings.Builder settings = baseSettings("tam", true) @@ -322,7 +322,7 @@ public class RealmSettingsTests extends ESTestCase { private Setting group() { final List> list = new ArrayList<>(); - final List noExtensions = Collections.emptyList(); + final List noExtensions = Collections.emptyList(); RealmSettings.addSettings(list, noExtensions); assertThat(list, hasSize(1)); return list.get(0); diff --git a/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlSpMetadataBuilderTests.java b/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlSpMetadataBuilderTests.java index 73e2a516075..35956236b6c 100644 --- a/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlSpMetadataBuilderTests.java +++ b/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlSpMetadataBuilderTests.java @@ -87,21 +87,21 @@ public class SamlSpMetadataBuilderTests extends SamlTestCase { " protocolSupportEnumeration=\"urn:oasis:names:tc:SAML:2.0:protocol\">" + "" + "" + - "MIIDWDCCAkCgAwIBAgIVANRTZaFrK+Pz19O8TZsb3HSJmAWpMA0GCSqGSIb3DQEBCwUAMB0xGzAZ\n" + - "BgNVBAMTEkVsYXN0aWNzZWFyY2gtU0FNTDAeFw0xNzExMjkwMjQ3MjZaFw0yMDExMjgwMjQ3MjZa\n" + - "MB0xGzAZBgNVBAMTEkVsYXN0aWNzZWFyY2gtU0FNTDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC\n" + - "AQoCggEBALHTuPGOieCbD2mZUdYrdH4ofo7qFze6rQUROCLKqf69uBuwvraNWOcwxHUTKVlLMV3d\n" + - "dKzYo+yfC44AMXrrV+79xVWsTCNHu9sxQzcDwiEx2OtOOX9MAk6tJQ3svNrMPNXWh8ftwmmY9XdF\n" + - "ZwMYUdo6FPjSQj5uQTDmGWRgF08f7VRlk6N92d/fzn9DlDm+TFuaOr17OTSR4B6RTrNwKC29AmXQ\n" + - "TwCijCObjLqyMEqP20dZCQeVf2qw8JKUHhW4r6mCLzqmeR+kRTqiHMSWxJddzxDGw6X7fOS7iuzB\n" + - "0+TnsKwgu8nYrEXds9MkGf1Yco7WsM43g+Es+LhNHP+es70CAwEAAaOBjjCBizAdBgNVHQ4EFgQU\n" + - "ILqVKGhIi8p5Xffsow/IKFLhRbIwWQYDVR0jBFIwUIAUILqVKGhIi8p5Xffsow/IKFLhRbKhIaQf\n" + - "MB0xGzAZBgNVBAMTEkVsYXN0aWNzZWFyY2gtU0FNTIIVANRTZaFrK+Pz19O8TZsb3HSJmAWpMA8G\n" + - "A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAGhl4V9mp4SWSV2E3HAJ1PX+Vmp6k27K\n" + - "d0tkOk1B9fyA13QB30teyiL7RR0vSHRyWFY8rQH1mHD366GKRWLITRG/QPULamGdYXX4h0pFj5ld\n" + - "aubLxM/O9vEAxOgmo/lsdkeIq9tLBqY06r/5A/Mcgo63KGi00AFYBoyvqfOu6nRLPnQr+rKVfdNO\n" + - "pWeIiFY1i2XTNZ3CZjNPSTwiQMUzrCxKXB9lL0vF6QL2Gj2iBhzNfXi88wf7xaR6XKY1wNuv3HLP\n" + - "sL7n+PWby7LRX188dyS1dmKfQcrKL65OssBA5NC8CAYyBiygBmWN+5kVJM5fSb0SwPSoVWrNyz+8\n" + + "MIIDWDCCAkCgAwIBAgIVANRTZaFrK+Pz19O8TZsb3HSJmAWpMA0GCSqGSIb3DQEBCwUAMB0xGzAZ" + System.lineSeparator() + + "BgNVBAMTEkVsYXN0aWNzZWFyY2gtU0FNTDAeFw0xNzExMjkwMjQ3MjZaFw0yMDExMjgwMjQ3MjZa" + System.lineSeparator() + + "MB0xGzAZBgNVBAMTEkVsYXN0aWNzZWFyY2gtU0FNTDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC" + System.lineSeparator() + + "AQoCggEBALHTuPGOieCbD2mZUdYrdH4ofo7qFze6rQUROCLKqf69uBuwvraNWOcwxHUTKVlLMV3d" + System.lineSeparator() + + "dKzYo+yfC44AMXrrV+79xVWsTCNHu9sxQzcDwiEx2OtOOX9MAk6tJQ3svNrMPNXWh8ftwmmY9XdF" + System.lineSeparator() + + "ZwMYUdo6FPjSQj5uQTDmGWRgF08f7VRlk6N92d/fzn9DlDm+TFuaOr17OTSR4B6RTrNwKC29AmXQ" + System.lineSeparator() + + "TwCijCObjLqyMEqP20dZCQeVf2qw8JKUHhW4r6mCLzqmeR+kRTqiHMSWxJddzxDGw6X7fOS7iuzB" + System.lineSeparator() + + "0+TnsKwgu8nYrEXds9MkGf1Yco7WsM43g+Es+LhNHP+es70CAwEAAaOBjjCBizAdBgNVHQ4EFgQU" + System.lineSeparator() + + "ILqVKGhIi8p5Xffsow/IKFLhRbIwWQYDVR0jBFIwUIAUILqVKGhIi8p5Xffsow/IKFLhRbKhIaQf" + System.lineSeparator() + + 
"MB0xGzAZBgNVBAMTEkVsYXN0aWNzZWFyY2gtU0FNTIIVANRTZaFrK+Pz19O8TZsb3HSJmAWpMA8G" + System.lineSeparator() + + "A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAGhl4V9mp4SWSV2E3HAJ1PX+Vmp6k27K" + System.lineSeparator() + + "d0tkOk1B9fyA13QB30teyiL7RR0vSHRyWFY8rQH1mHD366GKRWLITRG/QPULamGdYXX4h0pFj5ld" + System.lineSeparator() + + "aubLxM/O9vEAxOgmo/lsdkeIq9tLBqY06r/5A/Mcgo63KGi00AFYBoyvqfOu6nRLPnQr+rKVfdNO" + System.lineSeparator() + + "pWeIiFY1i2XTNZ3CZjNPSTwiQMUzrCxKXB9lL0vF6QL2Gj2iBhzNfXi88wf7xaR6XKY1wNuv3HLP" + System.lineSeparator() + + "sL7n+PWby7LRX188dyS1dmKfQcrKL65OssBA5NC8CAYyBiygBmWN+5kVJM5fSb0SwPSoVWrNyz+8" + System.lineSeparator() + "IUldQE8=" + "" + "" + diff --git a/plugin/sql/build.gradle b/plugin/sql/build.gradle index d25f76535e6..b60b6267890 100644 --- a/plugin/sql/build.gradle +++ b/plugin/sql/build.gradle @@ -108,3 +108,6 @@ task regen { } } +//project(xpackModule('core')).bundlePlugin { +// from jar +//} diff --git a/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml b/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml index 59cf1cf65b1..ddc4ef056cb 100644 --- a/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml +++ b/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml @@ -1135,3 +1135,122 @@ } } +--- +"Test job with rules": + + - do: + xpack.ml.put_job: + job_id: jobs-crud-rules + body: > + { + "analysis_config": { + "detectors": [ + { + "function": "count", + "by_field_name": "country", + "rules": [ + { + "actions": ["filter_results", "skip_sampling"], + "conditions": [ + { + "type":"numerical_actual", + "field_name":"country", + "field_value": "uk", + "condition": {"operator":"lt","value":"33.3"} + }, + {"type":"categorical", "field_name":"country", "filter_id": "foo"} + ] + } + ] + } + ] + }, + "data_description" : {} + } + + - do: + xpack.ml.get_jobs: + job_id: jobs-crud-rules + - match: { count: 1 } + - match: { + jobs.0.analysis_config.detectors.0.rules: [ + { + "actions": ["filter_results", "skip_sampling"], + "conditions_connective": "or", + "conditions": [ + { + "type":"numerical_actual", + "field_name":"country", + "field_value": "uk", + "condition": {"operator":"lt","value":"33.3"} + }, + {"type":"categorical", "field_name":"country", "filter_id": "foo"} + ] + } + ] + } + +--- +"Test job with pre 6.2 rules": + + - skip: + features: "warnings" + reason: certain rule fields were renamed in 6.2.0 + + - do: + warnings: + - Deprecated field [detector_rules] used, expected [rules] instead + - Deprecated field [rule_action] used, expected [actions] instead + - Deprecated field [rule_conditions] used, expected [conditions] instead + - Deprecated field [condition_type] used, expected [type] instead + - Deprecated field [value_filter] used, expected [filter_id] instead + xpack.ml.put_job: + job_id: jobs-crud-pre-6-2-rules + body: > + { + "analysis_config": { + "detectors": [ + { + "function": "count", + "by_field_name": "country", + "detector_rules": [ + { + "rule_action": "filter_results", + "rule_conditions": [ + { + "condition_type":"numerical_actual", + "field_name":"country", + "field_value": "uk", + "condition": {"operator":"lt","value":"33.3"} + }, + {"type":"categorical", "field_name":"country", "value_filter": "foo"} + ] + } + ] + } + ] + }, + "data_description" : {} + } + + - do: + xpack.ml.get_jobs: + job_id: jobs-crud-pre-6-2-rules + - match: { count: 1 } + - match: { + jobs.0.analysis_config.detectors.0.rules: [ + { + "actions": ["filter_results"], + "conditions_connective": "or", + "conditions": [ + { + 
"type":"numerical_actual", + "field_name":"country", + "field_value": "uk", + "condition": {"operator":"lt","value":"33.3"} + }, + {"type":"categorical", "field_name":"country", "filter_id": "foo"} + ] + } + ] + } diff --git a/plugin/upgrade/build.gradle b/plugin/upgrade/build.gradle index 910467b78ec..ad12e595314 100644 --- a/plugin/upgrade/build.gradle +++ b/plugin/upgrade/build.gradle @@ -1,4 +1,7 @@ -evaluationDependsOn(':x-pack-elasticsearch:plugin:core') +import com.carrotsearch.gradle.junit4.RandomizedTestingTask +import org.elasticsearch.gradle.BuildPlugin + +evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.esplugin' esplugin { @@ -6,22 +9,16 @@ esplugin { description 'Elasticsearch Expanded Pack Plugin - Upgrade' classname 'org.elasticsearch.xpack.upgrade.Upgrade' hasNativeController false - requiresKeystore true + requiresKeystore false extendedPlugins = ['x-pack-core'] - licenseFile project(':x-pack-elasticsearch').file('LICENSE.txt') - noticeFile project(':x-pack-elasticsearch').file('NOTICE.txt') } archivesBaseName = 'x-pack-upgrade' -test.enabled=false -licenseHeaders.enabled = false - -integTest.enabled = false dependencies { provided "org.elasticsearch:elasticsearch:${version}" provided "org.elasticsearch.plugin:x-pack-core:${version}" - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked" @@ -32,5 +29,27 @@ dependencyLicenses { } run { - plugin ':x-pack-elasticsearch:plugin:core' + plugin xpackModule('core') +} + +integTest.enabled = false + +// Instead we create a separate task to run the +// tests based on ESIntegTestCase +task internalClusterTest(type: RandomizedTestingTask, + group: JavaBasePlugin.VERIFICATION_GROUP, + description: 'Multi-node tests', + dependsOn: test.dependsOn) { + configure(BuildPlugin.commonTestConfig(project)) + classpath = project.test.classpath + testClassesDir = project.test.testClassesDir + include '**/*IT.class' + systemProperty 'es.set.netty.runtime.available.processors', 'false' +} +check.dependsOn internalClusterTest +internalClusterTest.mustRunAfter test + +// also add an "alias" task to make typing on the command line easier +task icTest { + dependsOn internalClusterTest } diff --git a/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java b/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java index c2549857535..07d51438b3d 100644 --- a/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java +++ b/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.upgrade; +import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.PlainActionFuture; @@ -29,6 +30,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThro import static org.hamcrest.Matchers.empty; import static org.hamcrest.core.IsEqual.equalTo; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/3729") public class IndexUpgradeIT extends IndexUpgradeIntegTestCase { @Before diff --git a/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIntegTestCase.java 
b/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIntegTestCase.java index ea7514b5981..cb14d70ecc6 100644 --- a/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIntegTestCase.java +++ b/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIntegTestCase.java @@ -13,13 +13,13 @@ import org.elasticsearch.license.AbstractLicensesIntegrationTestCase; import org.elasticsearch.license.License; import org.elasticsearch.license.TestUtils; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.xpack.core.XPackPlugin; -import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.ml.MachineLearningField; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.XPackClientPlugin; import org.elasticsearch.xpack.core.monitoring.test.MockPainlessScriptEngine; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; public abstract class IndexUpgradeIntegTestCase extends AbstractLicensesIntegrationTestCase { @Override @@ -27,39 +27,15 @@ public abstract class IndexUpgradeIntegTestCase extends AbstractLicensesIntegrat return true; } - @Override - protected Settings nodeSettings(int nodeOrdinal) { - Settings.Builder settings = Settings.builder().put(super.nodeSettings(nodeOrdinal)); - settings.put(MachineLearningField.AUTODETECT_PROCESS.getKey(), false); - settings.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), false); - settings.put(XPackSettings.SECURITY_ENABLED.getKey(), false); - settings.put(XPackSettings.WATCHER_ENABLED.getKey(), false); - settings.put(XPackSettings.MONITORING_ENABLED.getKey(), false); - settings.put(XPackSettings.GRAPH_ENABLED.getKey(), false); - return settings.build(); - } - - @Override - protected Settings transportClientSettings() { - Settings.Builder settings = Settings.builder().put(super.transportClientSettings()); - settings.put(MachineLearningField.AUTODETECT_PROCESS.getKey(), false); - settings.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), false); - settings.put(XPackSettings.SECURITY_ENABLED.getKey(), false); - settings.put(XPackSettings.WATCHER_ENABLED.getKey(), false); - settings.put(XPackSettings.MONITORING_ENABLED.getKey(), false); - settings.put(XPackSettings.GRAPH_ENABLED.getKey(), false); - return settings.build(); - } - @Override protected Collection> nodePlugins() { - return Arrays.asList(XPackPlugin.class, ReindexPlugin.class, MockPainlessScriptEngine.TestPlugin.class, - CommonAnalysisPlugin.class); + return Arrays.asList(LocalStateCompositeXPackPlugin.class, Upgrade.class, ReindexPlugin.class, + MockPainlessScriptEngine.TestPlugin.class, CommonAnalysisPlugin.class); } @Override protected Collection> transportClientPlugins() { - return nodePlugins(); + return Collections.singletonList(XPackClientPlugin.class); } private static String randomValidLicenseType() { return randomFrom("trial", "platinum", "gold", "standard", "basic"); diff --git a/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java b/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java index f8e99edb802..e6baf5bab14 100644 --- a/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java +++ b/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.upgrade; import com.carrotsearch.hppc.cursors.ObjectCursor; +import 
org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; @@ -32,7 +33,7 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportResponse; -import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import java.util.ArrayList; import java.util.Arrays; @@ -49,11 +50,13 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; import static org.hamcrest.core.IsEqual.equalTo; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/3729") public class InternalIndexReindexerIT extends IndexUpgradeIntegTestCase { @Override protected Collection> nodePlugins() { - return Arrays.asList(XPackPlugin.class, ReindexPlugin.class, CustomScriptPlugin.class, CommonAnalysisPlugin.class); + return Arrays.asList(LocalStateCompositeXPackPlugin.class, + ReindexPlugin.class, CustomScriptPlugin.class, CommonAnalysisPlugin.class); } public static class CustomScriptPlugin extends MockScriptPlugin { diff --git a/plugin/watcher/build.gradle b/plugin/watcher/build.gradle index 584d4dd54a8..d9da3e2948f 100644 --- a/plugin/watcher/build.gradle +++ b/plugin/watcher/build.gradle @@ -1,4 +1,4 @@ -evaluationDependsOn(':x-pack-elasticsearch:plugin:core') +evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.esplugin' esplugin { @@ -6,19 +6,12 @@ esplugin { description 'Elasticsearch Expanded Pack Plugin - Watcher' classname 'org.elasticsearch.xpack.watcher.Watcher' hasNativeController false - requiresKeystore true + requiresKeystore false extendedPlugins = ['x-pack-core'] - licenseFile project(':x-pack-elasticsearch').file('LICENSE.txt') - noticeFile project(':x-pack-elasticsearch').file('NOTICE.txt') } archivesBaseName = 'x-pack-watcher' -licenseHeaders.enabled = false - -integTest.enabled = false - -// TODO: fix this! https://github.com/elastic/x-plugins/issues/1066 ext.compactProfile = 'full' compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked" @@ -36,7 +29,7 @@ dependencies { provided project(path: ':modules:transport-netty4', configuration: 'runtime') provided project(path: ':plugins:transport-nio', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') // watcher deps compile 'com.googlecode.owasp-java-html-sanitizer:owasp-java-html-sanitizer:r239' @@ -106,7 +99,7 @@ if (JavaVersion.current() <= JavaVersion.VERSION_1_8) { } run { - plugin ':x-pack-elasticsearch:plugin:core' + plugin xpackModule('core') } test { @@ -117,16 +110,7 @@ test { systemProperty 'es.set.netty.runtime.available.processors', 'false' } -integTestRunner { - /* - * We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each - * other if we allow them to set the number of available processors as it's set-once in Netty. 
- */ - systemProperty 'es.set.netty.runtime.available.processors', 'false' -} - -integTestRunner { - // TODO: this test was failing for the big split merge - def blacklist = ['watcher/execute_watch/*'] - systemProperty 'tests.rest.blacklist', blacklist.join(',') -} +// xpack modules are installed in real clusters as the meta plugin, so +// installing them as individual plugins for integ tests doesn't make sense, +// so we disable integ tests +integTest.enabled = false diff --git a/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/CronnableSchedule.java b/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/CronnableSchedule.java index a0c9833e634..ec309c69476 100644 --- a/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/CronnableSchedule.java +++ b/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/CronnableSchedule.java @@ -5,6 +5,8 @@ */ package org.elasticsearch.xpack.watcher.trigger.schedule; +import org.elasticsearch.xpack.core.scheduler.Cron; + import java.util.Arrays; import java.util.Comparator; import java.util.Objects; diff --git a/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/tool/CronEvalTool.java b/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/tool/CronEvalTool.java index 3bcd817aad0..33b1217895d 100644 --- a/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/tool/CronEvalTool.java +++ b/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/tool/CronEvalTool.java @@ -10,12 +10,11 @@ import java.util.List; import joptsimple.OptionSet; import joptsimple.OptionSpec; -import org.elasticsearch.cli.Command; import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.LoggingAwareCommand; import org.elasticsearch.cli.UserException; import org.elasticsearch.cli.Terminal; -import org.elasticsearch.xpack.watcher.trigger.schedule.Cron; +import org.elasticsearch.xpack.core.scheduler.Cron; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; diff --git a/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/EmailSecretsIntegrationTests.java b/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/EmailSecretsIntegrationTests.java index 6dab68539d7..b5e229ff9f9 100644 --- a/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/EmailSecretsIntegrationTests.java +++ b/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/EmailSecretsIntegrationTests.java @@ -123,6 +123,7 @@ public class EmailSecretsIntegrationTests extends AbstractWatcherIntegrationTest // now we restart, to make sure the watches and their secrets are reloaded from the index properly assertAcked(watcherClient.prepareWatchService().stop().get()); + ensureWatcherStopped(); assertAcked(watcherClient.prepareWatchService().start().get()); ensureWatcherStarted(); diff --git a/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTestCase.java b/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTestCase.java index 9b518ae4477..525461038e6 100644 --- a/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTestCase.java +++ b/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTestCase.java @@ -9,6 +9,7 @@ import 
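`CronnableSchedule` and `CronEvalTool` above now import `Cron` from `org.elasticsearch.xpack.core.scheduler` rather than the watcher package, i.e. the class has moved into x-pack core so other modules can share it. Assuming it keeps its Quartz-style API (a constructor taking the expression and `getNextValidTimeAfter`, returning a negative value when there is no further fire time), previewing the next few fire times looks roughly like this:

[source,java]
----
import org.elasticsearch.xpack.core.scheduler.Cron;

final class CronPreview {
    /** Print the next n fire times of a cron expression, starting from now. */
    static void preview(String expression, int n) {
        Cron cron = new Cron(expression);            // e.g. "0 0/5 * * * ?"
        long time = System.currentTimeMillis();
        for (int i = 0; i < n; i++) {
            time = cron.getNextValidTimeAfter(time); // assumed to be negative when no next time exists
            if (time < 0) {
                break;
            }
            System.out.println(new java.util.Date(time));
        }
    }
}
----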
org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.scheduler.Cron; import org.elasticsearch.xpack.watcher.trigger.schedule.support.DayOfWeek; import org.elasticsearch.xpack.watcher.trigger.schedule.support.DayTimes; import org.elasticsearch.xpack.watcher.trigger.schedule.support.Month; diff --git a/qa/audit-tests/build.gradle b/qa/audit-tests/build.gradle index 321822e8c40..34b6deaef3f 100644 --- a/qa/audit-tests/build.gradle +++ b/qa/audit-tests/build.gradle @@ -2,22 +2,22 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:security', configuration: 'testArtifacts') + testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') } String outputDir = "generated-resources/${project.name}" task copyXPackPluginProps(type: Copy) { // wth is this? - from project(':x-pack-elasticsearch:plugin:core').file('src/main/plugin-metadata') - from project(':x-pack-elasticsearch:plugin:core').tasks.pluginProperties - from project(':x-pack-elasticsearch:plugin:security').file('src/main/plugin-metadata') - from project(':x-pack-elasticsearch:plugin:security').tasks.pluginProperties + from project(xpackModule('core')).file('src/main/plugin-metadata') + from project(xpackModule('core')).tasks.pluginProperties + from project(xpackModule('security')).file('src/main/plugin-metadata') + from project(xpackModule('security')).tasks.pluginProperties into outputDir } project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackPluginProps) integTestCluster { distribution 'zip' - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setting 'xpack.ml.enabled', 'false' setting 'xpack.security.audit.enabled', 'true' setting 'xpack.security.audit.outputs', 'index' diff --git a/qa/audit-tests/src/test/java/org/elasticsearch/xpack/security/audit/IndexAuditIT.java b/qa/audit-tests/src/test/java/org/elasticsearch/xpack/security/audit/IndexAuditIT.java index 86bc2f57c15..1d4b8004463 100644 --- a/qa/audit-tests/src/test/java/org/elasticsearch/xpack/security/audit/IndexAuditIT.java +++ b/qa/audit-tests/src/test/java/org/elasticsearch/xpack/security/audit/IndexAuditIT.java @@ -61,7 +61,7 @@ public class IndexAuditIT extends ESIntegTestCase { } } - ensureYellow(".security_audit_log*"); + ensureYellowAndNoInitializingShards(".security_audit_log*"); logger.info("security audit log index is yellow"); ClusterState state = client().admin().cluster().prepareState().get().getState(); lastClusterState.set(state); diff --git a/qa/core-rest-tests-with-security/build.gradle b/qa/core-rest-tests-with-security/build.gradle index 53af05e9624..1bb377ba56c 100644 --- a/qa/core-rest-tests-with-security/build.gradle +++ b/qa/core-rest-tests-with-security/build.gradle @@ -2,8 +2,8 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') - testCompile project(path: ':x-pack-elasticsearch:plugin:security', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') } integTest { @@ -22,7 +22,7 @@ integTestRunner { } 
integTestCluster { - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setting 'xpack.watcher.enabled', 'false' setting 'xpack.monitoring.enabled', 'false' setting 'xpack.ml.enabled', 'false' diff --git a/qa/full-cluster-restart/build.gradle b/qa/full-cluster-restart/build.gradle index 5f7ba160e66..ac4dbb39836 100644 --- a/qa/full-cluster-restart/build.gradle +++ b/qa/full-cluster-restart/build.gradle @@ -11,16 +11,16 @@ apply plugin: 'elasticsearch.build' test.enabled = false dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile (project(path: ':x-pack-elasticsearch:plugin:security', configuration: 'runtime')) { + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile (project(path: xpackModule('security'), configuration: 'runtime')) { // Need to drop the guava dependency here or we get a conflict with watcher's guava dependency. // This is total #$%, but the solution is to get the SAML realm (which uses guava) out of security proper exclude group: "com.google.guava", module: "guava" } - testCompile project(path: ':x-pack-elasticsearch:plugin:watcher', configuration: 'runtime') + testCompile project(path: xpackModule('watcher'), configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') - testCompile (project(path: ':x-pack-elasticsearch:plugin:security', configuration: 'testArtifacts')) { + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + testCompile (project(path: xpackModule('security'), configuration: 'testArtifacts')) { // Need to drop the guava dependency here or we get a conflict with watcher's guava dependency. // This is total #$%, but the solution is to get the SAML realm (which uses guava) out of security proper exclude group: "com.google.guava", module: "guava" @@ -125,7 +125,7 @@ subprojects { String output = "generated-resources/${project.name}" task copyTestNodeKeystore(type: Copy) { - from project(':x-pack-elasticsearch:plugin:core') + from project(xpackModule('core')) .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks') into outputDir } @@ -140,7 +140,7 @@ subprojects { Object extension = extensions.findByName("${baseName}#oldClusterTestCluster") configure(extensions.findByName("${baseName}#oldClusterTestCluster")) { dependsOn copyTestNodeKeystore - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path distribution = 'zip' bwcVersion = version numBwcNodes = 2 @@ -190,7 +190,7 @@ subprojects { dependsOn oldClusterTestRunner, "${baseName}#oldClusterTestCluster#node0.stop", "${baseName}#oldClusterTestCluster#node1.stop" - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path distribution = 'zip' numNodes = 2 clusterName = 'full-cluster-restart' @@ -248,24 +248,24 @@ subprojects { check.dependsOn(integTest) dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:watcher', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') - testCompile project(path: ':x-pack-elasticsearch:plugin:security', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('watcher'), configuration: 'runtime') + testCompile 
project(path: xpackModule('core'), configuration: 'testArtifacts') + testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') } // copy x-pack plugin info so it is on the classpath and security manager has the right permissions task copyXPackRestSpec(type: Copy) { dependsOn(project.configurations.restSpec, 'processTestResources') - from project(':x-pack-elasticsearch:plugin:core').sourceSets.test.resources + from project(xpackModule('core')).sourceSets.test.resources include 'rest-api-spec/api/**' into project.sourceSets.test.output.resourcesDir } task copyXPackPluginProps(type: Copy) { dependsOn(copyXPackRestSpec) - from project(':x-pack-elasticsearch:plugin:core').file('src/main/plugin-metadata') - from project(':x-pack-elasticsearch:plugin:core').tasks.pluginProperties + from project(xpackModule('core')).file('src/main/plugin-metadata') + from project(xpackModule('core')).tasks.pluginProperties into outputDir } project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackPluginProps) diff --git a/qa/ml-basic-multi-node/build.gradle b/qa/ml-basic-multi-node/build.gradle index 510ff253170..196c3085b5b 100644 --- a/qa/ml-basic-multi-node/build.gradle +++ b/qa/ml-basic-multi-node/build.gradle @@ -2,8 +2,8 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:ml', configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('ml'), configuration: 'runtime') } integTestCluster { @@ -13,5 +13,5 @@ integTestCluster { setting 'xpack.ml.enabled', 'true' numNodes = 3 distribution = 'zip' - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path } diff --git a/qa/ml-disabled/build.gradle b/qa/ml-disabled/build.gradle index fad2844da3b..34cb9a12d35 100644 --- a/qa/ml-disabled/build.gradle +++ b/qa/ml-disabled/build.gradle @@ -2,8 +2,8 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:ml', configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('ml'), configuration: 'runtime') } integTestCluster { @@ -11,5 +11,5 @@ integTestCluster { setting 'xpack.ml.enabled', 'false' numNodes = 1 distribution = 'zip' - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path } diff --git a/qa/ml-native-tests/build.gradle b/qa/ml-native-tests/build.gradle new file mode 100644 index 00000000000..b28a568c17d --- /dev/null +++ b/qa/ml-native-tests/build.gradle @@ -0,0 +1,83 @@ +import org.elasticsearch.gradle.LoggedExec + +apply plugin: 'elasticsearch.standalone-rest-test' +apply plugin: 'elasticsearch.rest-test' + +dependencies { + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + testCompile project(path: xpackModule('ml'), configuration: 'runtime') + testCompile project(path: xpackModule('ml'), configuration: 'testArtifacts') +} + +integTestRunner { + /* + * We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on 
each + * other if we allow them to set the number of available processors as it's set-once in Netty. + */ + systemProperty 'es.set.netty.runtime.available.processors', 'false' +} + +// location of generated keystores and certificates +File keystoreDir = new File(project.buildDir, 'keystore') + +// Generate the node's keystore +File nodeKeystore = new File(keystoreDir, 'test-node.jks') +task createNodeKeyStore(type: LoggedExec) { + doFirst { + if (nodeKeystore.parentFile.exists() == false) { + nodeKeystore.parentFile.mkdirs() + } + if (nodeKeystore.exists()) { + delete nodeKeystore + } + } + executable = new File(project.runtimeJavaHome, 'bin/keytool') + standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8')) + args '-genkey', + '-alias', 'test-node', + '-keystore', nodeKeystore, + '-keyalg', 'RSA', + '-keysize', '2048', + '-validity', '712', + '-dname', 'CN=smoke-test-plugins-ssl', + '-keypass', 'keypass', + '-storepass', 'keypass' +} + +// Add keystores to test classpath: it expects it there +sourceSets.test.resources.srcDir(keystoreDir) +processTestResources.dependsOn(createNodeKeyStore) + +integTestCluster { + dependsOn createNodeKeyStore + setting 'xpack.ml.enabled', 'true' + setting 'logger.org.elasticsearch.xpack.ml.datafeed', 'TRACE' + setting 'xpack.monitoring.enabled', 'false' + setting 'xpack.security.authc.token.enabled', 'true' + setting 'xpack.security.transport.ssl.enabled', 'true' + setting 'xpack.security.transport.ssl.keystore.path', nodeKeystore.name + setting 'xpack.security.transport.ssl.verification_mode', 'certificate' + setting 'xpack.security.audit.enabled', 'true' + plugin xpackProject('plugin').path + + keystoreSetting 'bootstrap.password', 'x-pack-test-password' + keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass' + distribution = 'zip' // this is important since we use the reindex module in ML + + setupCommand 'setupDummyUser', + 'bin/x-pack/users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser' + + extraConfigFile nodeKeystore.name, nodeKeystore + + waitCondition = { node, ant -> + File tmpFile = new File(node.cwd, 'wait.success') + ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow", + dest: tmpFile.toString(), + username: 'x_pack_rest_user', + password: 'x-pack-test-password', + ignoreerrors: true, + retries: 10) + return tmpFile.exists() + } +} diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java similarity index 100% rename from plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java rename to qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java similarity index 100% rename from plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java rename to qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java 
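The new `qa/ml-native-tests/build.gradle` above generates a throw-away JKS keystore for transport TLS by shelling out to the JDK's `keytool`. The same invocation expressed as a hypothetical Java helper (the arguments mirror the build script's values; the class and method names are illustrative, and on Windows the binary would be `keytool.exe`):

[source,java]
----
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;

final class TestKeystores {
    /** Create a self-signed JKS keystore for test TLS, mirroring the Gradle createNodeKeyStore task. */
    static void generate(Path javaHome, Path keystore) throws IOException, InterruptedException {
        File keytool = javaHome.resolve("bin/keytool").toFile();
        Process process = new ProcessBuilder(keytool.getPath(),
                "-genkey", "-alias", "test-node",
                "-keystore", keystore.toString(),
                "-keyalg", "RSA", "-keysize", "2048", "-validity", "712",
                "-dname", "CN=smoke-test-plugins-ssl",
                "-keypass", "keypass", "-storepass", "keypass")
                .inheritIO()
                .start();
        if (process.waitFor() != 0) {
            throw new IOException("keytool failed to create " + keystore);
        }
    }
}
----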
b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java similarity index 100% rename from plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java rename to qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java similarity index 100% rename from plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java rename to qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java similarity index 100% rename from plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java rename to qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java similarity index 100% rename from plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java rename to qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java similarity index 59% rename from plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java rename to qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java index 8343d7132b2..463c1d1e778 100644 --- a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java +++ b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java @@ -14,14 +14,17 @@ import org.elasticsearch.xpack.core.ml.job.config.DetectionRule; import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; +import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.job.config.Operator; import org.elasticsearch.xpack.core.ml.job.config.RuleCondition; import org.elasticsearch.xpack.core.ml.job.config.RuleConditionType; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import org.junit.After; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -31,6 +34,8 @@ import java.util.stream.Collectors; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isOneOf; /** * An integration test for detection rules @@ -42,7 +47,7 @@ public class DetectionRulesIT extends MlNativeAutodetectIntegTestCase { cleanUp(); } - public void test() throws Exception { + public void testNumericalRule() throws Exception { RuleCondition condition1 = RuleCondition.createNumerical( 
RuleConditionType.NUMERICAL_ACTUAL, "by_field", @@ -143,6 +148,110 @@ public class DetectionRulesIT extends MlNativeAutodetectIntegTestCase { assertThat(secondHaldRecordByFieldValues, contains("by_field_value_1", "by_field_value_2")); } + public void testCategoricalRule() throws IOException, InterruptedException { + MlFilter safeIps = new MlFilter("safe_ips", Arrays.asList("111.111.111.111", "222.222.222.222")); + assertThat(putMlFilter(safeIps), is(true)); + + RuleCondition condition = RuleCondition.createCategorical("ip", safeIps.getId()); + DetectionRule rule = new DetectionRule.Builder(Collections.singletonList(condition)).build(); + + Detector.Builder detector = new Detector.Builder("count", null); + detector.setRules(Arrays.asList(rule)); + detector.setOverFieldName("ip"); + + AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build())); + analysisConfig.setBucketSpan(TimeValue.timeValueHours(1)); + DataDescription.Builder dataDescription = new DataDescription.Builder(); + Job.Builder job = new Job.Builder("detection-rule-categorical-test"); + job.setAnalysisConfig(analysisConfig); + job.setDataDescription(dataDescription); + + registerJob(job); + putJob(job); + openJob(job.getId()); + + long timestamp = 1509062400000L; + List data = new ArrayList<>(); + + // Let's send a bunch of random IPs with counts of 1 + for (int bucket = 0; bucket < 20; bucket++) { + for (int i = 0; i < 5; i++) { + data.add(createIpRecord(timestamp, randomAlphaOfLength(10))); + } + timestamp += TimeValue.timeValueHours(1).getMillis(); + } + + // Now send anomalous counts for our filtered IPs plus 333.333.333.333 + List namedIps = Arrays.asList("111.111.111.111", "222.222.222.222", "333.333.333.333"); + long firstAnomalyTime = timestamp; + for (int i = 0; i < 10; i++) { + for (String ip : namedIps) { + data.add(createIpRecord(timestamp, ip)); + } + } + + // Some more normal buckets + for (int bucket = 0; bucket < 3; bucket++) { + for (int i = 0; i < 5; i++) { + data.add(createIpRecord(timestamp, randomAlphaOfLength(10))); + } + timestamp += TimeValue.timeValueHours(1).getMillis(); + } + + postData(job.getId(), joinBetween(0, data.size(), data)); + data = new ArrayList<>(); + flushJob(job.getId(), false); + + List records = getRecords(job.getId()); + assertThat(records.size(), equalTo(1)); + assertThat(records.get(0).getTimestamp().getTime(), equalTo(firstAnomalyTime)); + assertThat(records.get(0).getOverFieldValue(), equalTo("333.333.333.333")); + + // Now let's update the filter + MlFilter updatedFilter = new MlFilter(safeIps.getId(), Collections.singletonList("333.333.333.333")); + assertThat(putMlFilter(updatedFilter), is(true)); + + // We need to give some time for the update to be applied on the autodetect process + Thread.sleep(1000); + + long secondAnomalyTime = timestamp; + // Send another anomalous bucket + for (int i = 0; i < 10; i++) { + for (String ip : namedIps) { + data.add(createIpRecord(timestamp, ip)); + } + } + + // Some more normal buckets + for (int bucket = 0; bucket < 3; bucket++) { + for (int i = 0; i < 5; i++) { + data.add(createIpRecord(timestamp, randomAlphaOfLength(10))); + } + timestamp += TimeValue.timeValueHours(1).getMillis(); + } + + postData(job.getId(), joinBetween(0, data.size(), data)); + flushJob(job.getId(), false); + + GetRecordsAction.Request getRecordsRequest = new GetRecordsAction.Request(job.getId()); + getRecordsRequest.setStart(Long.toString(firstAnomalyTime + 1)); + records = getRecords(getRecordsRequest); + 
assertThat(records.size(), equalTo(2)); + for (AnomalyRecord record : records) { + assertThat(record.getTimestamp().getTime(), equalTo(secondAnomalyTime)); + assertThat(record.getOverFieldValue(), isOneOf("111.111.111.111", "222.222.222.222")); + } + + closeJob(job.getId()); + } + + private String createIpRecord(long timestamp, String ip) throws IOException { + Map record = new HashMap<>(); + record.put("time", timestamp); + record.put("ip", ip); + return createJsonRecord(record); + } + private String joinBetween(int start, int end, List input) { StringBuilder result = new StringBuilder(); for (int i = start; i < end; i++) { diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java similarity index 100% rename from plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java rename to qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java similarity index 100% rename from plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java rename to qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java similarity index 100% rename from plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java rename to qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java similarity index 95% rename from plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java rename to qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java index 0c2e4051c0e..9304f765c25 100644 --- a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java +++ b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java @@ -23,14 +23,18 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.tasks.Task; -import org.elasticsearch.test.SecurityIntegTestCase; +import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; +import org.elasticsearch.transport.Netty4Plugin; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.XPackClientPlugin; import 
org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; @@ -47,6 +51,7 @@ import org.elasticsearch.xpack.core.ml.action.GetRecordsAction; import org.elasticsearch.xpack.core.ml.action.OpenJobAction; import org.elasticsearch.xpack.core.ml.action.PostDataAction; import org.elasticsearch.xpack.core.ml.action.PutDatafeedAction; +import org.elasticsearch.xpack.core.ml.action.PutFilterAction; import org.elasticsearch.xpack.core.ml.action.PutJobAction; import org.elasticsearch.xpack.core.ml.action.RevertModelSnapshotAction; import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; @@ -59,6 +64,7 @@ import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.config.JobTaskStatus; import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; +import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; @@ -78,6 +84,8 @@ import java.io.IOException; import java.net.URISyntaxException; import java.nio.file.Path; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; @@ -92,10 +100,19 @@ import static org.hamcrest.Matchers.notNullValue; /** * Base class of ML integration tests that use a native autodetect process */ -abstract class MlNativeAutodetectIntegTestCase extends SecurityIntegTestCase { +abstract class MlNativeAutodetectIntegTestCase extends ESIntegTestCase { private List jobs = new ArrayList<>(); private List datafeeds = new ArrayList<>(); + @Override + protected Collection> nodePlugins() { + return Arrays.asList(LocalStateCompositeXPackPlugin.class, Netty4Plugin.class); + } + + @Override + protected Collection> transportClientPlugins() { + return Arrays.asList(XPackClientPlugin.class, Netty4Plugin.class); + } @Override protected Settings externalClusterClientSettings() { @@ -393,6 +410,11 @@ abstract class MlNativeAutodetectIntegTestCase extends SecurityIntegTestCase { return forecasts; } + protected boolean putMlFilter(MlFilter filter) { + PutFilterAction.Response response = client().execute(PutFilterAction.INSTANCE, new PutFilterAction.Request(filter)).actionGet(); + return response.isAcknowledged(); + } + @Override protected void ensureClusterStateConsistency() throws IOException { if (cluster() != null && cluster().size() > 0) { diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/OverallBucketsIT.java b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/OverallBucketsIT.java similarity index 100% rename from plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/OverallBucketsIT.java rename to qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/OverallBucketsIT.java diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java similarity index 100% rename from plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java rename to 
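The wrapped patch text above has lost the generic type parameters on the new plugin overrides in MlNativeAutodetectIntegTestCase (they render as Collection>). For reference, a minimal sketch of those two overrides with the usual ESIntegTestCase signatures restored; this is not the real base class and only compiles against the test framework and x-pack classes already imported in the patch.

[source,java]
----
import java.util.Arrays;
import java.util.Collection;

import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.transport.Netty4Plugin;
import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin;
import org.elasticsearch.xpack.core.XPackClientPlugin;

// sketch only: shows the overrides from the patch with their generics intact
abstract class PluginOverridesSketch extends ESIntegTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        // internal test nodes run the full x-pack plugin set plus the netty transport
        return Arrays.asList(LocalStateCompositeXPackPlugin.class, Netty4Plugin.class);
    }

    @Override
    protected Collection<Class<? extends Plugin>> transportClientPlugins() {
        // the transport client side only needs the x-pack client plugin
        return Arrays.asList(XPackClientPlugin.class, Netty4Plugin.class);
    }
}
----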
qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java similarity index 100% rename from plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java rename to qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java diff --git a/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/UpdateInterimResultsIT.java b/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/UpdateInterimResultsIT.java similarity index 100% rename from plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/UpdateInterimResultsIT.java rename to qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/UpdateInterimResultsIT.java diff --git a/qa/ml-no-bootstrap-tests/build.gradle b/qa/ml-no-bootstrap-tests/build.gradle index 2ad3a1b3a6a..cad5201a67b 100644 --- a/qa/ml-no-bootstrap-tests/build.gradle +++ b/qa/ml-no-bootstrap-tests/build.gradle @@ -1,7 +1,7 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:ml', configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('ml'), configuration: 'runtime') } diff --git a/qa/ml-single-node-tests/build.gradle b/qa/ml-single-node-tests/build.gradle index 18e2accfa9d..6063e2fcdf8 100644 --- a/qa/ml-single-node-tests/build.gradle +++ b/qa/ml-single-node-tests/build.gradle @@ -2,11 +2,11 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:ml', configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('ml'), configuration: 'runtime') } integTestCluster { setting 'xpack.security.enabled', 'false' - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path } diff --git a/qa/multi-cluster-search-security/build.gradle b/qa/multi-cluster-search-security/build.gradle index 11587ada251..bf66a848f26 100644 --- a/qa/multi-cluster-search-security/build.gradle +++ b/qa/multi-cluster-search-security/build.gradle @@ -3,8 +3,8 @@ import org.elasticsearch.gradle.test.RestIntegTestTask apply plugin: 'elasticsearch.standalone-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } task remoteClusterTest(type: RestIntegTestTask) { @@ -16,7 +16,7 @@ remoteClusterTestCluster { numNodes = 2 clusterName = 'remote-cluster' setting 'search.remote.connect', false - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setting 'xpack.watcher.enabled', 'false' setting 'xpack.monitoring.enabled', 'false' setting 'xpack.ml.enabled', 'false' @@ -42,7 +42,7 @@ task 
mixedClusterTest(type: RestIntegTestTask) {} mixedClusterTestCluster { dependsOn remoteClusterTestRunner - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setting 'xpack.watcher.enabled', 'false' setting 'xpack.monitoring.enabled', 'false' setting 'xpack.ml.enabled', 'false' diff --git a/qa/multi-node/build.gradle b/qa/multi-node/build.gradle index 746d08878ef..5b03d3df731 100644 --- a/qa/multi-node/build.gradle +++ b/qa/multi-node/build.gradle @@ -2,14 +2,14 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') } integTestCluster { distribution = 'zip' numNodes = 2 clusterName = 'multi-node' - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setting 'xpack.watcher.enabled', 'false' setting 'xpack.monitoring.enabled', 'false' setting 'xpack.ml.enabled', 'false' diff --git a/qa/openldap-tests/build.gradle b/qa/openldap-tests/build.gradle index 5fd5b4ccac0..eb843d17b0b 100644 --- a/qa/openldap-tests/build.gradle +++ b/qa/openldap-tests/build.gradle @@ -1,10 +1,13 @@ +Project idpFixtureProject = xpackProject("test:idp-fixture") +evaluationDependsOn(idpFixtureProject.path) + apply plugin: 'elasticsearch.standalone-test' apply plugin: 'elasticsearch.vagrantsupport' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:security', configuration: 'testArtifacts') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } processTestResources { @@ -16,18 +19,18 @@ processTestResources { sourceSets { test { resources { - srcDirs += project(":x-pack-elasticsearch:test:idp-fixture").file("src/main/resources/provision/generated") + srcDirs += idpFixtureProject.file("src/main/resources/provision/generated") } } } task openLdapFixture { - dependsOn "vagrantCheckVersion", "virtualboxCheckVersion", ":x-pack-elasticsearch:test:idp-fixture:up" + dependsOn "vagrantCheckVersion", "virtualboxCheckVersion", idpFixtureProject.up } if (project.rootProject.vagrantSupported) { test.dependsOn openLdapFixture - test.finalizedBy ":x-pack-elasticsearch:test:idp-fixture:halt" + test.finalizedBy idpFixtureProject.halt } else { test.enabled = false } diff --git a/qa/reindex-tests-with-security/build.gradle b/qa/reindex-tests-with-security/build.gradle index 5cb95c6999b..fb8a2a02b67 100644 --- a/qa/reindex-tests-with-security/build.gradle +++ b/qa/reindex-tests-with-security/build.gradle @@ -2,14 +2,14 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:security', configuration: 'testArtifacts') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') + testCompile 
project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: ':modules:reindex') } integTestCluster { - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path // Whitelist reindexing from the local node so we can test it. setting 'reindex.remote.whitelist', '127.0.0.1:*' setting 'xpack.ml.enabled', 'false' diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index f002b41c6d3..8a3daff55b1 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -10,9 +10,9 @@ apply plugin: 'elasticsearch.build' test.enabled = false dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:security', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') // to be moved in a later commit + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('security'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') // to be moved in a later commit } Closure waitWithAuth = { NodeInfo node, AntBuilder ant -> @@ -100,7 +100,7 @@ subprojects { String output = "generated-resources/${project.name}" task copyTestNodeKeystore(type: Copy) { - from project(':x-pack-elasticsearch:plugin:core') + from project(xpackModule('core')) .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks') into outputDir } @@ -115,7 +115,7 @@ subprojects { Object extension = extensions.findByName("${baseName}#oldClusterTestCluster") configure(extensions.findByName("${baseName}#oldClusterTestCluster")) { dependsOn copyTestNodeKeystore - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setupCommand 'setupTestUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' distribution = 'zip' bwcVersion = version @@ -160,7 +160,7 @@ subprojects { configure(extensions.findByName("${baseName}#mixedClusterTestCluster")) { dependsOn oldClusterTestRunner, "${baseName}#oldClusterTestCluster#node1.stop" - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setupCommand 'setupTestUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' distribution = 'zip' clusterName = 'rolling-upgrade' @@ -198,7 +198,7 @@ subprojects { configure(extensions.findByName("${baseName}#upgradedClusterTestCluster")) { dependsOn(mixedClusterTestRunner, "${baseName}#oldClusterTestCluster#node0.stop") - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setupCommand 'setupTestUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' distribution = 'zip' clusterName = 'rolling-upgrade' @@ -266,9 +266,9 @@ subprojects { check.dependsOn(integTest) dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') - testCompile project(path: ':x-pack-elasticsearch:plugin:watcher') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + testCompile project(path: xpackModule('watcher')) } compileTestJava.options.compilerArgs << 
"-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked" @@ -276,15 +276,15 @@ subprojects { // copy x-pack plugin info so it is on the classpath and security manager has the right permissions task copyXPackRestSpec(type: Copy) { dependsOn(project.configurations.restSpec, 'processTestResources') - from project(':x-pack-elasticsearch:plugin:core').sourceSets.test.resources + from project(xpackProject('plugin').path).sourceSets.test.resources include 'rest-api-spec/api/**' into project.sourceSets.test.output.resourcesDir } task copyXPackPluginProps(type: Copy) { dependsOn(copyXPackRestSpec) - from project(':x-pack-elasticsearch:plugin:core').file('src/main/plugin-metadata') - from project(':x-pack-elasticsearch:plugin:core').tasks.pluginProperties + from project(xpackModule('core')).file('src/main/plugin-metadata') + from project(xpackModule('core')).tasks.pluginProperties into outputDir } project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackPluginProps) diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/30_ml_jobs_crud.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/30_ml_jobs_crud.yml index 91936351369..7bfbb5ad8c4 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/30_ml_jobs_crud.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/30_ml_jobs_crud.yml @@ -7,10 +7,6 @@ setup: --- "Test get old cluster job": - - skip: - version: "all" - reason: "Awaiting fix of #1760" - - do: xpack.ml.get_jobs: job_id: old-cluster-job @@ -48,10 +44,6 @@ setup: --- "Create a job in the mixed cluster and write some data": - - skip: - version: "all" - reason: "Awaiting fix of #1760" - - do: xpack.ml.put_job: job_id: mixed-cluster-job @@ -94,3 +86,11 @@ setup: - do: xpack.ml.close_job: job_id: mixed-cluster-job + +--- +"Test get job with rules": + + - do: + xpack.ml.get_jobs: + job_id: old-cluster-job-with-rules + - match: { count: 1 } diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml index d4187e2f69a..8a06c91cc8a 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml @@ -6,10 +6,6 @@ setup: --- "Test old cluster datafeed": - - skip: - version: "all" - reason: "Awaiting fix of #1760" - - do: xpack.ml.get_datafeeds: datafeed_id: old-cluster-datafeed @@ -26,9 +22,6 @@ setup: --- "Put job and datafeed in mixed cluster": - - skip: - version: "all" - reason: "Awaiting fix of #1760" - do: xpack.ml.put_job: diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/30_ml_jobs_crud.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/30_ml_jobs_crud.yml index a8acecfead9..de78feea78b 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/30_ml_jobs_crud.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/30_ml_jobs_crud.yml @@ -1,8 +1,5 @@ --- "Put job on the old cluster and post some data": - - skip: - version: "all" - reason: "Awaiting fix of #1760" - do: xpack.ml.put_job: @@ -121,3 +118,77 @@ "time_format":"epoch" } } + +--- +"Test job with pre 6.2 rules": + + - skip: + version: "6.2.0 - " + reason: "Rules fields were renamed on 6.2.0" + + - do: + xpack.ml.put_job: + job_id: 
old-cluster-job-with-rules + body: > + { + "analysis_config": { + "detectors": [ + { + "function": "count", + "by_field_name": "country", + "detector_rules": [ + { + "rule_action": "filter_results", + "rule_conditions": [ + { + "condition_type":"numerical_actual", + "field_name":"country", + "field_value": "uk", + "condition": {"operator":"lt","value":"33.3"} + }, + {"condition_type":"categorical", "field_name":"country", "value_filter": "foo"} + ] + } + ] + } + ] + }, + "data_description" : {} + } + +--- +"Test job with post 6.2 rules": + + - skip: + version: " - 6.1.99" + reason: "Rules fields were renamed on 6.2.0" + + - do: + xpack.ml.put_job: + job_id: old-cluster-job-with-rules + body: > + { + "analysis_config": { + "detectors": [ + { + "function": "count", + "by_field_name": "country", + "rules": [ + { + "actions": ["filter_results"], + "conditions": [ + { + "type":"numerical_actual", + "field_name":"country", + "field_value": "uk", + "condition": {"operator":"lt","value":"33.3"} + }, + {"type":"categorical", "field_name":"country", "filter_id": "foo"} + ] + } + ] + } + ] + }, + "data_description" : {} + } diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml index be30862b526..c1317bdf3d6 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml @@ -1,8 +1,5 @@ --- "Put job and datafeed in old cluster": - - skip: - version: "all" - reason: "Awaiting fix of #1760" - do: xpack.ml.put_job: diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/30_ml_jobs_crud.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/30_ml_jobs_crud.yml index 1acfad77b26..1ea309b0902 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/30_ml_jobs_crud.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/30_ml_jobs_crud.yml @@ -8,9 +8,6 @@ setup: --- "Test open old jobs": - - skip: - version: "all" - reason: "Awaiting fix of #1760" - do: xpack.ml.open_job: @@ -97,3 +94,27 @@ setup: job_id: no-model-memory-limit-job - match: { acknowledged: true } +--- +"Test get job with rules": + + - do: + xpack.ml.get_jobs: + job_id: old-cluster-job-with-rules + - match: { count: 1 } + - match: { + jobs.0.analysis_config.detectors.0.rules: [ + { + "actions": ["filter_results"], + "conditions_connective": "or", + "conditions": [ + { + "type":"numerical_actual", + "field_name":"country", + "field_value": "uk", + "condition": {"operator":"lt","value":"33.3"} + }, + {"type":"categorical", "field_name":"country", "filter_id": "foo"} + ] + } + ] + } diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml index c6026ba94fd..ed6a66ae1a5 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml @@ -18,10 +18,6 @@ setup: --- "Test old and mixed cluster datafeeds": - - skip: - version: "all" - reason: "Awaiting fix of #1760" - - do: xpack.ml.get_datafeeds: datafeed_id: old-cluster-datafeed diff --git 
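The pair of "pre 6.2 rules" and "post 6.2 rules" jobs above exercises the detector rule field renames introduced in 6.2, and the upgraded-cluster check above verifies that an old job comes back in the new form (including the conditions_connective default of "or"). As a quick reference, a small sketch of the old-to-new field names; the mapping is read off the two YAML bodies and is illustrative only.

[source,java]
----
import java.util.LinkedHashMap;
import java.util.Map;

public class DetectorRuleFieldRenames {

    /** pre-6.2 field name -> post-6.2 field name, as exercised by the YAML tests above */
    public static final Map<String, String> PRE_TO_POST_6_2 = new LinkedHashMap<>();

    static {
        PRE_TO_POST_6_2.put("detector_rules", "rules");
        PRE_TO_POST_6_2.put("rule_action", "actions");        // a single action becomes an array of actions
        PRE_TO_POST_6_2.put("rule_conditions", "conditions");
        PRE_TO_POST_6_2.put("condition_type", "type");
        PRE_TO_POST_6_2.put("value_filter", "filter_id");
    }

    public static void main(String[] args) {
        PRE_TO_POST_6_2.forEach((oldName, newName) -> System.out.println(oldName + " -> " + newName));
    }
}
----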
a/qa/saml-idp-tests/build.gradle b/qa/saml-idp-tests/build.gradle index e3a87e7af9f..7632b82bae1 100644 --- a/qa/saml-idp-tests/build.gradle +++ b/qa/saml-idp-tests/build.gradle @@ -1,10 +1,13 @@ +Project idpFixtureProject = xpackProject("test:idp-fixture") +evaluationDependsOn(idpFixtureProject.path) + apply plugin: 'elasticsearch.standalone-test' apply plugin: 'elasticsearch.vagrantsupport' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') - testCompile project(path: ':x-pack-elasticsearch:plugin:security', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile 'com.google.jimfs:jimfs:1.1' } @@ -17,19 +20,19 @@ processTestResources { sourceSets { test { resources { - srcDirs += project(":x-pack-elasticsearch:test:idp-fixture").file("src/main/resources/provision/generated") - srcDirs += project(':x-pack-elasticsearch:plugin:security').file('src/test/resources') + srcDirs += idpFixtureProject.file("src/main/resources/provision/generated") + srcDirs += project(xpackModule('security')).file('src/test/resources') } } } task idpFixture { - dependsOn "vagrantCheckVersion", "virtualboxCheckVersion", ":x-pack-elasticsearch:test:idp-fixture:up" + dependsOn "vagrantCheckVersion", "virtualboxCheckVersion", idpFixtureProject.up } if (project.rootProject.vagrantSupported) { test.dependsOn idpFixture - test.finalizedBy ":x-pack-elasticsearch:test:idp-fixture:halt" + test.finalizedBy idpFixtureProject.halt } else { test.enabled = false } diff --git a/qa/security-client-tests/build.gradle b/qa/security-client-tests/build.gradle index e003310864a..a99d0ecf57c 100644 --- a/qa/security-client-tests/build.gradle +++ b/qa/security-client-tests/build.gradle @@ -2,14 +2,14 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:transport-client', configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } String outputDir = "generated-resources/${project.name}" task copyXPackPluginProps(type: Copy) { - from project(':x-pack-elasticsearch:plugin:core').file('src/main/plugin-metadata') - from project(':x-pack-elasticsearch:plugin:core').tasks.pluginProperties + from project(xpackModule('core')).file('src/main/plugin-metadata') + from project(xpackModule('core')).tasks.pluginProperties into outputDir } project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackPluginProps) @@ -19,7 +19,7 @@ integTestRunner { } integTestCluster { - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setting 'xpack.ml.enabled', 'false' setupCommand 'setupDummyUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' diff --git a/qa/security-example-extension/build.gradle b/qa/security-example-extension/build.gradle deleted file mode 100644 index b532f3fba41..00000000000 --- a/qa/security-example-extension/build.gradle +++ /dev/null @@ -1,85 +0,0 @@ -import 
org.elasticsearch.gradle.MavenFilteringHack -import org.elasticsearch.gradle.VersionProperties - -apply plugin: 'elasticsearch.build' - -dependencies { - provided "org.elasticsearch:elasticsearch:${versions.elasticsearch}" - provided project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - - testCompile "org.elasticsearch.test:framework:${project.versions.elasticsearch}" - testCompile project(path: ':x-pack-elasticsearch:transport-client', configuration: 'runtime') -} - -Map generateSubstitutions() { - def stringSnap = { version -> - if (version.endsWith("-SNAPSHOT")) { - return version.substring(0, version.length() - 9) - } - return version - } - return [ - 'version': stringSnap(version), - 'xpack.version': stringSnap(VersionProperties.elasticsearch), - 'java.version': targetCompatibility as String - ] -} - -String outputDir = "generated-resources/${project.name}" -task copyXPackPluginProps(type: Copy) { - from project(':x-pack-elasticsearch:plugin:core').file('src/main/plugin-metadata') - from project(':x-pack-elasticsearch:plugin:core').tasks.pluginProperties - into outputDir -} -project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackPluginProps) - -processResources { - MavenFilteringHack.filter(it, generateSubstitutions()) -} - -task buildZip(type:Zip, dependsOn: [jar]) { - from 'build/resources/main/x-pack-extension-descriptor.properties' - from 'build/resources/main/x-pack-extension-security.policy' - from project.jar -} - -task integTest(type: org.elasticsearch.gradle.test.RestIntegTestTask) { - mustRunAfter precommit -} - -integTestRunner { - systemProperty 'tests.security.manager', 'false' -} -integTestCluster { - dependsOn buildZip - plugin ':x-pack-elasticsearch:plugin' - setting 'xpack.security.authc.realms.custom.order', '0' - setting 'xpack.security.authc.realms.custom.type', 'custom' - setting 'xpack.security.authc.realms.custom.filtered_setting', 'should be filtered' - setting 'xpack.security.authc.realms.esusers.order', '1' - setting 'xpack.security.authc.realms.esusers.type', 'file' - setting 'xpack.security.authc.realms.native.type', 'native' - setting 'xpack.security.authc.realms.native.order', '2' - setting 'xpack.ml.enabled', 'false' - - // This is important, so that all the modules are available too. - // There are index templates that use token filters that are in analysis-module and - // processors are being used that are in ingest-common module. - distribution = 'zip' - - setupCommand 'setupDummyUser', - 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' - setupCommand 'installExtension', - 'bin/x-pack/extension', 'install', 'file:' + buildZip.archivePath - waitCondition = { node, ant -> - File tmpFile = new File(node.cwd, 'wait.success') - ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow", - dest: tmpFile.toString(), - username: 'test_user', - password: 'x-pack-test-password', - ignoreerrors: true, - retries: 10) - return tmpFile.exists() - } -} -check.dependsOn integTest diff --git a/qa/security-example-extension/src/main/java/org/elasticsearch/example/ExampleExtension.java b/qa/security-example-extension/src/main/java/org/elasticsearch/example/ExampleExtension.java deleted file mode 100644 index 37679692938..00000000000 --- a/qa/security-example-extension/src/main/java/org/elasticsearch/example/ExampleExtension.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.example; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.example.realm.CustomAuthenticationFailureHandler; -import org.elasticsearch.example.realm.CustomRealm; -import org.elasticsearch.example.role.CustomInMemoryRolesProvider; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xpack.core.extensions.XPackExtension; -import org.elasticsearch.xpack.core.security.authc.AuthenticationFailureHandler; -import org.elasticsearch.xpack.core.security.authc.Realm; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; - -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.BiConsumer; - -import static org.elasticsearch.example.role.CustomInMemoryRolesProvider.ROLE_A; -import static org.elasticsearch.example.role.CustomInMemoryRolesProvider.ROLE_B; - -/** - * An example x-pack extension for testing custom realms and custom role providers. - */ -public class ExampleExtension extends XPackExtension { - - static { - // check that the extension's policy works. - AccessController.doPrivileged((PrivilegedAction) () -> { - System.getSecurityManager().checkPrintJobAccess(); - return null; - }); - } - - @Override - public String name() { - return "custom realm example"; - } - - @Override - public String description() { - return "a very basic implementation of a custom realm to validate it works"; - } - - @Override - public Map getRealms(ResourceWatcherService resourceWatcherService) { - return Collections.singletonMap(CustomRealm.TYPE, CustomRealm::new); - } - - @Override - public AuthenticationFailureHandler getAuthenticationFailureHandler() { - return new CustomAuthenticationFailureHandler(); - } - - @Override - public Collection getRestHeaders() { - return Arrays.asList(CustomRealm.USER_HEADER, CustomRealm.PW_HEADER); - } - - @Override - public List getSettingsFilter() { - return Collections.singletonList("xpack.security.authc.realms.*.filtered_setting"); - } - - @Override - public List, ActionListener>>> - getRolesProviders(Settings settings, ResourceWatcherService resourceWatcherService) { - CustomInMemoryRolesProvider rp1 = new CustomInMemoryRolesProvider(settings, Collections.singletonMap(ROLE_A, "read")); - Map roles = new HashMap<>(); - roles.put(ROLE_A, "all"); - roles.put(ROLE_B, "all"); - CustomInMemoryRolesProvider rp2 = new CustomInMemoryRolesProvider(settings, roles); - return Arrays.asList(rp1, rp2); - } -} diff --git a/qa/security-example-extension/src/main/java/org/elasticsearch/example/realm/CustomAuthenticationFailureHandler.java b/qa/security-example-extension/src/main/java/org/elasticsearch/example/realm/CustomAuthenticationFailureHandler.java deleted file mode 100644 index e8cd4fc6ebf..00000000000 --- a/qa/security-example-extension/src/main/java/org/elasticsearch/example/realm/CustomAuthenticationFailureHandler.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.example.realm; - -import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; -import org.elasticsearch.xpack.core.security.authc.DefaultAuthenticationFailureHandler; -import org.elasticsearch.transport.TransportMessage; - -public class CustomAuthenticationFailureHandler extends DefaultAuthenticationFailureHandler { - - @Override - public ElasticsearchSecurityException failedAuthentication(RestRequest request, AuthenticationToken token, - ThreadContext context) { - ElasticsearchSecurityException e = super.failedAuthentication(request, token, context); - // set a custom header - e.addHeader("WWW-Authenticate", "custom-challenge"); - return e; - } - - @Override - public ElasticsearchSecurityException failedAuthentication(TransportMessage message, AuthenticationToken token, String action, - ThreadContext context) { - ElasticsearchSecurityException e = super.failedAuthentication(message, token, action, context); - // set a custom header - e.addHeader("WWW-Authenticate", "custom-challenge"); - return e; - } - - @Override - public ElasticsearchSecurityException missingToken(RestRequest request, ThreadContext context) { - ElasticsearchSecurityException e = super.missingToken(request, context); - // set a custom header - e.addHeader("WWW-Authenticate", "custom-challenge"); - return e; - } - - @Override - public ElasticsearchSecurityException missingToken(TransportMessage message, String action, ThreadContext context) { - ElasticsearchSecurityException e = super.missingToken(message, action, context); - // set a custom header - e.addHeader("WWW-Authenticate", "custom-challenge"); - return e; - } -} diff --git a/qa/security-example-extension/src/main/java/org/elasticsearch/example/realm/CustomRealm.java b/qa/security-example-extension/src/main/java/org/elasticsearch/example/realm/CustomRealm.java deleted file mode 100644 index 19ef9d2eb0d..00000000000 --- a/qa/security-example-extension/src/main/java/org/elasticsearch/example/realm/CustomRealm.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.example.realm; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; -import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; -import org.elasticsearch.xpack.core.security.authc.Realm; -import org.elasticsearch.xpack.core.security.authc.RealmConfig; -import org.elasticsearch.xpack.core.security.authc.support.CharArrays; -import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; -import org.elasticsearch.xpack.core.security.user.User; - -public class CustomRealm extends Realm { - - public static final String TYPE = "custom"; - - public static final String USER_HEADER = "User"; - public static final String PW_HEADER = "Password"; - - public static final String KNOWN_USER = "custom_user"; - public static final SecureString KNOWN_PW = new SecureString("x-pack-test-password".toCharArray()); - static final String[] ROLES = new String[] { "superuser" }; - - public CustomRealm(RealmConfig config) { - super(TYPE, config); - } - - @Override - public boolean supports(AuthenticationToken token) { - return token instanceof UsernamePasswordToken; - } - - @Override - public UsernamePasswordToken token(ThreadContext threadContext) { - String user = threadContext.getHeader(USER_HEADER); - if (user != null) { - String password = threadContext.getHeader(PW_HEADER); - if (password != null) { - return new UsernamePasswordToken(user, new SecureString(password.toCharArray())); - } - } - return null; - } - - @Override - public void authenticate(AuthenticationToken authToken, ActionListener listener) { - UsernamePasswordToken token = (UsernamePasswordToken)authToken; - final String actualUser = token.principal(); - if (KNOWN_USER.equals(actualUser)) { - if (CharArrays.constantTimeEquals(token.credentials().getChars(), KNOWN_PW.getChars())) { - listener.onResponse(AuthenticationResult.success(new User(actualUser, ROLES))); - } else { - listener.onResponse(AuthenticationResult.unsuccessful("Invalid password for user " + actualUser, null)); - } - } else { - listener.onResponse(AuthenticationResult.notHandled()); - } - } - - @Override - public void lookupUser(String username, ActionListener listener) { - listener.onResponse(null); - } -} diff --git a/qa/security-example-extension/src/main/java/org/elasticsearch/example/role/CustomInMemoryRolesProvider.java b/qa/security-example-extension/src/main/java/org/elasticsearch/example/role/CustomInMemoryRolesProvider.java deleted file mode 100644 index df9d3b5a6b8..00000000000 --- a/qa/security-example-extension/src/main/java/org/elasticsearch/example/role/CustomInMemoryRolesProvider.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.example.role; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; - -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.function.BiConsumer; - -/** - * A custom roles provider implementation for testing that serves - * static roles from memory. - */ -public class CustomInMemoryRolesProvider - extends AbstractComponent - implements BiConsumer, ActionListener>> { - - public static final String INDEX = "foo"; - public static final String ROLE_A = "roleA"; - public static final String ROLE_B = "roleB"; - - private final Map rolePermissionSettings; - - public CustomInMemoryRolesProvider(Settings settings, Map rolePermissionSettings) { - super(settings); - this.rolePermissionSettings = rolePermissionSettings; - } - - @Override - public void accept(Set roles, ActionListener> listener) { - Set roleDescriptors = new HashSet<>(); - for (String role : roles) { - if (rolePermissionSettings.containsKey(role)) { - roleDescriptors.add( - new RoleDescriptor(role, new String[] { "all" }, - new RoleDescriptor.IndicesPrivileges[] { - RoleDescriptor.IndicesPrivileges.builder() - .privileges(rolePermissionSettings.get(role)) - .indices(INDEX) - .grantedFields("*") - .build() - }, null) - ); - } - } - - listener.onResponse(roleDescriptors); - } -} diff --git a/qa/security-example-extension/src/main/resources/x-pack-extension-descriptor.properties b/qa/security-example-extension/src/main/resources/x-pack-extension-descriptor.properties deleted file mode 100644 index a849b42c24d..00000000000 --- a/qa/security-example-extension/src/main/resources/x-pack-extension-descriptor.properties +++ /dev/null @@ -1,6 +0,0 @@ -description=Custom Extension -version=${version} -name=exampleextension -classname=org.elasticsearch.example.ExampleExtension -java.version=${java.version} -xpack.version=${xpack.version} \ No newline at end of file diff --git a/qa/security-example-extension/src/main/resources/x-pack-extension-security.policy b/qa/security-example-extension/src/main/resources/x-pack-extension-security.policy deleted file mode 100644 index 6d05deba55c..00000000000 --- a/qa/security-example-extension/src/main/resources/x-pack-extension-security.policy +++ /dev/null @@ -1,3 +0,0 @@ -grant { - permission java.lang.RuntimePermission "queuePrintJob"; -}; \ No newline at end of file diff --git a/qa/security-example-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmIT.java b/qa/security-example-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmIT.java deleted file mode 100644 index f1e2a983535..00000000000 --- a/qa/security-example-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmIT.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.example.realm; - -import org.apache.http.message.BasicHeader; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseException; -import org.elasticsearch.client.transport.NoNodeAvailableException; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.network.NetworkModule; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.env.Environment; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.xpack.core.XPackClientPlugin; -import org.elasticsearch.xpack.client.PreBuiltXPackTransportClient; - -import java.util.Collection; -import java.util.Collections; -import java.util.List; - -import static org.hamcrest.Matchers.is; - -/** - * Integration test to test authentication with the custom realm - */ -public class CustomRealmIT extends ESIntegTestCase { - - @Override - protected Settings externalClusterClientSettings() { - return Settings.builder() - .put(ThreadContext.PREFIX + "." + CustomRealm.USER_HEADER, CustomRealm.KNOWN_USER) - .put(ThreadContext.PREFIX + "." + CustomRealm.PW_HEADER, CustomRealm.KNOWN_PW.toString()) - .put(NetworkModule.TRANSPORT_TYPE_KEY, "security4") - .build(); - } - - @Override - protected Collection> transportClientPlugins() { - return Collections.singleton(XPackClientPlugin.class); - } - - public void testHttpConnectionWithNoAuthentication() throws Exception { - try { - getRestClient().performRequest("GET", "/"); - fail("request should have failed"); - } catch(ResponseException e) { - Response response = e.getResponse(); - assertThat(response.getStatusLine().getStatusCode(), is(401)); - String value = response.getHeader("WWW-Authenticate"); - assertThat(value, is("custom-challenge")); - } - } - - public void testHttpAuthentication() throws Exception { - Response response = getRestClient().performRequest("GET", "/", - new BasicHeader(CustomRealm.USER_HEADER, CustomRealm.KNOWN_USER), - new BasicHeader(CustomRealm.PW_HEADER, CustomRealm.KNOWN_PW.toString())); - assertThat(response.getStatusLine().getStatusCode(), is(200)); - } - - public void testTransportClient() throws Exception { - NodesInfoResponse nodeInfos = client().admin().cluster().prepareNodesInfo().get(); - List nodes = nodeInfos.getNodes(); - assertTrue(nodes.isEmpty() == false); - TransportAddress publishAddress = randomFrom(nodes).getTransport().address().publishAddress(); - String clusterName = nodeInfos.getClusterName().value(); - - Settings settings = Settings.builder() - .put("cluster.name", clusterName) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) - .put(ThreadContext.PREFIX + "." + CustomRealm.USER_HEADER, CustomRealm.KNOWN_USER) - .put(ThreadContext.PREFIX + "." 
+ CustomRealm.PW_HEADER, CustomRealm.KNOWN_PW.toString()) - .build(); - try (TransportClient client = new PreBuiltXPackTransportClient(settings)) { - client.addTransportAddress(publishAddress); - ClusterHealthResponse response = client.admin().cluster().prepareHealth().execute().actionGet(); - assertThat(response.isTimedOut(), is(false)); - } - } - - public void testTransportClientWrongAuthentication() throws Exception { - NodesInfoResponse nodeInfos = client().admin().cluster().prepareNodesInfo().get(); - List nodes = nodeInfos.getNodes(); - assertTrue(nodes.isEmpty() == false); - TransportAddress publishAddress = randomFrom(nodes).getTransport().address().publishAddress(); - String clusterName = nodeInfos.getClusterName().value(); - - Settings settings = Settings.builder() - .put("cluster.name", clusterName) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) - .put(ThreadContext.PREFIX + "." + CustomRealm.USER_HEADER, CustomRealm.KNOWN_USER + randomAlphaOfLength(1)) - .put(ThreadContext.PREFIX + "." + CustomRealm.PW_HEADER, CustomRealm.KNOWN_PW.toString()) - .build(); - try (TransportClient client = new PreBuiltXPackTransportClient(settings)) { - client.addTransportAddress(publishAddress); - client.admin().cluster().prepareHealth().execute().actionGet(); - fail("authentication failure should have resulted in a NoNodesAvailableException"); - } catch (NoNodeAvailableException e) { - // expected - } - } - - public void testSettingsFiltering() throws Exception { - NodesInfoResponse nodeInfos = client().admin().cluster().prepareNodesInfo().clear().setSettings(true).get(); - for(NodeInfo info : nodeInfos.getNodes()) { - Settings settings = info.getSettings(); - assertNotNull(settings); - assertNull(settings.get("xpack.security.authc.realms.custom.filtered_setting")); - assertEquals(CustomRealm.TYPE, settings.get("xpack.security.authc.realms.custom.type")); - } - } -} diff --git a/qa/security-example-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmTests.java b/qa/security-example-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmTests.java deleted file mode 100644 index 123bdacf17a..00000000000 --- a/qa/security-example-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmTests.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.example.realm; - -import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; -import org.elasticsearch.xpack.core.security.authc.RealmConfig; -import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; -import org.elasticsearch.xpack.core.security.user.User; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.notNullValue; - -public class CustomRealmTests extends ESTestCase { - public void testAuthenticate() { - Settings globalSettings = Settings.builder().put("path.home", createTempDir()).build(); - CustomRealm realm = new CustomRealm(new RealmConfig("test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings))); - SecureString password = CustomRealm.KNOWN_PW.clone(); - UsernamePasswordToken token = new UsernamePasswordToken(CustomRealm.KNOWN_USER, password); - PlainActionFuture plainActionFuture = new PlainActionFuture<>(); - realm.authenticate(token, plainActionFuture); - User user = plainActionFuture.actionGet().getUser(); - assertThat(user, notNullValue()); - assertThat(user.roles(), equalTo(CustomRealm.ROLES)); - assertThat(user.principal(), equalTo(CustomRealm.KNOWN_USER)); - } - - public void testAuthenticateBadUser() { - Settings globalSettings = Settings.builder().put("path.home", createTempDir()).build(); - CustomRealm realm = new CustomRealm(new RealmConfig("test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings))); - SecureString password = CustomRealm.KNOWN_PW.clone(); - UsernamePasswordToken token = new UsernamePasswordToken(CustomRealm.KNOWN_USER + "1", password); - PlainActionFuture plainActionFuture = new PlainActionFuture<>(); - realm.authenticate(token, plainActionFuture); - final AuthenticationResult result = plainActionFuture.actionGet(); - assertThat(result.getStatus(), equalTo(AuthenticationResult.Status.CONTINUE)); - } -} diff --git a/qa/security-example-extension/src/test/java/org/elasticsearch/example/role/CustomRolesProviderIT.java b/qa/security-example-extension/src/test/java/org/elasticsearch/example/role/CustomRolesProviderIT.java deleted file mode 100644 index 4e1fb722560..00000000000 --- a/qa/security-example-extension/src/test/java/org/elasticsearch/example/role/CustomRolesProviderIT.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.example.role; - -import org.apache.http.message.BasicHeader; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseException; -import org.elasticsearch.common.network.NetworkModule; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.example.realm.CustomRealm; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.xpack.core.XPackClientPlugin; -import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; -import org.elasticsearch.xpack.core.security.client.SecurityClient; - -import java.util.Collection; -import java.util.Collections; - -import static org.elasticsearch.example.role.CustomInMemoryRolesProvider.INDEX; -import static org.elasticsearch.example.role.CustomInMemoryRolesProvider.ROLE_A; -import static org.elasticsearch.example.role.CustomInMemoryRolesProvider.ROLE_B; -import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; -import static org.hamcrest.Matchers.is; - -/** - * Integration test for custom roles providers. - */ -public class CustomRolesProviderIT extends ESIntegTestCase { - - private static final String TEST_USER = "test_user"; - private static final String TEST_PWD = "change_me"; - - @Override - protected Settings externalClusterClientSettings() { - return Settings.builder() - .put(ThreadContext.PREFIX + "." + CustomRealm.USER_HEADER, CustomRealm.KNOWN_USER) - .put(ThreadContext.PREFIX + "." + CustomRealm.PW_HEADER, CustomRealm.KNOWN_PW.toString()) - .put(NetworkModule.TRANSPORT_TYPE_KEY, "security4") - .build(); - } - - @Override - protected Collection> transportClientPlugins() { - return Collections.singleton(XPackClientPlugin.class); - } - - public void setupTestUser(String role) { - SecurityClient securityClient = new SecurityClient(client()); - securityClient.preparePutUser(TEST_USER, TEST_PWD.toCharArray(), role).get(); - } - - public void testAuthorizedCustomRoleSucceeds() throws Exception { - setupTestUser(ROLE_B); - // roleB has all permissions on index "foo", so creating "foo" should succeed - Response response = getRestClient().performRequest("PUT", "/" + INDEX, authHeader()); - assertThat(response.getStatusLine().getStatusCode(), is(200)); - } - - public void testFirstResolvedRoleTakesPrecedence() throws Exception { - // the first custom roles provider has set ROLE_A to only have read permission on the index, - // the second custom roles provider has set ROLE_A to have all permissions, but since - // the first custom role provider appears first in order, it should take precedence and deny - // permission to create the index - setupTestUser(ROLE_A); - // roleB has all permissions on index "foo", so creating "foo" should succeed - try { - getRestClient().performRequest("PUT", "/" + INDEX, authHeader()); - fail(ROLE_A + " should not be authorized to create index " + INDEX); - } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); - } - } - - public void testUnresolvedRoleDoesntSucceed() throws Exception { - setupTestUser("unknown"); - // roleB has all permissions on index "foo", so creating "foo" should succeed - try { - getRestClient().performRequest("PUT", "/" + INDEX, authHeader()); - fail(ROLE_A + " should not be authorized to create index " + INDEX); - } catch 
(ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); - } - } - - private BasicHeader authHeader() { - return new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - basicAuthHeaderValue(TEST_USER, new SecureString(TEST_PWD.toCharArray()))); - } -} diff --git a/qa/security-example-spi-extension/build.gradle b/qa/security-example-spi-extension/build.gradle index f6e5e44398a..fa53d6e4e2e 100644 --- a/qa/security-example-spi-extension/build.gradle +++ b/qa/security-example-spi-extension/build.gradle @@ -8,8 +8,8 @@ esplugin { } dependencies { - provided project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:transport-client', configuration: 'runtime') + provided project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } @@ -19,7 +19,7 @@ integTestRunner { integTestCluster { dependsOn buildZip - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setting 'xpack.security.authc.realms.custom.order', '0' setting 'xpack.security.authc.realms.custom.type', 'custom' setting 'xpack.security.authc.realms.custom.filtered_setting', 'should be filtered' diff --git a/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/ExampleSecurityExtension.java b/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/ExampleSecurityExtension.java index 88b6e88aa19..e426265c8a4 100644 --- a/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/ExampleSecurityExtension.java +++ b/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/ExampleSecurityExtension.java @@ -14,7 +14,7 @@ import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.core.security.authc.AuthenticationFailureHandler; import org.elasticsearch.xpack.core.security.authc.Realm; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.security.SecurityExtension; +import org.elasticsearch.xpack.core.security.SecurityExtension; import java.security.AccessController; import java.security.PrivilegedAction; diff --git a/qa/security-example-spi-extension/src/main/resources/META-INF/services/org.elasticsearch.xpack.security.SecurityExtension b/qa/security-example-spi-extension/src/main/resources/META-INF/services/org.elasticsearch.xpack.core.security.SecurityExtension similarity index 100% rename from qa/security-example-spi-extension/src/main/resources/META-INF/services/org.elasticsearch.xpack.security.SecurityExtension rename to qa/security-example-spi-extension/src/main/resources/META-INF/services/org.elasticsearch.xpack.core.security.SecurityExtension diff --git a/qa/security-migrate-tests/build.gradle b/qa/security-migrate-tests/build.gradle index 860c72c8fa6..d3821c5f5a7 100644 --- a/qa/security-migrate-tests/build.gradle +++ b/qa/security-migrate-tests/build.gradle @@ -2,13 +2,13 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:security', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:transport-client', configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: 
xpackModule('security'), configuration: 'runtime') + testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } integTestCluster { - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path extraConfigFile 'x-pack/roles.yml', 'roles.yml' [ test_admin: 'superuser', diff --git a/qa/security-setup-password-tests/build.gradle b/qa/security-setup-password-tests/build.gradle index e1b74de5aab..c6a6e652260 100644 --- a/qa/security-setup-password-tests/build.gradle +++ b/qa/security-setup-password-tests/build.gradle @@ -2,9 +2,9 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:security', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('security'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } integTestRunner { @@ -12,7 +12,7 @@ integTestRunner { } integTestCluster { - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setupCommand 'setupTestAdmin', 'bin/x-pack/users', 'useradd', "test_admin", '-p', 'x-pack-test-password', '-r', "superuser" waitCondition = { node, ant -> diff --git a/qa/security-tools-tests/build.gradle b/qa/security-tools-tests/build.gradle index 5371a348dbf..5df22c557db 100644 --- a/qa/security-tools-tests/build.gradle +++ b/qa/security-tools-tests/build.gradle @@ -1,14 +1,14 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { - testCompile project(':x-pack-elasticsearch:plugin:security') - testCompile project(path: ':x-pack-elasticsearch:plugin:security', configuration: 'testArtifacts') + testCompile project(xpackModule('security')) + testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile 'com.google.jimfs:jimfs:1.1' testCompile 'com.google.guava:guava:16.0.1' } // add test resources from security, so certificate tool tests can use example certs -sourceSets.test.resources.srcDirs(project(':x-pack-elasticsearch:plugin:security').sourceSets.test.resources.srcDirs) +sourceSets.test.resources.srcDirs(project(xpackModule('security')).sourceSets.test.resources.srcDirs) // we have to repeate these patterns because the security test resources are effectively in the src of this project forbiddenPatterns { diff --git a/qa/smoke-test-graph-with-security/build.gradle b/qa/smoke-test-graph-with-security/build.gradle index 4f48c21f45d..88d5368bf9f 100644 --- a/qa/smoke-test-graph-with-security/build.gradle +++ b/qa/smoke-test-graph-with-security/build.gradle @@ -2,19 +2,19 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') } // bring in graph rest test suite task copyGraphRestTests(type: Copy) { into project.sourceSets.test.output.resourcesDir - from project(':x-pack-elasticsearch:plugin').sourceSets.test.resources.srcDirs + from project(xpackProject('plugin').path).sourceSets.test.resources.srcDirs include 'rest-api-spec/test/graph/**' } integTestCluster { dependsOn copyGraphRestTests - plugin 
':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path extraConfigFile 'x-pack/roles.yml', 'roles.yml' setupCommand 'setupTestAdminUser', 'bin/x-pack/users', 'useradd', 'test_admin', '-p', 'x-pack-test-password', '-r', 'superuser' diff --git a/qa/smoke-test-ml-with-security/build.gradle b/qa/smoke-test-ml-with-security/build.gradle index 20c2cd44482..0ff59a76b6d 100644 --- a/qa/smoke-test-ml-with-security/build.gradle +++ b/qa/smoke-test-ml-with-security/build.gradle @@ -2,15 +2,15 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') - testCompile project(path: ':x-pack-elasticsearch:plugin', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + testCompile project(path: xpackProject('plugin').path, configuration: 'testArtifacts') } // bring in machine learning rest test suite task copyMlRestTests(type: Copy) { into project.sourceSets.test.output.resourcesDir - from project(':x-pack-elasticsearch:plugin').sourceSets.test.resources.srcDirs + from project(xpackProject('plugin').path).sourceSets.test.resources.srcDirs include 'rest-api-spec/test/ml/**' } @@ -91,7 +91,7 @@ integTestRunner { integTestCluster { dependsOn copyMlRestTests - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path extraConfigFile 'x-pack/roles.yml', 'roles.yml' setupCommand 'setupTestAdminUser', 'bin/x-pack/users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser' diff --git a/qa/smoke-test-monitoring-with-watcher/build.gradle b/qa/smoke-test-monitoring-with-watcher/build.gradle index c9e739961f6..430939994a6 100644 --- a/qa/smoke-test-monitoring-with-watcher/build.gradle +++ b/qa/smoke-test-monitoring-with-watcher/build.gradle @@ -2,13 +2,13 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core') - testCompile project(path: ':x-pack-elasticsearch:plugin:watcher') - testCompile project(path: ':x-pack-elasticsearch:plugin:monitoring') + testCompile project(path: xpackModule('core')) + testCompile project(path: xpackModule('watcher')) + testCompile project(path: xpackModule('monitoring')) } integTestCluster { - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setting 'xpack.monitoring.enabled', 'true' setting 'xpack.watcher.enabled', 'true' setting 'xpack.security.enabled', 'false' diff --git a/qa/smoke-test-plugins-ssl/build.gradle b/qa/smoke-test-plugins-ssl/build.gradle index ebe1d754e75..a4ceb257f39 100644 --- a/qa/smoke-test-plugins-ssl/build.gradle +++ b/qa/smoke-test-plugins-ssl/build.gradle @@ -14,13 +14,13 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') } String outputDir = "generated-resources/${project.name}" task copyXPackPluginProps(type: Copy) { - from project(':x-pack-elasticsearch:plugin:core').file('src/main/plugin-metadata') - from project(':x-pack-elasticsearch:plugin:core').tasks.pluginProperties + 
from project(xpackModule('core')).file('src/main/plugin-metadata') + from project(xpackModule('core')).tasks.pluginProperties into outputDir } project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackPluginProps) @@ -160,7 +160,7 @@ integTestCluster.dependsOn(importClientCertificateInNodeKeyStore, importNodeCert ext.pluginsCount = 0 -project(':x-pack-elasticsearch:plugin').subprojects { Project p -> +project(xpackProject('plugin').path).subprojects { Project p -> // the meta plugin contains the individual xpack plugins if (p.extensions.findByName('esplugin') != null) { pluginsCount += 1 @@ -188,7 +188,7 @@ integTestCluster { setting 'xpack.ml.enabled', 'false' - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path // copy keystores into config/ extraConfigFile nodeKeystore.name, nodeKeystore diff --git a/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java b/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java index d87ae7b8b38..06f66990ad4 100644 --- a/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java +++ b/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java @@ -89,7 +89,7 @@ public class SmokeTestMonitoringWithSecurityIT extends ESIntegTestCase { }); // Waits for indices to be ready - ensureYellow(MONITORING_PATTERN); + ensureYellowAndNoInitializingShards(MONITORING_PATTERN); // Checks that the HTTP exporter has successfully exported some data assertBusy(() -> { diff --git a/qa/smoke-test-plugins/build.gradle b/qa/smoke-test-plugins/build.gradle index bc10dd376cf..560b336abd0 100644 --- a/qa/smoke-test-plugins/build.gradle +++ b/qa/smoke-test-plugins/build.gradle @@ -4,11 +4,11 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') } ext.pluginsCount = 0 -project(':x-pack-elasticsearch:plugin').subprojects { Project p -> +project(xpackProject('plugin').path).subprojects { Project p -> // the meta plugin contains the individual xpack plugins if (p.extensions.findByName('esplugin') != null) { pluginsCount += 1 @@ -21,7 +21,7 @@ project.rootProject.subprojects.findAll { it.path.startsWith(':plugins:') }.each } integTestCluster { - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setupCommand 'setupDummyUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' diff --git a/qa/smoke-test-security-with-mustache/build.gradle b/qa/smoke-test-security-with-mustache/build.gradle index e3a7187a2e9..fa0ecc49ebd 100644 --- a/qa/smoke-test-security-with-mustache/build.gradle +++ b/qa/smoke-test-security-with-mustache/build.gradle @@ -2,13 +2,13 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: ':modules:lang-mustache', configuration: 'runtime') } 
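Several of the `dependencies` blocks in these QA projects pull in `configuration: 'testArtifacts'` from xpackModule('core') or xpackModule('security'). That configuration has to be exposed by the depended-on project itself; the sketch below shows, with assumed task and classifier names, the usual Gradle pattern for publishing compiled test classes on such a configuration. It is an illustration of the common approach, not the exact wiring used by the core or security build files.

[source,groovy]
----
// Minimal sketch (assumed names): expose a project's test classes on a
// 'testArtifacts' configuration so sibling projects can reuse test fixtures.
configurations {
    testArtifacts.extendsFrom testRuntime  // new configuration inheriting the test dependencies
}

task testJar(type: Jar) {                  // hypothetical task name
    classifier 'test'                      // keeps it distinct from the main jar
    from sourceSets.test.output            // package the compiled test classes and resources
}

artifacts {
    testArtifacts testJar                  // publish the test jar on the new configuration
}

// A consuming project then only needs the one-line dependency seen above, e.g.:
// testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
----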
integTestCluster { - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setting 'xpack.watcher.enabled', 'false' setting 'xpack.monitoring.enabled', 'false' setupCommand 'setupDummyUser', diff --git a/qa/smoke-test-watcher-with-mustache/build.gradle b/qa/smoke-test-watcher-with-mustache/build.gradle index 06c54e7f8d1..cbd848aca02 100644 --- a/qa/smoke-test-watcher-with-mustache/build.gradle +++ b/qa/smoke-test-watcher-with-mustache/build.gradle @@ -2,13 +2,13 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:watcher', configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('watcher'), configuration: 'runtime') testCompile project(path: ':modules:lang-mustache', configuration: 'runtime') } integTestCluster { - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setting 'xpack.security.enabled', 'false' setting 'xpack.monitoring.enabled', 'false' } diff --git a/qa/smoke-test-watcher-with-painless/build.gradle b/qa/smoke-test-watcher-with-painless/build.gradle index 186b10b55e6..b4b57a682c5 100644 --- a/qa/smoke-test-watcher-with-painless/build.gradle +++ b/qa/smoke-test-watcher-with-painless/build.gradle @@ -2,12 +2,12 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') testCompile project(path: ':modules:lang-painless', configuration: 'runtime') } integTestCluster { - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setting 'xpack.security.enabled', 'false' setting 'xpack.monitoring.enabled', 'false' } diff --git a/qa/smoke-test-watcher-with-security/build.gradle b/qa/smoke-test-watcher-with-security/build.gradle index cd17ff436fe..81c313d108c 100644 --- a/qa/smoke-test-watcher-with-security/build.gradle +++ b/qa/smoke-test-watcher-with-security/build.gradle @@ -2,13 +2,13 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') } // bring in watcher rest test suite task copyWatcherRestTests(type: Copy) { into project.sourceSets.test.output.resourcesDir - from project(':x-pack-elasticsearch:plugin').sourceSets.test.resources.srcDirs + from project(xpackProject('plugin').path).sourceSets.test.resources.srcDirs include 'rest-api-spec/test/watcher/**' } @@ -28,7 +28,7 @@ integTestCluster { setting 'xpack.notification.email.account._email.smtp.port', '587' setting 'xpack.notification.email.account._email.smtp.user', '_user' setting 'xpack.notification.email.account._email.smtp.password', '_passwd' - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path extraConfigFile 'x-pack/roles.yml', 'roles.yml' setupCommand 'setupTestAdminUser', 'bin/x-pack/users', 'useradd', 'test_admin', '-p', 'x-pack-test-password', '-r', 'superuser' diff --git a/qa/smoke-test-watcher/build.gradle b/qa/smoke-test-watcher/build.gradle index f5a67548dbc..f17e3f8ecd6 100644 --- 
a/qa/smoke-test-watcher/build.gradle +++ b/qa/smoke-test-watcher/build.gradle @@ -7,7 +7,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') } ext { @@ -18,7 +18,7 @@ ext { } integTestCluster { - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setting 'xpack.security.enabled', 'false' setting 'xpack.monitoring.enabled', 'false' setting 'xpack.ml.enabled', 'false' diff --git a/qa/transport-client-tests/build.gradle b/qa/transport-client-tests/build.gradle index 0cdcc6e2513..9b718c0dde3 100644 --- a/qa/transport-client-tests/build.gradle +++ b/qa/transport-client-tests/build.gradle @@ -2,11 +2,11 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' dependencies { - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:transport-client', configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') } integTestCluster { setting 'xpack.security.enabled', 'false' - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path } diff --git a/qa/tribe-tests-with-license/build.gradle b/qa/tribe-tests-with-license/build.gradle index 5b2245647ab..69430667e2c 100644 --- a/qa/tribe-tests-with-license/build.gradle +++ b/qa/tribe-tests-with-license/build.gradle @@ -8,19 +8,19 @@ apply plugin: 'elasticsearch.rest-test' dependencies { testCompile project(path: ':modules:tribe', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin', configuration: 'testArtifacts') + testCompile project(path: xpackProject('plugin').path, configuration: 'testArtifacts') // TODO: remove all these test deps, this is completely bogus, guava is being force upgraded - testCompile project(path: ':x-pack-elasticsearch:plugin:deprecation', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:graph', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:logstash', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:ml', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:monitoring', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:security', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:upgrade', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:watcher', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') - testCompile project(path: ':x-pack-elasticsearch:plugin:monitoring', configuration: 'testArtifacts') + testCompile project(path: xpackModule('deprecation'), configuration: 'runtime') + testCompile project(path: xpackModule('graph'), configuration: 'runtime') + testCompile project(path: xpackModule('logstash'), configuration: 'runtime') + testCompile project(path: xpackModule('ml'), configuration: 'runtime') + testCompile project(path: xpackModule('monitoring'), configuration: 'runtime') + testCompile project(path: xpackModule('security'), configuration: 'runtime') + testCompile 
project(path: xpackModule('upgrade'), configuration: 'runtime') + testCompile project(path: xpackModule('watcher'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + testCompile project(path: xpackModule('monitoring'), configuration: 'testArtifacts') } compileTestJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" @@ -35,7 +35,7 @@ test { include '**/*Tests.class' } -String licensePath = project(':x-pack-elasticsearch:license-tools').projectDir.toPath().resolve('src/test/resources').toString() +String licensePath = xpackProject('license-tools').projectDir.toPath().resolve('src/test/resources').toString() sourceSets { test { resources { @@ -53,7 +53,7 @@ ClusterConfiguration cluster1Config = new ClusterConfiguration(project) cluster1Config.clusterName = 'cluster1' cluster1Config.setting('node.name', 'cluster1-node1') // x-pack -cluster1Config.plugin(':x-pack-elasticsearch:plugin') +cluster1Config.plugin(xpackProject('plugin').path) cluster1Config.setting('xpack.monitoring.enabled', false) cluster1Config.setting('xpack.security.enabled', false) cluster1Config.setting('xpack.watcher.enabled', false) @@ -67,7 +67,7 @@ ClusterConfiguration cluster2Config = new ClusterConfiguration(project) cluster2Config.clusterName = 'cluster2' cluster2Config.setting('node.name', 'cluster2-node1') // x-pack -cluster2Config.plugin(':x-pack-elasticsearch:plugin') +cluster2Config.plugin(xpackProject('plugin').path) cluster2Config.setting('xpack.monitoring.enabled', false) cluster2Config.setting('xpack.monitoring.enabled', false) cluster2Config.setting('xpack.security.enabled', false) @@ -100,7 +100,7 @@ integTestCluster { setting 'tribe.cluster2.xpack.graph.enabled', false setting 'tribe.cluster2.xpack.ml.enabled', false // x-pack - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setting 'xpack.monitoring.enabled', false setting 'xpack.monitoring.enabled', false setting 'xpack.security.enabled', false diff --git a/qa/tribe-tests-with-security/build.gradle b/qa/tribe-tests-with-security/build.gradle index 18889359e0f..798adcfdc04 100644 --- a/qa/tribe-tests-with-security/build.gradle +++ b/qa/tribe-tests-with-security/build.gradle @@ -8,9 +8,9 @@ apply plugin: 'elasticsearch.rest-test' dependencies { testCompile project(path: ':modules:tribe', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'runtime') - testCompile project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'testArtifacts') - testCompile project(path: ':x-pack-elasticsearch:plugin:security', configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile project(path: ':modules:analysis-common', configuration: 'runtime') } @@ -18,7 +18,7 @@ namingConventions.skipIntegTestInDisguise = true compileTestJava.options.compilerArgs << "-Xlint:-try" -String xpackPath = project(':x-pack-elasticsearch:plugin:core').projectDir.toPath().resolve('src/test/resources').toString() +String xpackPath = project(xpackModule('core')).projectDir.toPath().resolve('src/test/resources').toString() sourceSets { test { resources { @@ -40,13 +40,13 @@ configOne.clusterName = 'cluster1' configOne.setting('node.name', 'cluster1-node1') configOne.setting('xpack.monitoring.enabled', false) 
configOne.setting('xpack.ml.enabled', false) -configOne.plugin(':x-pack-elasticsearch:plugin') +configOne.plugin(xpackProject('plugin').path) configOne.module(project.project(':modules:analysis-common')) configOne.setupCommand('setupDummyUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser') configOne.waitCondition = { node, ant -> File tmpFile = new File(node.cwd, 'wait.success') - ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=1&wait_for_status=yellow", + ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=1&wait_for_status=yellow&timeout=60s", dest: tmpFile.toString(), username: 'test_user', password: 'x-pack-test-password', @@ -62,13 +62,13 @@ configTwo.clusterName = 'cluster2' configTwo.setting('node.name', 'cluster2-node1') configTwo.setting('xpack.monitoring.enabled', false) configTwo.setting('xpack.ml.enabled', false) -configTwo.plugin(':x-pack-elasticsearch:plugin') +configTwo.plugin(xpackProject('plugin').path) configTwo.module(project.project(':modules:analysis-common')) configTwo.setupCommand('setupDummyUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser') configTwo.waitCondition = { node, ant -> File tmpFile = new File(node.cwd, 'wait.success') - ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=1&wait_for_status=yellow", + ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=1&wait_for_status=yellow&timeout=60s", dest: tmpFile.toString(), username: 'test_user', password: 'x-pack-test-password', @@ -80,7 +80,7 @@ List cluster2Nodes = ClusterFormationTasks.setup(project, 'clusterTwo' integTestCluster { dependsOn setupClusterOne, setupClusterTwo - plugin ':x-pack-elasticsearch:plugin' + plugin xpackProject('plugin').path setupCommand 'setupDummyUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' setting 'xpack.monitoring.enabled', false @@ -99,7 +99,7 @@ integTestCluster { waitCondition = { node, ant -> File tmpFile = new File(node.cwd, 'wait.success') // 5 nodes: tribe + clusterOne (1 node + tribe internal node) + clusterTwo (1 node + tribe internal node) - ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=5&wait_for_status=yellow", + ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=5&wait_for_status=yellow&timeout=60s", dest: tmpFile.toString(), username: 'test_user', password: 'x-pack-test-password', diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 5dce18ffefd..32eee470390 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -1,3 +1,8 @@ +import org.elasticsearch.gradle.plugin.MetaPluginBuildPlugin +import org.elasticsearch.gradle.plugin.MetaPluginPropertiesExtension +import org.elasticsearch.gradle.plugin.PluginBuildPlugin +import org.elasticsearch.gradle.plugin.PluginPropertiesExtension + apply plugin: 'elasticsearch.vagrantsupport' apply plugin: 'elasticsearch.vagrant' @@ -6,8 +11,41 @@ esvagrant { } dependencies { - bats project(path: ':x-pack-elasticsearch:plugin:core', configuration: 'zip') + // Packaging tests use the x-pack meta plugin + bats project(path: xpackProject('plugin').path, configuration: 'zip') // Inherit Bats test utils from :qa:vagrant project bats project(path: ':qa:vagrant', configuration: 'bats') } + +Map<String, List<String>> metaPlugins = [:] +for (Project metaPlugin : project.rootProject.subprojects) { + if (metaPlugin.plugins.hasPlugin(MetaPluginBuildPlugin)) { + 
MetaPluginPropertiesExtension extension = metaPlugin.extensions.findByName('es_meta_plugin') + if (extension != null) { + List plugins = [] + metaPlugin.subprojects.each { + if (extension.plugins.contains(it.name)) { + Project plugin = (Project) it + if (plugin.plugins.hasPlugin(PluginBuildPlugin)) { + PluginPropertiesExtension esplugin = plugin.extensions.findByName('esplugin') + if (esplugin != null) { + plugins.add(esplugin.name) + } + } + } + } + metaPlugins.put(extension.name, plugins.toSorted()) + } + } +} + +setupBats { + doLast { + metaPlugins.each{ name, plugins -> + File expectedMetaPlugins = file("build/plugins/${name}.expected") + expectedMetaPlugins.parentFile.mkdirs() + expectedMetaPlugins.setText(plugins.join('\n'), 'UTF-8') + } + } +} diff --git a/qa/vagrant/src/test/resources/packaging/utils/xpack.bash b/qa/vagrant/src/test/resources/packaging/utils/xpack.bash index f138d9a16f5..6fa11e680b8 100644 --- a/qa/vagrant/src/test/resources/packaging/utils/xpack.bash +++ b/qa/vagrant/src/test/resources/packaging/utils/xpack.bash @@ -5,37 +5,81 @@ # you may not use this file except in compliance with the Elastic License. install_xpack() { - install_and_check_plugin x pack x-pack-core-*.jar x-pack-graph-*.jar x-pack-ml-*.jar \ - x-pack-monitoring-*.jar x-pack-security-*.jar x-pack-watcher-*.jar + install_meta_plugin x-pack } # Checks that X-Pack files are correctly installed verify_xpack_installation() { + local name="x-pack" local user="$ESPLUGIN_COMMAND_USER" local group="$ESPLUGIN_COMMAND_USER" - assert_file "$ESHOME/bin/x-pack" d $user $group 755 - assert_file "$ESHOME/bin/x-pack/certgen" f $user $group 755 - assert_file "$ESHOME/bin/x-pack/croneval" f $user $group 755 - assert_file "$ESHOME/bin/x-pack/extension" f $user $group 755 - assert_file "$ESHOME/bin/x-pack/migrate" f $user $group 755 - assert_file "$ESHOME/bin/x-pack/saml-metadata" f $user $group 755 - assert_file "$ESHOME/bin/x-pack/setup-passwords" f $user $group 755 - assert_file "$ESHOME/bin/x-pack/sql-cli" f $user $group 755 - assert_file "$ESHOME/bin/x-pack/syskeygen" f $user $group 755 - assert_file "$ESHOME/bin/x-pack/users" f $user $group 755 - assert_file "$ESHOME/bin/x-pack/x-pack-env" f $user $group 755 - assert_number_of_files "$ESHOME/bin/x-pack/" 24 + # Verify binary files + assert_file "$ESHOME/bin/$name" d $user $group 755 + local binaryFiles=( + 'certgen' + 'certgen.bat' + 'certutil' + 'certutil.bat' + 'croneval' + 'croneval.bat' + 'extension' + 'extension.bat' + 'migrate' + 'migrate.bat' + 'saml-metadata' + 'saml-metadata.bat' + 'setup-passwords' + 'setup-passwords.bat' + 'sql-cli' + 'sql-cli.bat' + 'syskeygen' + 'syskeygen.bat' + 'users' + 'users.bat' + 'x-pack-env' + 'x-pack-env.bat' + 'x-pack-security-env' + 'x-pack-security-env.bat' + 'x-pack-watcher-env' + 'x-pack-watcher-env.bat' + ) - assert_file "$ESCONFIG/x-pack" d $user elasticsearch 750 - assert_file "$ESCONFIG/x-pack/users" f $user elasticsearch 660 - assert_file "$ESCONFIG/x-pack/users_roles" f $user elasticsearch 660 - assert_file "$ESCONFIG/x-pack/roles.yml" f $user elasticsearch 660 - assert_file "$ESCONFIG/x-pack/role_mapping.yml" f $user elasticsearch 660 - assert_file "$ESCONFIG/x-pack/log4j2.properties" f $user elasticsearch 660 - assert_number_of_files "$ESCONFIG/x-pack" 5 + local binaryFilesCount=0 + for binaryFile in ${binaryFiles[@]}; do + assert_file "$ESHOME/bin/$name/${binaryFile}" f $user $group 755 + binaryFilesCount=$(( binaryFilesCount + 1 )) + done + assert_number_of_files "$ESHOME/bin/$name/" $binaryFilesCount + # 
Verify config files + assert_file "$ESCONFIG/$name" d $user elasticsearch 750 + local configFiles=( + 'users' + 'users_roles' + 'roles.yml' + 'role_mapping.yml' + 'log4j2.properties' + ) + + local configFilesCount=0 + for configFile in ${configFiles[@]}; do + assert_file "$ESCONFIG/$name/${configFile}" f $user elasticsearch 660 + configFilesCount=$(( configFilesCount + 1 )) + done + assert_number_of_files "$ESCONFIG/$name/" $configFilesCount + + # Verify keystore creation assert_file "$ESCONFIG/elasticsearch.keystore" f $user elasticsearch 660 + + # Read the $name.expected file that contains all the expected + # plugins for the meta plugin + while read plugin; do + assert_module_or_plugin_directory "$ESPLUGINS/$name/$plugin" + assert_file_exist "$ESPLUGINS/$name/$plugin/$plugin"*".jar" + assert_file_exist "$ESPLUGINS/$name/$plugin/plugin-descriptor.properties" + assert_file_exist "$ESPLUGINS/$name/$plugin/plugin-security.policy" + done
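The `while read plugin` loop above is driven by a file generated on the Gradle side: the `setupBats` hook added in qa/vagrant/build.gradle writes one `build/plugins/<meta-plugin>.expected` file per meta plugin, listing its bundled plugins one per line. As a purely illustrative cross-check (the meta plugin name and plugin names below are assumed examples, not the authoritative x-pack list), that generated file could be inspected from Gradle like this:

[source,groovy]
----
// Illustration only: sanity-check the expectations file that the bats helper consumes.
// 'x-pack', 'x-pack-core' and 'x-pack-security' are assumed example names.
task checkExpectedMetaPlugins {
    dependsOn setupBats
    doLast {
        File expected = file("build/plugins/x-pack.expected")
        assert expected.exists() : "setupBats should have written ${expected}"
        List<String> plugins = expected.readLines()
        // Each entry corresponds to a directory the packaging test asserts under plugins/x-pack/
        assert plugins.containsAll(['x-pack-core', 'x-pack-security'])
    }
}
----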