diff --git a/gradle.properties b/gradle.properties index 6b1823d86a6..2511c740bb5 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1 +1,2 @@ org.gradle.daemon=false +org.gradle.jvmargs=-Xmx1536m diff --git a/plugin/build.gradle b/plugin/build.gradle index ab3c4bc81f9..5de5a40f0b4 100644 --- a/plugin/build.gradle +++ b/plugin/build.gradle @@ -47,6 +47,7 @@ dependencyLicenses { ignoreSha 'shared-proto' ignoreSha 'elasticsearch-rest-client-sniffer' ignoreSha 'aggs-matrix-stats' + ignoreSha 'x-pack-core' } licenseHeaders { @@ -71,6 +72,9 @@ dependencies { // CLI deps compile project(path: ':core:cli', configuration: 'runtime') + // Core project deps (this is temporary) + compile project(':x-pack-elasticsearch:plugin:core') + // security deps compile project(path: ':modules:transport-netty4', configuration: 'runtime') compile 'com.unboundid:unboundid-ldapsdk:3.2.0' diff --git a/plugin/core/build.gradle b/plugin/core/build.gradle index 12d72c3197d..b79df34e746 100644 --- a/plugin/core/build.gradle +++ b/plugin/core/build.gradle @@ -6,9 +6,16 @@ dependencies { archivesBaseName = 'x-pack-core' +compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked" +//compileTestJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked" + // TODO: enable this once we have tests test.enabled=false -licenseHeaders.enabled = false + +licenseHeaders { + approvedLicenses << 'BCrypt (BSD-like)' + additionalLicense 'BCRYP', 'BCrypt (BSD-like)', 'Copyright (c) 2006 Damien Miller ' +} parent.bundlePlugin { from jar diff --git a/plugin/src/main/java/org/elasticsearch/license/DateUtils.java b/plugin/core/src/main/java/org/elasticsearch/license/DateUtils.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/DateUtils.java rename to plugin/core/src/main/java/org/elasticsearch/license/DateUtils.java diff --git a/plugin/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java 
b/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java rename to plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java diff --git a/plugin/src/main/java/org/elasticsearch/license/DeleteLicenseRequest.java b/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/DeleteLicenseRequest.java rename to plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/license/DeleteLicenseRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/DeleteLicenseRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/license/DeleteLicenseResponse.java b/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/DeleteLicenseResponse.java rename to plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/license/GetLicenseAction.java b/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/GetLicenseAction.java rename to plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java diff --git a/plugin/src/main/java/org/elasticsearch/license/GetLicenseRequest.java b/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/GetLicenseRequest.java 
rename to plugin/core/src/main/java/org/elasticsearch/license/GetLicenseRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/license/GetLicenseRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/GetLicenseRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/license/GetLicenseRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/license/GetLicenseResponse.java b/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/GetLicenseResponse.java rename to plugin/core/src/main/java/org/elasticsearch/license/GetLicenseResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java b/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java rename to plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java diff --git a/plugin/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java b/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java rename to plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/license/GetTrialStatusRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/GetTrialStatusRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequestBuilder.java diff --git 
a/plugin/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java b/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java rename to plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/license/License.java b/plugin/core/src/main/java/org/elasticsearch/license/License.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/License.java rename to plugin/core/src/main/java/org/elasticsearch/license/License.java diff --git a/plugin/src/main/java/org/elasticsearch/license/LicensesStatus.java b/plugin/core/src/main/java/org/elasticsearch/license/LicensesStatus.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/LicensesStatus.java rename to plugin/core/src/main/java/org/elasticsearch/license/LicensesStatus.java diff --git a/plugin/src/main/java/org/elasticsearch/license/OperationModeFileWatcher.java b/plugin/core/src/main/java/org/elasticsearch/license/OperationModeFileWatcher.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/OperationModeFileWatcher.java rename to plugin/core/src/main/java/org/elasticsearch/license/OperationModeFileWatcher.java diff --git a/plugin/src/main/java/org/elasticsearch/license/PostStartTrialAction.java b/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/PostStartTrialAction.java rename to plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java diff --git a/plugin/src/main/java/org/elasticsearch/license/PostStartTrialRequest.java b/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialRequest.java similarity index 100% rename from 
plugin/src/main/java/org/elasticsearch/license/PostStartTrialRequest.java rename to plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/license/PostStartTrialRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/PostStartTrialRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java b/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java rename to plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/license/PutLicenseAction.java b/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/PutLicenseAction.java rename to plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java diff --git a/plugin/src/main/java/org/elasticsearch/license/PutLicenseRequest.java b/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/PutLicenseRequest.java rename to plugin/core/src/main/java/org/elasticsearch/license/PutLicenseRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/license/PutLicenseRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/PutLicenseRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/license/PutLicenseRequestBuilder.java 
diff --git a/plugin/src/main/java/org/elasticsearch/license/PutLicenseResponse.java b/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/PutLicenseResponse.java rename to plugin/core/src/main/java/org/elasticsearch/license/PutLicenseResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/license/XPackInfoResponse.java b/plugin/core/src/main/java/org/elasticsearch/license/XPackInfoResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/license/XPackInfoResponse.java rename to plugin/core/src/main/java/org/elasticsearch/license/XPackInfoResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java similarity index 91% rename from plugin/src/main/java/org/elasticsearch/license/XPackLicenseState.java rename to plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index c5a74bc53d3..b90207dab34 100644 --- a/plugin/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -9,8 +9,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.License.OperationMode; -import org.elasticsearch.xpack.XPackPlugin; -import org.elasticsearch.xpack.monitoring.Monitoring; +import org.elasticsearch.xpack.XpackField; +import org.elasticsearch.xpack.monitoring.MonitoringField; import java.util.Collections; import java.util.LinkedHashMap; @@ -29,35 +29,35 @@ public class XPackLicenseState { static final Map EXPIRATION_MESSAGES; static { Map messages = new LinkedHashMap<>(); - messages.put(XPackPlugin.SECURITY, new String[] { + messages.put(XpackField.SECURITY, new String[] { "Cluster health, 
cluster stats and indices stats operations are blocked", "All data operations (read and write) continue to work" }); - messages.put(XPackPlugin.WATCHER, new String[] { + messages.put(XpackField.WATCHER, new String[] { "PUT / GET watch APIs are disabled, DELETE watch API continues to work", "Watches execute and write to the history", "The actions of the watches don't execute" }); - messages.put(XPackPlugin.MONITORING, new String[] { + messages.put(XpackField.MONITORING, new String[] { "The agent will stop collecting cluster and indices metrics", "The agent will stop automatically cleaning indices older than [xpack.monitoring.history.duration]" }); - messages.put(XPackPlugin.GRAPH, new String[] { + messages.put(XpackField.GRAPH, new String[] { "Graph explore APIs are disabled" }); - messages.put(XPackPlugin.MACHINE_LEARNING, new String[] { + messages.put(XpackField.MACHINE_LEARNING, new String[] { "Machine learning APIs are disabled" }); - messages.put(XPackPlugin.LOGSTASH, new String[] { + messages.put(XpackField.LOGSTASH, new String[] { "Logstash will continue to poll centrally-managed pipelines" }); - messages.put(XPackPlugin.DEPRECATION, new String[] { + messages.put(XpackField.DEPRECATION, new String[] { "Deprecation APIs are disabled" }); - messages.put(XPackPlugin.UPGRADE, new String[] { + messages.put(XpackField.UPGRADE, new String[] { "Upgrade API is disabled" }); - messages.put(XPackPlugin.SQL, new String[] { + messages.put(XpackField.SQL, new String[] { "SQL support is disabled" }); EXPIRATION_MESSAGES = Collections.unmodifiableMap(messages); @@ -70,13 +70,13 @@ public class XPackLicenseState { static final Map> ACKNOWLEDGMENT_MESSAGES; static { Map> messages = new LinkedHashMap<>(); - messages.put(XPackPlugin.SECURITY, XPackLicenseState::securityAcknowledgementMessages); - messages.put(XPackPlugin.WATCHER, XPackLicenseState::watcherAcknowledgementMessages); - messages.put(XPackPlugin.MONITORING, XPackLicenseState::monitoringAcknowledgementMessages); - 
messages.put(XPackPlugin.GRAPH, XPackLicenseState::graphAcknowledgementMessages); - messages.put(XPackPlugin.MACHINE_LEARNING, XPackLicenseState::machineLearningAcknowledgementMessages); - messages.put(XPackPlugin.LOGSTASH, XPackLicenseState::logstashAcknowledgementMessages); - messages.put(XPackPlugin.SQL, XPackLicenseState::sqlAcknowledgementMessages); + messages.put(XpackField.SECURITY, XPackLicenseState::securityAcknowledgementMessages); + messages.put(XpackField.WATCHER, XPackLicenseState::watcherAcknowledgementMessages); + messages.put(XpackField.MONITORING, XPackLicenseState::monitoringAcknowledgementMessages); + messages.put(XpackField.GRAPH, XPackLicenseState::graphAcknowledgementMessages); + messages.put(XpackField.MACHINE_LEARNING, XPackLicenseState::machineLearningAcknowledgementMessages); + messages.put(XpackField.LOGSTASH, XPackLicenseState::logstashAcknowledgementMessages); + messages.put(XpackField.SQL, XPackLicenseState::sqlAcknowledgementMessages); ACKNOWLEDGMENT_MESSAGES = Collections.unmodifiableMap(messages); } @@ -91,7 +91,7 @@ public class XPackLicenseState { return new String[] { "The following X-Pack security functionality will be disabled: authentication, authorization, " + "ip filtering, and auditing. Please restart your node after applying the license.", - "Field and document level access control will be disabled.", + "Field and document level access control will be disabled.", "Custom realms will be ignored." }; } @@ -104,7 +104,7 @@ public class XPackLicenseState { case TRIAL: case PLATINUM: return new String[] { - "Field and document level access control will be disabled.", + "Field and document level access control will be disabled.", "Custom realms will be ignored."
}; } @@ -119,7 +119,7 @@ public class XPackLicenseState { return new String[] { "Authentication will be limited to the native realms.", "IP filtering and auditing will be disabled.", - "Field and document level access control will be disabled.", + "Field and document level access control will be disabled.", "Custom realms will be ignored." }; } @@ -159,7 +159,7 @@ public class XPackLicenseState { newMode, newMode, newMode), LoggerMessageFormat.format( "Automatic index cleanup is locked to {} days for clusters with [{}] license.", - Monitoring.HISTORY_DURATION.getDefault(Settings.EMPTY).days(), newMode) + MonitoringField.HISTORY_DURATION.getDefault(Settings.EMPTY).days(), newMode) }; } break; @@ -307,7 +307,7 @@ public class XPackLicenseState { } /** - * Determine if Document Level Security (DLS) and Field Level Security (FLS) should be enabled. + * Determine if Document Level Security (DLS) and Field Level Security (FLS) should be enabled. *

* DLS and FLS are only disabled when the mode is not: *

    diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ClientHelper.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ClientHelper.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ClientHelper.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ClientHelper.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/XPackBuild.java b/plugin/core/src/main/java/org/elasticsearch/xpack/XPackBuild.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/XPackBuild.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/XPackBuild.java diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/XPackClientActionPlugin.java b/plugin/core/src/main/java/org/elasticsearch/xpack/XPackClientActionPlugin.java new file mode 100644 index 00000000000..c2fb63920ac --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/XPackClientActionPlugin.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack; + +import org.elasticsearch.common.settings.Settings; + +public interface XPackClientActionPlugin { + + static boolean isTribeNode(Settings settings) { + return settings.getGroups("tribe", true).isEmpty() == false; + } + + static boolean isTribeClientNode(Settings settings) { + return settings.get("tribe.name") != null; + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/XPackFeatureSet.java b/plugin/core/src/main/java/org/elasticsearch/xpack/XPackFeatureSet.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/XPackFeatureSet.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/XPackFeatureSet.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/XPackSettings.java b/plugin/core/src/main/java/org/elasticsearch/xpack/XPackSettings.java similarity index 95% rename from plugin/src/main/java/org/elasticsearch/xpack/XPackSettings.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/XPackSettings.java index 5dd6308d51e..242d6719884 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/XPackSettings.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/XPackSettings.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; -import org.elasticsearch.xpack.security.Security; +import org.elasticsearch.xpack.security.SecurityField; import org.elasticsearch.xpack.ssl.SSLClientAuth; import org.elasticsearch.xpack.ssl.SSLConfigurationSettings; import org.elasticsearch.xpack.ssl.VerificationMode; @@ -30,7 +30,7 @@ public class XPackSettings { /** Setting for enabling or disabling monitoring. Defaults to true if not a tribe node. 
*/ public static final Setting MONITORING_ENABLED = Setting.boolSetting("xpack.monitoring.enabled", // By default, monitoring is disabled on tribe nodes - s -> String.valueOf(XPackPlugin.isTribeNode(s) == false && XPackPlugin.isTribeClientNode(s) == false), + s -> String.valueOf(XPackClientActionPlugin.isTribeNode(s) == false && XPackClientActionPlugin.isTribeClientNode(s) == false), Setting.Property.NodeScope); /** Setting for enabling or disabling watcher. Defaults to true. */ @@ -116,11 +116,11 @@ public class XPackSettings { private static final SSLConfigurationSettings GLOBAL_SSL = SSLConfigurationSettings.withPrefix(GLOBAL_SSL_PREFIX); // http specific settings - public static final String HTTP_SSL_PREFIX = Security.setting("http.ssl."); + public static final String HTTP_SSL_PREFIX = SecurityField.setting("http.ssl."); private static final SSLConfigurationSettings HTTP_SSL = SSLConfigurationSettings.withPrefix(HTTP_SSL_PREFIX); // transport specific settings - public static final String TRANSPORT_SSL_PREFIX = Security.setting("transport.ssl."); + public static final String TRANSPORT_SSL_PREFIX = SecurityField.setting("transport.ssl."); private static final SSLConfigurationSettings TRANSPORT_SSL = SSLConfigurationSettings.withPrefix(TRANSPORT_SSL_PREFIX); /** Returns all settings created in {@link XPackSettings}. */ diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/XpackField.java b/plugin/core/src/main/java/org/elasticsearch/xpack/XpackField.java new file mode 100644 index 00000000000..3ef62462644 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/XpackField.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack; + +public final class XpackField { + // These should be moved back to XPackPlugin once its moved to common + public static final String NAME = "x-pack"; + /** Name constant for the security feature. */ + public static final String SECURITY = "security"; + /** Name constant for the monitoring feature. */ + public static final String MONITORING = "monitoring"; + /** Name constant for the watcher feature. */ + public static final String WATCHER = "watcher"; + /** Name constant for the graph feature. */ + public static final String GRAPH = "graph"; + /** Name constant for the machine learning feature. */ + public static final String MACHINE_LEARNING = "ml"; + /** Name constant for the Logstash feature. */ + public static final String LOGSTASH = "logstash"; + /** Name constant for the Deprecation API feature. */ + public static final String DEPRECATION = "deprecation"; + /** Name constant for the upgrade feature. */ + public static final String UPGRADE = "upgrade"; + // inside of YAML settings we still use xpack do not having handle issues with dashes + public static final String SETTINGS_NAME = "xpack"; + /** Name constant for the sql feature. */ + public static final String SQL = "sql"; + + private XpackField() {} + + public static String featureSettingPrefix(String featureName) { + return XpackField.SETTINGS_NAME + "." 
+ featureName; + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/action/XPackInfoAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/action/XPackInfoAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/action/XPackInfoAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/action/XPackInfoAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/action/XPackInfoRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/action/XPackInfoRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/action/XPackInfoRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/action/XPackInfoRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/action/XPackInfoRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/action/XPackInfoRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/action/XPackInfoRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/action/XPackInfoRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/action/XPackUsageAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/action/XPackUsageAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/action/XPackUsageAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/action/XPackUsageAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/action/XPackUsageRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/action/XPackUsageRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/action/XPackUsageRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/action/XPackUsageRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/action/XPackUsageRequestBuilder.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/action/XPackUsageRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/action/XPackUsageRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/action/XPackUsageRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/action/XPackUsageResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/action/XPackUsageResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/action/XPackUsageResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/action/XPackUsageResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java similarity index 66% rename from plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index 9fbbf892298..1976e57d1e7 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -6,42 +6,23 @@ package org.elasticsearch.xpack.deprecation; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; -import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; -import 
org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; import org.elasticsearch.action.support.master.MasterNodeReadRequest; -import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.license.LicenseUtils; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.XPackPlugin; import java.io.IOException; import java.util.Arrays; @@ -49,16 +30,12 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.function.BiFunction; import java.util.function.Function; +import java.util.stream.Collectors; import static org.elasticsearch.action.ValidateActions.addValidationError; -import static 
org.elasticsearch.xpack.ClientHelper.DEPRECATION_ORIGIN; -import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; -import static org.elasticsearch.xpack.deprecation.DeprecationChecks.CLUSTER_SETTINGS_CHECKS; -import static org.elasticsearch.xpack.deprecation.DeprecationChecks.INDEX_SETTINGS_CHECKS; -import static org.elasticsearch.xpack.deprecation.DeprecationChecks.NODE_SETTINGS_CHECKS; -import static org.elasticsearch.xpack.deprecation.DeprecationChecks.filterChecks; public class DeprecationInfoAction extends Action { @@ -70,6 +47,18 @@ public class DeprecationInfoAction extends Action The signature of the check (BiFunction, Function, including the appropriate arguments) + * @return The list of {@link DeprecationIssue} that were found in the cluster + */ + static List filterChecks(List checks, Function mapper) { + return checks.stream().map(mapper).filter(Objects::nonNull).collect(Collectors.toList()); + } + @Override public RequestBuilder newRequestBuilder(ElasticsearchClient client) { return new RequestBuilder(client, this); @@ -276,69 +265,4 @@ public class DeprecationInfoAction extends Action { - - private final XPackLicenseState licenseState; - private final NodeClient client; - private final IndexNameExpressionResolver indexNameExpressionResolver; - - @Inject - public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - XPackLicenseState licenseState, NodeClient client) { - super(settings, DeprecationInfoAction.NAME, transportService, clusterService, threadPool, actionFilters, - Request::new, indexNameExpressionResolver); - this.licenseState = licenseState; - this.client = client; - this.indexNameExpressionResolver = indexNameExpressionResolver; - } - - @Override - protected String executor() { - return ThreadPool.Names.GENERIC; - } - - @Override - protected Response 
newResponse() { - return new Response(); - } - - @Override - protected ClusterBlockException checkBlock(Request request, ClusterState state) { - // Cluster is not affected but we look up repositories in metadata - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); - } - - @Override - protected final void masterOperation(final Request request, ClusterState state, final ActionListener listener) { - if (licenseState.isDeprecationAllowed()) { - NodesInfoRequest nodesInfoRequest = new NodesInfoRequest("_local").settings(true).plugins(true); - NodesStatsRequest nodesStatsRequest = new NodesStatsRequest("_local").fs(true); - - final ThreadContext threadContext = client.threadPool().getThreadContext(); - executeAsyncWithOrigin(threadContext, DEPRECATION_ORIGIN, nodesInfoRequest, ActionListener.wrap( - nodesInfoResponse -> { - if (nodesInfoResponse.hasFailures()) { - throw nodesInfoResponse.failures().get(0); - } - executeAsyncWithOrigin(threadContext, DEPRECATION_ORIGIN, nodesStatsRequest, - ActionListener.wrap( - nodesStatsResponse -> { - if (nodesStatsResponse.hasFailures()) { - throw nodesStatsResponse.failures().get(0); - } - listener.onResponse(Response.from(nodesInfoResponse.getNodes(), - nodesStatsResponse.getNodes(), state, indexNameExpressionResolver, - request.indices(), request.indicesOptions(), - CLUSTER_SETTINGS_CHECKS, NODE_SETTINGS_CHECKS, - INDEX_SETTINGS_CHECKS)); - }, listener::onFailure), - client.admin().cluster()::nodesStats); - }, listener::onFailure), client.admin().cluster()::nodesInfo); - } else { - listener.onFailure(LicenseUtils.newComplianceException(XPackPlugin.DEPRECATION)); - } - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationIssue.java b/plugin/core/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationIssue.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationIssue.java rename to 
plugin/core/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationIssue.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/graph/action/Connection.java b/plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/Connection.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/graph/action/Connection.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/Connection.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreResponse.java rename to 
plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/GraphExploreResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/graph/action/Hop.java b/plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/Hop.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/graph/action/Hop.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/Hop.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/graph/action/Vertex.java b/plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/Vertex.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/graph/action/Vertex.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/Vertex.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/graph/action/VertexRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/VertexRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/graph/action/VertexRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/graph/action/VertexRequest.java diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/MLMetadataField.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/MLMetadataField.java new file mode 100644 index 00000000000..670350059e8 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/MLMetadataField.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml; + +public final class MLMetadataField { + + public static final String TYPE = "ml"; + + private MLMetadataField() {} + + /** + * Namespaces the task ids for datafeeds. 
+ * A job id can be used as a datafeed id, because they are stored separately in cluster state. + */ + public static String datafeedTaskId(String datafeedId) { + return "datafeed-" + datafeedId; + } +} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/MachineLearningClientActionPlugin.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/MachineLearningClientActionPlugin.java new file mode 100644 index 00000000000..972b4661b08 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/MachineLearningClientActionPlugin.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; + +public interface MachineLearningClientActionPlugin { + + Setting MAX_MODEL_MEMORY_LIMIT = + Setting.memorySizeSetting("xpack.ml.max_model_memory_limit", new ByteSizeValue(0), + Setting.Property.Dynamic, Setting.Property.NodeScope); + + TimeValue STATE_PERSIST_RESTORE_TIMEOUT = TimeValue.timeValueMinutes(30); +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlClientHelper.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/MlClientHelper.java similarity index 87% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/MlClientHelper.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/MlClientHelper.java index aca1acd4b3b..186c768ecbb 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlClientHelper.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/MlClientHelper.java @@ -9,18 +9,16 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.Client; import 
org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.xpack.ClientHelper; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.security.authc.Authentication; -import org.elasticsearch.xpack.security.authc.AuthenticationService; +import org.elasticsearch.xpack.security.authc.AuthenticationField; +import org.elasticsearch.xpack.security.authc.AuthenticationServiceField; import java.util.Map; import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; - /** * A helper class for actions which decides if we should run via the _xpack user and set ML as origin * or if we should use the run_as functionality by setting the correct headers @@ -30,8 +28,8 @@ public class MlClientHelper { /** * List of headers that are related to security */ - public static final Set SECURITY_HEADER_FILTERS = Sets.newHashSet(AuthenticationService.RUN_AS_USER_HEADER, - Authentication.AUTHENTICATION_KEY); + public static final Set SECURITY_HEADER_FILTERS = Sets.newHashSet(AuthenticationServiceField.RUN_AS_USER_HEADER, + AuthenticationField.AUTHENTICATION_KEY); /** * Execute a client operation and return the response, try to run a datafeed search with least privileges, when headers exist @@ -56,7 +54,8 @@ public class MlClientHelper { public static T execute(Map headers, Client client, Supplier supplier) { // no headers, we will have to use the xpack internal user for our execution by specifying the ml origin if (headers == null || headers.isEmpty()) { - try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { + try (ThreadContext.StoredContext ignore = ClientHelper.stashWithOrigin(client.threadPool().getThreadContext(), + ClientHelper.ML_ORIGIN)) { return 
supplier.get(); } } else { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetaIndex.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/MlMetaIndex.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetaIndex.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/MlMetaIndex.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetadata.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/MlMetadata.java similarity index 97% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetadata.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/MlMetadata.java index d6bf90936f6..7dc81a32102 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetadata.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/MlMetadata.java @@ -56,7 +56,6 @@ public class MlMetadata implements MetaData.Custom { private static final ParseField JOBS_FIELD = new ParseField("jobs"); private static final ParseField DATAFEEDS_FIELD = new ParseField("datafeeds"); - public static final String TYPE = "ml"; public static final MlMetadata EMPTY_METADATA = new MlMetadata(Collections.emptySortedMap(), Collections.emptySortedMap()); // This parser follows the pattern that metadata is parsed leniently (to allow for enhancements) public static final ObjectParser METADATA_PARSER = new ObjectParser<>("ml_metadata", true, Builder::new); @@ -118,7 +117,7 @@ public class MlMetadata implements MetaData.Custom { @Override public String getWriteableName() { - return TYPE; + return MLMetadataField.TYPE; } @Override @@ -212,7 +211,7 @@ public class MlMetadata implements MetaData.Custom { @Override public String getWriteableName() { - return TYPE; + return MLMetadataField.TYPE; } static Diff readJobDiffFrom(StreamInput in) throws IOException { @@ -361,7 +360,7 @@ public class MlMetadata implements MetaData.Custom { private void checkDatafeedIsStopped(Supplier msg, String datafeedId, 
PersistentTasksCustomMetaData persistentTasks) { if (persistentTasks != null) { - if (persistentTasks.getTask(datafeedTaskId(datafeedId)) != null) { + if (persistentTasks.getTask(MLMetadataField.datafeedTaskId(datafeedId)) != null) { throw ExceptionsHelper.conflictStatusException(msg.get()); } } @@ -435,20 +434,12 @@ public class MlMetadata implements MetaData.Custom { return tasks.getTask(jobTaskId(jobId)); } - /** - * Namespaces the task ids for datafeeds. - * A job id can be used as a datafeed id, because they are stored separately in cluster state. - */ - public static String datafeedTaskId(String datafeedId) { - return "datafeed-" + datafeedId; - } - @Nullable public static PersistentTask getDatafeedTask(String datafeedId, @Nullable PersistentTasksCustomMetaData tasks) { if (tasks == null) { return null; } - return tasks.getTask(datafeedTaskId(datafeedId)); + return tasks.getTask(MLMetadataField.datafeedTaskId(datafeedId)); } public static JobState getJobState(String jobId, @Nullable PersistentTasksCustomMetaData tasks) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlParserType.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/MlParserType.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/MlParserType.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/MlParserType.java diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/CloseJobAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/CloseJobAction.java new file mode 100644 index 00000000000..95571204997 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/CloseJobAction.java @@ -0,0 +1,272 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.Version; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.support.tasks.BaseTasksRequest; +import org.elasticsearch.action.support.tasks.BaseTasksResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.ml.MachineLearningClientActionPlugin; +import org.elasticsearch.xpack.ml.job.config.Job; + +import java.io.IOException; +import java.util.Objects; + +public class CloseJobAction extends Action { + + public static final CloseJobAction INSTANCE = new CloseJobAction(); + public static final String NAME = "cluster:admin/xpack/ml/job/close"; + + private CloseJobAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client, this); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends BaseTasksRequest implements ToXContentObject { + + public static final ParseField TIMEOUT = new ParseField("timeout"); + public static final ParseField FORCE = new ParseField("force"); + public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs"); + public static ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); + + static { + PARSER.declareString(Request::setJobId, Job.ID); + 
PARSER.declareString((request, val) -> + request.setCloseTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); + PARSER.declareBoolean(Request::setForce, FORCE); + PARSER.declareBoolean(Request::setAllowNoJobs, ALLOW_NO_JOBS); + } + + public static Request parseRequest(String jobId, XContentParser parser) { + Request request = PARSER.apply(parser, null); + if (jobId != null) { + request.setJobId(jobId); + } + return request; + } + + private String jobId; + private boolean force = false; + private boolean allowNoJobs = true; + // A big state can take a while to persist. For symmetry with the _open endpoint any + // changes here should be reflected there too. + private TimeValue timeout = MachineLearningClientActionPlugin.STATE_PERSIST_RESTORE_TIMEOUT; + + private String[] openJobIds; + + private boolean local; + + Request() { + openJobIds = new String[] {}; + } + + public Request(String jobId) { + this(); + this.jobId = jobId; + } + + public String getJobId() { + return jobId; + } + + public void setJobId(String jobId) { + this.jobId = jobId; + } + + public TimeValue getCloseTimeout() { + return timeout; + } + + public void setCloseTimeout(TimeValue timeout) { + this.timeout = timeout; + } + + public boolean isForce() { + return force; + } + + public void setForce(boolean force) { + this.force = force; + } + + public boolean allowNoJobs() { + return allowNoJobs; + } + + public void setAllowNoJobs(boolean allowNoJobs) { + this.allowNoJobs = allowNoJobs; + } + + public boolean isLocal() { return local; } + + public void setLocal(boolean local) { + this.local = local; + } + + public String[] getOpenJobIds() { return openJobIds; } + + public void setOpenJobIds(String [] openJobIds) { + this.openJobIds = openJobIds; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + jobId = in.readString(); + timeout = new TimeValue(in); + force = in.readBoolean(); + openJobIds = in.readStringArray(); + local = 
in.readBoolean(); + if (in.getVersion().onOrAfter(Version.V_6_1_0)) { + allowNoJobs = in.readBoolean(); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(jobId); + timeout.writeTo(out); + out.writeBoolean(force); + out.writeStringArray(openJobIds); + out.writeBoolean(local); + if (out.getVersion().onOrAfter(Version.V_6_1_0)) { + out.writeBoolean(allowNoJobs); + } + } + + @Override + public boolean match(Task task) { + for (String id : openJobIds) { + if (OpenJobAction.JobTaskMatcher.match(task, id)) { + return true; + } + } + return false; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + // openJobIds are excluded + builder.startObject(); + builder.field(Job.ID.getPreferredName(), jobId); + builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); + builder.field(FORCE.getPreferredName(), force); + builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs); + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + // openJobIds are excluded + return Objects.hash(jobId, timeout, force, allowNoJobs); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || obj.getClass() != getClass()) { + return false; + } + Request other = (Request) obj; + // openJobIds are excluded + return Objects.equals(jobId, other.jobId) && + Objects.equals(timeout, other.timeout) && + Objects.equals(force, other.force) && + Objects.equals(allowNoJobs, other.allowNoJobs); + } + } + + static class RequestBuilder extends ActionRequestBuilder { + + RequestBuilder(ElasticsearchClient client, CloseJobAction action) { + super(client, action, new Request()); + } + } + + public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject { + + private boolean closed; + + Response() { + super(null, null); + + } + + Response(StreamInput in) 
throws IOException { + super(null, null); + readFrom(in); + } + + Response(boolean closed) { + super(null, null); + this.closed = closed; + } + + public boolean isClosed() { + return closed; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + closed = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeBoolean(closed); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("closed", closed); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return closed == response.closed; + } + + @Override + public int hashCode() { + return Objects.hash(closed); + } + } + +} + diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteCalendarAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteCalendarAction.java similarity index 51% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteCalendarAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteCalendarAction.java index 1fd089d9685..f45d4515d73 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteCalendarAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteCalendarAction.java @@ -5,40 +5,20 @@ */ package org.elasticsearch.xpack.ml.action; -import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.bulk.BulkAction; -import 
org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MlMetaIndex; import org.elasticsearch.xpack.ml.calendars.Calendar; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.Objects; -import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; - public class DeleteCalendarAction extends Action { @@ -138,47 +118,4 @@ public class DeleteCalendarAction extends Action { - - private final Client client; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - Client client) { - super(settings, NAME, threadPool, transportService, actionFilters, - indexNameExpressionResolver, Request::new); - this.client = client; - } - - @Override - protected void doExecute(DeleteCalendarAction.Request request, ActionListener listener) { - - final String 
calendarId = request.getCalendarId(); - - DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, Calendar.documentId(calendarId)); - - BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); - bulkRequestBuilder.add(deleteRequest); - bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), - new ActionListener() { - @Override - public void onResponse(BulkResponse bulkResponse) { - if (bulkResponse.getItems()[0].status() == RestStatus.NOT_FOUND) { - listener.onFailure(new ResourceNotFoundException("Could not delete calendar with ID [" + calendarId - + "] because it does not exist")); - } else { - listener.onResponse(new Response(true)); - } - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(ExceptionsHelper.serverError("Could not delete calendar with ID [" + calendarId + "]", e)); - } - }); - } - } } diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteDatafeedAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteDatafeedAction.java new file mode 100644 index 00000000000..80f72721eda --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteDatafeedAction.java @@ -0,0 +1,144 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.Version; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Objects; + +public class DeleteDatafeedAction extends Action { + + public static final DeleteDatafeedAction INSTANCE = new DeleteDatafeedAction(); + public static final String NAME = "cluster:admin/xpack/ml/datafeeds/delete"; + + private DeleteDatafeedAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client, this); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends AcknowledgedRequest implements ToXContentFragment { + + public static final ParseField FORCE = new ParseField("force"); + + private String datafeedId; + private boolean force; + + public Request(String datafeedId) { + this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); + } + + Request() { + } + + public String getDatafeedId() { + return datafeedId; + } + + public boolean isForce() { + return force; + } + + public void setForce(boolean force) { + this.force = force; + } + + @Override + public 
ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + datafeedId = in.readString(); + if (in.getVersion().onOrAfter(Version.V_5_5_0)) { + force = in.readBoolean(); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(datafeedId); + if (out.getVersion().onOrAfter(Version.V_5_5_0)) { + out.writeBoolean(force); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request other = (Request) o; + return Objects.equals(datafeedId, other.datafeedId) && Objects.equals(force, other.force); + } + + @Override + public int hashCode() { + return Objects.hash(datafeedId, force); + } + } + + public static class RequestBuilder extends MasterNodeOperationRequestBuilder { + + public RequestBuilder(ElasticsearchClient client, DeleteDatafeedAction action) { + super(client, action, new Request()); + } + } + + public static class Response extends AcknowledgedResponse { + + Response() { + } + + Response(boolean acknowledged) { + super(acknowledged); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + readAcknowledged(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + writeAcknowledged(out); + } + } + +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteExpiredDataAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteExpiredDataAction.java similarity index 50% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteExpiredDataAction.java rename to 
plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteExpiredDataAction.java index fa15915af63..65c2ae5849e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteExpiredDataAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteExpiredDataAction.java @@ -6,38 +6,18 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.job.retention.ExpiredForecastsRemover; -import org.elasticsearch.xpack.ml.job.retention.ExpiredModelSnapshotsRemover; -import org.elasticsearch.xpack.ml.job.retention.ExpiredResultsRemover; -import org.elasticsearch.xpack.ml.job.retention.MlDataRemover; -import org.elasticsearch.xpack.ml.notifications.Auditor; -import org.elasticsearch.xpack.ml.utils.VolatileCursorIterator; import java.io.IOException; -import 
java.util.Arrays; -import java.util.Iterator; -import java.util.List; import java.util.Objects; public class DeleteExpiredDataAction extends Action { - - private final Client client; - private final ClusterService clusterService; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - Client client, ClusterService clusterService) { - super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new); - this.client = client; - this.clusterService = clusterService; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - logger.info("Deleting expired data"); - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> deleteExpiredData(listener)); - } - - private void deleteExpiredData(ActionListener listener) { - Auditor auditor = new Auditor(client, clusterService); - List dataRemovers = Arrays.asList( - new ExpiredResultsRemover(client, clusterService, auditor), - new ExpiredForecastsRemover(client), - new ExpiredModelSnapshotsRemover(client, clusterService) - ); - Iterator dataRemoversIterator = new VolatileCursorIterator<>(dataRemovers); - deleteExpiredData(dataRemoversIterator, listener); - } - - private void deleteExpiredData(Iterator mlDataRemoversIterator, ActionListener listener) { - if (mlDataRemoversIterator.hasNext()) { - MlDataRemover remover = mlDataRemoversIterator.next(); - remover.remove(ActionListener.wrap( - booleanResponse -> deleteExpiredData(mlDataRemoversIterator, listener), - listener::onFailure)); - } else { - logger.info("Completed deletion of expired data"); - listener.onResponse(new Response(true)); - } - } - } } diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteFilterAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteFilterAction.java new file 
mode 100644 index 00000000000..b136e65e89b --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteFilterAction.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Objects; + + +public class DeleteFilterAction extends Action { + + public static final DeleteFilterAction INSTANCE = new DeleteFilterAction(); + public static final String NAME = "cluster:admin/xpack/ml/filters/delete"; + + private DeleteFilterAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client, this); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends AcknowledgedRequest { + + public static final ParseField FILTER_ID = new ParseField("filter_id"); + + private String filterId; + + Request() { + + } + + public Request(String filterId) { + this.filterId = ExceptionsHelper.requireNonNull(filterId, FILTER_ID.getPreferredName()); + } + + public String getFilterId() { + return filterId; + } + + @Override + public 
ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + filterId = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(filterId); + } + + @Override + public int hashCode() { + return Objects.hash(filterId); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(filterId, other.filterId); + } + } + + public static class RequestBuilder extends MasterNodeOperationRequestBuilder { + + public RequestBuilder(ElasticsearchClient client, DeleteFilterAction action) { + super(client, action, new Request()); + } + } + + public static class Response extends AcknowledgedResponse { + + public Response(boolean acknowledged) { + super(acknowledged); + } + + private Response() {} + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + readAcknowledged(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + writeAcknowledged(out); + } + } + +} + diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteJobAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteJobAction.java new file mode 100644 index 00000000000..a6da6d576f2 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteJobAction.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.Version; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.xpack.ml.job.config.Job; +import org.elasticsearch.xpack.ml.job.persistence.JobStorageDeletionTask; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Objects; + +public class DeleteJobAction extends Action { + + public static final DeleteJobAction INSTANCE = new DeleteJobAction(); + public static final String NAME = "cluster:admin/xpack/ml/job/delete"; + + private DeleteJobAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client, this); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends AcknowledgedRequest { + + private String jobId; + private boolean force; + + public Request(String jobId) { + this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); + } + + Request() {} + + public String getJobId() { + return jobId; + } + + public void setJobId(String jobId) { + this.jobId = jobId; + } + + public boolean isForce() { + return force; + } + + public void setForce(boolean force) { + this.force = force; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public Task createTask(long id, String type, 
String action, TaskId parentTaskId) { + return new JobStorageDeletionTask(id, type, action, "delete-job-" + jobId, parentTaskId); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + jobId = in.readString(); + if (in.getVersion().onOrAfter(Version.V_5_5_0)) { + force = in.readBoolean(); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(jobId); + if (out.getVersion().onOrAfter(Version.V_5_5_0)) { + out.writeBoolean(force); + } + } + + @Override + public int hashCode() { + return Objects.hash(jobId, force); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || obj.getClass() != getClass()) { + return false; + } + DeleteJobAction.Request other = (DeleteJobAction.Request) obj; + return Objects.equals(jobId, other.jobId) && Objects.equals(force, other.force); + } + } + + static class RequestBuilder extends MasterNodeOperationRequestBuilder { + + RequestBuilder(ElasticsearchClient client, DeleteJobAction action) { + super(client, action, new Request()); + } + } + + public static class Response extends AcknowledgedResponse { + + public Response(boolean acknowledged) { + super(acknowledged); + } + + Response() {} + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + readAcknowledged(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + writeAcknowledged(out); + } + } + +} \ No newline at end of file diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteModelSnapshotAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteModelSnapshotAction.java new file mode 100644 index 00000000000..f19fc013020 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/DeleteModelSnapshotAction.java @@ -0,0 +1,112 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.ml.job.config.Job; +import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshotField; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import java.io.IOException; + +public class DeleteModelSnapshotAction extends Action { + + public static final DeleteModelSnapshotAction INSTANCE = new DeleteModelSnapshotAction(); + public static final String NAME = "cluster:admin/xpack/ml/job/model_snapshots/delete"; + + private DeleteModelSnapshotAction() { + super(NAME); + } + + @Override + public DeleteModelSnapshotAction.RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client, this); + } + + @Override + public DeleteModelSnapshotAction.Response newResponse() { + return new Response(); + } + + public static class Request extends ActionRequest { + + private String jobId; + private String snapshotId; + + Request() { + } + + public Request(String jobId, String snapshotId) { + this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); + this.snapshotId = ExceptionsHelper.requireNonNull(snapshotId, ModelSnapshotField.SNAPSHOT_ID.getPreferredName()); + } + + public String getJobId() { + return jobId; + } + + public String getSnapshotId() { + return 
snapshotId; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + jobId = in.readString(); + snapshotId = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(jobId); + out.writeString(snapshotId); + } + } + + public static class Response extends AcknowledgedResponse { + + public Response(boolean acknowledged) { + super(acknowledged); + } + + private Response() {} + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + readAcknowledged(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + writeAcknowledged(out); + } + + } + + public static class RequestBuilder extends ActionRequestBuilder { + + public RequestBuilder(ElasticsearchClient client, DeleteModelSnapshotAction action) { + super(client, action, new Request()); + } + } + +} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/FinalizeJobExecutionAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/FinalizeJobExecutionAction.java new file mode 100644 index 00000000000..93b276401e2 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/FinalizeJobExecutionAction.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; +import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +public class FinalizeJobExecutionAction extends Action { + + public static final FinalizeJobExecutionAction INSTANCE = new FinalizeJobExecutionAction(); + public static final String NAME = "cluster:internal/xpack/ml/job/finalize_job_execution"; + + private FinalizeJobExecutionAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client, INSTANCE); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends MasterNodeRequest { + + private String[] jobIds; + + public Request(String[] jobIds) { + this.jobIds = jobIds; + } + + Request() { + } + + public String[] getJobIds() { + return jobIds; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + jobIds = in.readStringArray(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(jobIds); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + } + + public static class RequestBuilder + extends MasterNodeOperationRequestBuilder { + + public RequestBuilder(ElasticsearchClient client, FinalizeJobExecutionAction action) { + super(client, action, new Request()); + } + } + + public static class Response extends AcknowledgedResponse { + + Response(boolean 
acknowledged) { + super(acknowledged); + } + + Response() { + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + readAcknowledged(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + writeAcknowledged(out); + } + } + +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/FlushJobAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/FlushJobAction.java similarity index 74% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/FlushJobAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/FlushJobAction.java index 337fdbba875..ad984045b2f 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/FlushJobAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/FlushJobAction.java @@ -7,31 +7,20 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.Version; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.job.process.autodetect.output.FlushAcknowledgement; -import org.elasticsearch.xpack.ml.job.process.autodetect.params.FlushJobParams; -import org.elasticsearch.xpack.ml.job.process.autodetect.params.TimeRange; import java.io.IOException; import java.util.Date; @@ -56,7 +45,7 @@ public class FlushJobAction extends Action implements ToXContentObject { + public static class Request extends JobTaskRequest implements ToXContentObject { public static final ParseField CALC_INTERIM = new ParseField("calc_interim"); public static final ParseField START = new ParseField("start"); @@ -279,50 +268,6 @@ public class FlushJobAction extends Action { - - @Inject - public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ClusterService clusterService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - AutodetectProcessManager processManager) { - super(settings, FlushJobAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, - FlushJobAction.Request::new, FlushJobAction.Response::new, ThreadPool.Names.SAME, processManager); - // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread - } - - @Override - protected FlushJobAction.Response readTaskResponse(StreamInput in) throws IOException { - Response response = new Response(); - response.readFrom(in); - return response; - } - - @Override - protected void taskOperation(Request request, OpenJobAction.JobTask task, ActionListener listener) { - FlushJobParams.Builder paramsBuilder = FlushJobParams.builder(); - paramsBuilder.calcInterim(request.getCalcInterim()); - if 
(request.getAdvanceTime() != null) { - paramsBuilder.advanceTime(request.getAdvanceTime()); - } - if (request.getSkipTime() != null) { - paramsBuilder.skipTime(request.getSkipTime()); - } - TimeRange.Builder timeRangeBuilder = TimeRange.builder(); - if (request.getStart() != null) { - timeRangeBuilder.startTime(request.getStart()); - } - if (request.getEnd() != null) { - timeRangeBuilder.endTime(request.getEnd()); - } - paramsBuilder.forTimeRange(timeRangeBuilder.build()); - processManager.flushJob(task, paramsBuilder.build(), ActionListener.wrap( - flushAcknowledgement -> { - listener.onResponse(new Response(true, - flushAcknowledgement == null ? null : flushAcknowledgement.getLastFinalizedBucketEnd())); - }, listener::onFailure - )); - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/ForecastJobAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/ForecastJobAction.java similarity index 65% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/ForecastJobAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/ForecastJobAction.java index 7785e146420..35e89b5ece8 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/ForecastJobAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/ForecastJobAction.java @@ -5,42 +5,27 @@ */ package org.elasticsearch.xpack.ml.action; -import org.elasticsearch.Version; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; -import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; -import org.elasticsearch.xpack.ml.job.process.autodetect.params.ForecastParams; import org.elasticsearch.xpack.ml.job.results.Forecast; -import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.Objects; -import static org.elasticsearch.xpack.ml.action.ForecastJobAction.Request.DURATION; - -public class ForecastJobAction extends Action { +public class ForecastJobAction extends Action { public static final ForecastJobAction INSTANCE = new ForecastJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/forecast"; @@ -59,7 +44,7 @@ public class ForecastJobAction extends Action implements ToXContentObject { + public static class Request extends JobTaskRequest implements ToXContentObject { public static final ParseField DURATION = new ParseField("duration"); public static final ParseField EXPIRES_IN = new ParseField("expires_in"); @@ -108,8 +93,8 @@ public class ForecastJobAction extends Action 0) { - throw new IllegalArgumentException("[" + DURATION.getPreferredName() + "] must be " + MAX_DURATION.getStringRep() - + " or less: [" + duration.getStringRep() + "]"); + throw new 
IllegalArgumentException("[" + DURATION.getPreferredName() + "] must be " + + MAX_DURATION.getStringRep() + " or less: [" + duration.getStringRep() + "]"); } } @@ -247,69 +232,5 @@ public class ForecastJobAction extends Action { - - @Inject - public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ClusterService clusterService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - AutodetectProcessManager processManager) { - super(settings, ForecastJobAction.NAME, threadPool, clusterService, transportService, actionFilters, - indexNameExpressionResolver, ForecastJobAction.Request::new, ForecastJobAction.Response::new, ThreadPool.Names.SAME, - processManager); - // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread - } - - @Override - protected ForecastJobAction.Response readTaskResponse(StreamInput in) throws IOException { - Response response = new Response(); - response.readFrom(in); - return response; - } - - @Override - protected void taskOperation(Request request, OpenJobAction.JobTask task, ActionListener listener) { - ClusterState state = clusterService.state(); - Job job = JobManager.getJobOrThrowIfUnknown(task.getJobId(), state); - validate(job, request); - - ForecastParams.Builder paramsBuilder = ForecastParams.builder(); - - if (request.getDuration() != null) { - paramsBuilder.duration(request.getDuration()); - } - - if (request.getExpiresIn() != null) { - paramsBuilder.expiresIn(request.getExpiresIn()); - } - - ForecastParams params = paramsBuilder.build(); - processManager.forecastJob(task, params, e -> { - if (e == null) { - listener.onResponse(new Response(true, params.getForecastId())); - } else { - listener.onFailure(e); - } - }); - } - - static void validate(Job job, Request request) { - if (job.getJobVersion() == null || job.getJobVersion().before(Version.V_6_1_0)) { - throw ExceptionsHelper.badRequestException( - "Cannot run forecast 
because jobs created prior to version 6.1 are not supported"); - } - - if (request.getDuration() != null) { - TimeValue duration = request.getDuration(); - TimeValue bucketSpan = job.getAnalysisConfig().getBucketSpan(); - - if (duration.compareTo(bucketSpan) < 0) { - throw ExceptionsHelper.badRequestException( - "[" + DURATION.getPreferredName() + "] must be greater or equal to the bucket span: [" + duration.getStringRep() - + "/" + bucketSpan.getStringRep() + "]"); - } - } - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetBucketsAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetBucketsAction.java similarity index 83% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetBucketsAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetBucketsAction.java index b80449b246d..2560c1b18a2 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetBucketsAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetBucketsAction.java @@ -7,34 +7,22 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.Version; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ml.action.util.PageParams; import org.elasticsearch.xpack.ml.action.util.QueryPage; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.persistence.BucketsQueryBuilder; -import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.results.Bucket; import org.elasticsearch.xpack.ml.job.results.Result; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; @@ -371,47 +359,4 @@ public class GetBucketsAction extends Action { - - private final JobProvider jobProvider; - private final JobManager jobManager; - private final Client client; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - JobProvider jobProvider, JobManager jobManager, Client client) { - super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new); - this.jobProvider = jobProvider; - this.jobManager = jobManager; - this.client = client; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - jobManager.getJobOrThrowIfUnknown(request.getJobId()); - - BucketsQueryBuilder query = - new BucketsQueryBuilder().expand(request.expand) - .includeInterim(request.excludeInterim == false) - .start(request.start) - .end(request.end) - .anomalyScoreThreshold(request.anomalyScore) - 
.sortField(request.sort) - .sortDescending(request.descending); - - if (request.pageParams != null) { - query.from(request.pageParams.getFrom()) - .size(request.pageParams.getSize()); - } - if (request.timestamp != null) { - query.timestamp(request.timestamp); - } else { - query.start(request.start); - query.end(request.end); - } - jobProvider.buckets(request.jobId, query, q -> listener.onResponse(new Response(q)), listener::onFailure, client); - } - } - } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarEventsAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarEventsAction.java similarity index 77% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarEventsAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarEventsAction.java index b9e3c43635f..1f596070a4c 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarEventsAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarEventsAction.java @@ -258,62 +258,4 @@ public class GetCalendarEventsAction extends Action { - - private final JobProvider jobProvider; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - JobProvider jobProvider) { - super(settings, NAME, threadPool, transportService, actionFilters, - indexNameExpressionResolver, Request::new); - this.jobProvider = jobProvider; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - ActionListener calendarExistsListener = ActionListener.wrap( - r -> { - SpecialEventsQueryBuilder query = new SpecialEventsQueryBuilder() - .after(request.getAfter()) - .before(request.getBefore()) - .from(request.getPageParams().getFrom()) - .size(request.getPageParams().getSize()); - - if 
(GetCalendarsAction.Request.ALL.equals(request.getCalendarId()) == false) { - query.calendarIds(Collections.singletonList(request.getCalendarId())); - } - - ActionListener> eventsListener = ActionListener.wrap( - events -> { - listener.onResponse(new Response(events)); - }, - listener::onFailure - ); - - if (request.getJobId() != null) { - jobProvider.specialEventsForJob(request.getJobId(), query, eventsListener); - } else { - jobProvider.specialEvents(query, eventsListener); - } - }, - listener::onFailure); - - checkCalendarExists(request.getCalendarId(), calendarExistsListener); - } - - private void checkCalendarExists(String calendarId, ActionListener listener) { - if (GetCalendarsAction.Request.ALL.equals(calendarId)) { - listener.onResponse(true); - return; - } - - jobProvider.calendar(calendarId, ActionListener.wrap( - c -> listener.onResponse(true), - listener::onFailure - )); - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarsAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarsAction.java similarity index 71% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarsAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarsAction.java index 138840d7798..261ec0d2070 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarsAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarsAction.java @@ -6,36 +6,25 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; import 
org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ml.action.util.PageParams; import org.elasticsearch.xpack.ml.action.util.QueryPage; import org.elasticsearch.xpack.ml.calendars.Calendar; -import org.elasticsearch.xpack.ml.job.persistence.CalendarQueryBuilder; -import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import java.io.IOException; -import java.util.Collections; import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -227,54 +216,4 @@ public class GetCalendarsAction extends Action { - - private final JobProvider jobProvider; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - JobProvider jobProvider) { - super(settings, NAME, threadPool, transportService, actionFilters, - indexNameExpressionResolver, Request::new); - this.jobProvider = jobProvider; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - final String calendarId = request.getCalendarId(); - if (request.getCalendarId() != null && Request.ALL.equals(request.getCalendarId()) == 
false) { - getCalendar(calendarId, listener); - } else { - PageParams pageParams = request.getPageParams(); - if (pageParams == null) { - pageParams = PageParams.defaultParams(); - } - getCalendars(pageParams, listener); - } - } - - private void getCalendar(String calendarId, ActionListener listener) { - - jobProvider.calendar(calendarId, ActionListener.wrap( - calendar -> { - QueryPage page = new QueryPage<>(Collections.singletonList(calendar), 1, Calendar.RESULTS_FIELD); - listener.onResponse(new Response(page)); - }, - listener::onFailure - )); - } - - private void getCalendars(PageParams pageParams, ActionListener listener) { - CalendarQueryBuilder query = new CalendarQueryBuilder().pageParams(pageParams).sort(true); - jobProvider.calendars(query, ActionListener.wrap( - calendars -> { - listener.onResponse(new Response(calendars)); - }, - listener::onFailure - )); - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetCategoriesAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetCategoriesAction.java similarity index 80% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetCategoriesAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetCategoriesAction.java index 60d74434981..a76106abf25 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetCategoriesAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetCategoriesAction.java @@ -6,32 +6,21 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import 
org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ml.action.util.PageParams; import org.elasticsearch.xpack.ml.action.util.QueryPage; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.results.CategoryDefinition; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; @@ -93,6 +82,12 @@ Action { - - private final JobProvider jobProvider; - private final Client client; - private final JobManager jobManager; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, JobProvider jobProvider, Client client, JobManager jobManager) { - super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new); - this.jobProvider = jobProvider; - this.client = client; - this.jobManager = jobManager; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - jobManager.getJobOrThrowIfUnknown(request.jobId); - - Integer from = request.pageParams != null ? 
request.pageParams.getFrom() : null; - Integer size = request.pageParams != null ? request.pageParams.getSize() : null; - jobProvider.categoryDefinitions(request.jobId, request.categoryId, from, size, - r -> listener.onResponse(new Response(r)), listener::onFailure, client); - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetDatafeedsAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetDatafeedsAction.java similarity index 68% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetDatafeedsAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetDatafeedsAction.java index 99531e4f609..8f0d33df3ad 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetDatafeedsAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetDatafeedsAction.java @@ -7,39 +7,23 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.Version; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; import org.elasticsearch.action.support.master.MasterNodeReadRequest; -import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.action.util.QueryPage; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; import java.util.Objects; -import java.util.Set; public class GetDatafeedsAction extends Action { @@ -200,45 +184,4 @@ public class GetDatafeedsAction extends Action { - - @Inject - public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, GetDatafeedsAction.NAME, transportService, clusterService, threadPool, actionFilters, - Request::new, indexNameExpressionResolver); - } - - @Override - protected String executor() { - return ThreadPool.Names.SAME; - } - - @Override - protected Response newResponse() { - return new Response(); - } - - @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { - logger.debug("Get datafeed '{}'", request.getDatafeedId()); - - MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE); - if (mlMetadata == null) { - mlMetadata = MlMetadata.EMPTY_METADATA; - } - Set expandedDatafeedIds = mlMetadata.expandDatafeedIds(request.getDatafeedId(), request.allowNoDatafeeds()); - List datafeedConfigs = new ArrayList<>(); - for (String expandedDatafeedId : expandedDatafeedIds) { - 
datafeedConfigs.add(mlMetadata.getDatafeed(expandedDatafeedId)); - } - - listener.onResponse(new Response(new QueryPage<>(datafeedConfigs, datafeedConfigs.size(), DatafeedConfig.RESULTS_FIELD))); - } - - @Override - protected ClusterBlockException checkBlock(Request request, ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetDatafeedsStatsAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetDatafeedsStatsAction.java similarity index 72% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetDatafeedsStatsAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetDatafeedsStatsAction.java index 1dd4671e2bb..f10d927c3de 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetDatafeedsStatsAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetDatafeedsStatsAction.java @@ -7,47 +7,28 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.Version; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; import org.elasticsearch.action.support.master.MasterNodeReadRequest; -import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; -import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MlMetadata; -import org.elasticsearch.xpack.ml.action.GetDatafeedsStatsAction.Response.DatafeedStats; import org.elasticsearch.xpack.ml.action.util.QueryPage; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedState; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.PersistentTask; import java.io.IOException; -import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Set; -import java.util.stream.Collectors; public class GetDatafeedsStatsAction extends Action { @@ -301,63 +282,4 @@ public class GetDatafeedsStatsAction extends Action { - - @Inject - public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, GetDatafeedsStatsAction.NAME, transportService, clusterService, threadPool, actionFilters, - Request::new, indexNameExpressionResolver); - } - - @Override - protected String executor() { - return ThreadPool.Names.SAME; - } - - @Override - protected Response 
newResponse() { - return new Response(); - } - - @Override - protected void masterOperation(Request request, ClusterState state, - ActionListener listener) throws Exception { - logger.debug("Get stats for datafeed '{}'", request.getDatafeedId()); - - MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE); - if (mlMetadata == null) { - mlMetadata = MlMetadata.EMPTY_METADATA; - } - - Set expandedDatafeedIds = mlMetadata.expandDatafeedIds(request.getDatafeedId(), request.allowNoDatafeeds()); - - PersistentTasksCustomMetaData tasksInProgress = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - List results = expandedDatafeedIds.stream() - .map(datafeedId -> getDatafeedStats(datafeedId, state, tasksInProgress)) - .collect(Collectors.toList()); - QueryPage statsPage = new QueryPage<>(results, results.size(), - DatafeedConfig.RESULTS_FIELD); - listener.onResponse(new Response(statsPage)); - } - - private static DatafeedStats getDatafeedStats(String datafeedId, ClusterState state, - PersistentTasksCustomMetaData tasks) { - PersistentTask task = MlMetadata.getDatafeedTask(datafeedId, tasks); - DatafeedState datafeedState = MlMetadata.getDatafeedState(datafeedId, tasks); - DiscoveryNode node = null; - String explanation = null; - if (task != null) { - node = state.nodes().get(task.getExecutorNode()); - explanation = task.getAssignment().getExplanation(); - } - return new DatafeedStats(datafeedId, datafeedState, node, explanation); - } - - @Override - protected ClusterBlockException checkBlock(Request request, ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); - } - } } diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetFiltersAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetFiltersAction.java new file mode 100644 index 00000000000..a4df4dcda1b --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetFiltersAction.java @@ 
-0,0 +1,185 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.StatusToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.ml.action.util.PageParams; +import org.elasticsearch.xpack.ml.action.util.QueryPage; +import org.elasticsearch.xpack.ml.job.config.MlFilter; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + + +public class GetFiltersAction extends Action { + + public static final GetFiltersAction INSTANCE = new GetFiltersAction(); + public static final String NAME = "cluster:admin/xpack/ml/filters/get"; + + private GetFiltersAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends ActionRequest { + + private String filterId; + private PageParams pageParams; + + public Request() { + } + + public void setFilterId(String filterId) { + this.filterId = filterId; + } + + public String getFilterId() { + return filterId; + } + + 
public PageParams getPageParams() { + return pageParams; + } + + public void setPageParams(PageParams pageParams) { + this.pageParams = pageParams; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (pageParams != null && filterId != null) { + validationException = addValidationError("Params [" + PageParams.FROM.getPreferredName() + + ", " + PageParams.SIZE.getPreferredName() + "] are incompatible with [" + + MlFilter.ID.getPreferredName() + "]", validationException); + } + return validationException; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + filterId = in.readOptionalString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalString(filterId); + } + + @Override + public int hashCode() { + return Objects.hash(filterId); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(filterId, other.filterId); + } + } + + public static class RequestBuilder extends ActionRequestBuilder { + + public RequestBuilder(ElasticsearchClient client) { + super(client, INSTANCE, new Request()); + } + } + + public static class Response extends ActionResponse implements StatusToXContentObject { + + private QueryPage filters; + + public Response(QueryPage filters) { + this.filters = filters; + } + + Response() { + } + + public QueryPage getFilters() { + return filters; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + filters = new QueryPage<>(in, MlFilter::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + filters.writeTo(out); + } + + @Override + public RestStatus status() { + return RestStatus.OK; + } + + 
@Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + filters.doXContentBody(builder, params); + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(filters); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Response other = (Response) obj; + return Objects.equals(filters, other.filters); + } + + @Override + public final String toString() { + return Strings.toString(this); + } + } + +} + diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetInfluencersAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetInfluencersAction.java similarity index 82% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetInfluencersAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetInfluencersAction.java index 577ce51b560..e6543186910 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetInfluencersAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetInfluencersAction.java @@ -6,34 +6,22 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ml.action.util.PageParams; import org.elasticsearch.xpack.ml.action.util.QueryPage; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder; -import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.results.Influencer; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; @@ -301,30 +289,4 @@ extends Action { - - private final JobProvider jobProvider; - private final Client client; - private final JobManager jobManager; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, JobProvider jobProvider, Client client, JobManager jobManager) { - super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new); - this.jobProvider = jobProvider; - this.client = client; - this.jobManager = jobManager; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - jobManager.getJobOrThrowIfUnknown(request.jobId); - - InfluencersQueryBuilder.InfluencersQuery query = new InfluencersQueryBuilder().includeInterim(request.excludeInterim == false) - 
.start(request.start).end(request.end).from(request.pageParams.getFrom()).size(request.pageParams.getSize()) - .influencerScoreThreshold(request.influencerScore).sortField(request.sort).sortDescending(request.descending).build(); - jobProvider.influencers(request.jobId, query, page -> listener.onResponse(new Response(page)), listener::onFailure, client); - } - } - } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetJobsAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetJobsAction.java similarity index 71% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetJobsAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetJobsAction.java index d936dfa6d1c..6d9df4770ae 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetJobsAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetJobsAction.java @@ -7,31 +7,18 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.Version; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; import org.elasticsearch.action.support.master.MasterNodeReadRequest; -import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ml.action.util.QueryPage; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; @@ -195,39 +182,4 @@ public class GetJobsAction extends Action { - - private final JobManager jobManager; - - @Inject - public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - JobManager jobManager) { - super(settings, GetJobsAction.NAME, transportService, clusterService, threadPool, actionFilters, - Request::new, indexNameExpressionResolver); - this.jobManager = jobManager; - } - - @Override - protected String executor() { - return ThreadPool.Names.SAME; - } - - @Override - protected Response newResponse() { - return new Response(); - } - - @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { - logger.debug("Get job '{}'", request.getJobId()); - QueryPage jobs = jobManager.expandJobs(request.getJobId(), request.allowNoJobs(), state); - listener.onResponse(new Response(jobs)); - } - - @Override - protected ClusterBlockException checkBlock(Request request, ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetJobsStatsAction.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetJobsStatsAction.java similarity index 59% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetJobsStatsAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetJobsStatsAction.java index aa08716ef58..4fc3e14759a 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetJobsStatsAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetJobsStatsAction.java @@ -7,62 +7,37 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.Version; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.BaseTasksRequest; import org.elasticsearch.action.support.tasks.BaseTasksResponse; -import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; 
-import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.action.util.QueryPage; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.config.JobState; -import org.elasticsearch.xpack.ml.job.persistence.JobProvider; -import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.PersistentTask; import java.io.IOException; -import java.time.Duration; -import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.BiConsumer; -import java.util.function.Consumer; -import java.util.stream.Collectors; public class GetJobsStatsAction extends Action { @@ -105,6 +80,10 @@ public class GetJobsStatsAction extends Action getExpandedJobsIds() { return expandedJobsIds; } + + public void setExpandedJobsIds(List expandedJobsIds) { this.expandedJobsIds = expandedJobsIds; } + public void setAllowNoJobs(boolean allowNoJobs) { this.allowNoJobs = allowNoJobs; } @@ -119,7 +98,7 @@ public class GetJobsStatsAction extends Action> { - - private final ClusterService clusterService; - private final AutodetectProcessManager processManager; - private final 
JobProvider jobProvider; - - @Inject - public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, - ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver, - AutodetectProcessManager processManager, JobProvider jobProvider) { - super(settings, GetJobsStatsAction.NAME, threadPool, clusterService, transportService, actionFilters, - indexNameExpressionResolver, Request::new, Response::new, ThreadPool.Names.MANAGEMENT); - this.clusterService = clusterService; - this.processManager = processManager; - this.jobProvider = jobProvider; - } - - @Override - protected void doExecute(Task task, Request request, ActionListener listener) { - MlMetadata clusterMlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE); - MlMetadata mlMetadata = (clusterMlMetadata == null) ? MlMetadata.EMPTY_METADATA : clusterMlMetadata; - request.expandedJobsIds = new ArrayList<>(mlMetadata.expandJobIds(request.getJobId(), request.allowNoJobs())); - ActionListener finalListener = listener; - listener = ActionListener.wrap(response -> gatherStatsForClosedJobs(mlMetadata, - request, response, finalListener), listener::onFailure); - super.doExecute(task, request, listener); - } - - @Override - protected Response newResponse(Request request, List> tasks, - List taskOperationFailures, - List failedNodeExceptions) { - List stats = new ArrayList<>(); - for (QueryPage task : tasks) { - stats.addAll(task.results()); - } - return new Response(taskOperationFailures, failedNodeExceptions, new QueryPage<>(stats, stats.size(), Job.RESULTS_FIELD)); - } - - @Override - protected QueryPage readTaskResponse(StreamInput in) throws IOException { - return new QueryPage<>(in, Response.JobStats::new); - } - - @Override - protected void taskOperation(Request request, OpenJobAction.JobTask task, - ActionListener> listener) { - String jobId = task.getJobId(); - logger.debug("Get stats for job [{}]", jobId); - 
ClusterState state = clusterService.state(); - PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - Optional> stats = processManager.getStatistics(task); - if (stats.isPresent()) { - PersistentTask pTask = MlMetadata.getJobTask(jobId, tasks); - DiscoveryNode node = state.nodes().get(pTask.getExecutorNode()); - JobState jobState = MlMetadata.getJobState(jobId, tasks); - String assignmentExplanation = pTask.getAssignment().getExplanation(); - TimeValue openTime = durationToTimeValue(processManager.jobOpenTime(task)); - Response.JobStats jobStats = new Response.JobStats(jobId, stats.get().v1(), stats.get().v2(), jobState, - node, assignmentExplanation, openTime); - listener.onResponse(new QueryPage<>(Collections.singletonList(jobStats), 1, Job.RESULTS_FIELD)); - } else { - listener.onResponse(new QueryPage<>(Collections.emptyList(), 0, Job.RESULTS_FIELD)); - } - } - - // Up until now we gathered the stats for jobs that were open, - // This method will fetch the stats for missing jobs, that was stored in the jobs index - void gatherStatsForClosedJobs(MlMetadata mlMetadata, Request request, Response response, - ActionListener listener) { - List jobIds = determineNonDeletedJobIdsWithoutLiveStats(mlMetadata, - request.expandedJobsIds, response.jobsStats.results()); - if (jobIds.isEmpty()) { - listener.onResponse(response); - return; - } - - AtomicInteger counter = new AtomicInteger(jobIds.size()); - AtomicArray jobStats = new AtomicArray<>(jobIds.size()); - PersistentTasksCustomMetaData tasks = clusterService.state().getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - for (int i = 0; i < jobIds.size(); i++) { - int slot = i; - String jobId = jobIds.get(i); - gatherDataCountsAndModelSizeStats(jobId, (dataCounts, modelSizeStats) -> { - JobState jobState = MlMetadata.getJobState(jobId, tasks); - PersistentTasksCustomMetaData.PersistentTask pTask = MlMetadata.getJobTask(jobId, tasks); - String assignmentExplanation = 
null; - if (pTask != null) { - assignmentExplanation = pTask.getAssignment().getExplanation(); - } - jobStats.set(slot, new Response.JobStats(jobId, dataCounts, modelSizeStats, jobState, null, - assignmentExplanation, null)); - if (counter.decrementAndGet() == 0) { - List results = response.getResponse().results(); - results.addAll(jobStats.asList()); - listener.onResponse(new Response(response.getTaskFailures(), response.getNodeFailures(), - new QueryPage<>(results, results.size(), Job.RESULTS_FIELD))); - } - }, listener::onFailure); - } - } - - void gatherDataCountsAndModelSizeStats(String jobId, BiConsumer handler, - Consumer errorHandler) { - jobProvider.dataCounts(jobId, dataCounts -> { - jobProvider.modelSizeStats(jobId, modelSizeStats -> { - handler.accept(dataCounts, modelSizeStats); - }, errorHandler); - }, errorHandler); - } - - static TimeValue durationToTimeValue(Optional duration) { - if (duration.isPresent()) { - return TimeValue.timeValueSeconds(duration.get().getSeconds()); - } else { - return null; - } - } - - static List determineNonDeletedJobIdsWithoutLiveStats(MlMetadata mlMetadata, - List requestedJobIds, - List stats) { - Set excludeJobIds = stats.stream().map(Response.JobStats::getJobId).collect(Collectors.toSet()); - return requestedJobIds.stream().filter(jobId -> !excludeJobIds.contains(jobId) && - !mlMetadata.isJobDeleted(jobId)).collect(Collectors.toList()); - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetModelSnapshotsAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetModelSnapshotsAction.java similarity index 78% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetModelSnapshotsAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetModelSnapshotsAction.java index 81e8b2c31df..f1540469ca3 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetModelSnapshotsAction.java +++ 
b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetModelSnapshotsAction.java @@ -6,39 +6,28 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ml.action.util.PageParams; import org.elasticsearch.xpack.ml.action.util.QueryPage; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.Objects; -import java.util.stream.Collectors; public class GetModelSnapshotsAction extends Action { @@ -298,42 +287,4 @@ extends Action { - - private final 
JobProvider jobProvider; - private final JobManager jobManager; - - @Inject - public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, JobProvider jobProvider, JobManager jobManager) { - super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new); - this.jobProvider = jobProvider; - this.jobManager = jobManager; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - logger.debug("Get model snapshots for job {} snapshot ID {}. from = {}, size = {}" - + " start = '{}', end='{}', sort={} descending={}", - request.getJobId(), request.getSnapshotId(), request.pageParams.getFrom(), request.pageParams.getSize(), - request.getStart(), request.getEnd(), request.getSort(), request.getDescOrder()); - - jobManager.getJobOrThrowIfUnknown(request.getJobId()); - - jobProvider.modelSnapshots(request.getJobId(), request.pageParams.getFrom(), request.pageParams.getSize(), - request.getStart(), request.getEnd(), request.getSort(), request.getDescOrder(), request.getSnapshotId(), - page -> { - listener.onResponse(new Response(clearQuantiles(page))); - }, listener::onFailure); - } - - public static QueryPage clearQuantiles(QueryPage page) { - if (page.results() == null) { - return page; - } - return new QueryPage<>(page.results().stream().map(snapshot -> - new ModelSnapshot.Builder(snapshot).setQuantiles(null).build()) - .collect(Collectors.toList()), page.count(), page.getResultsField()); - } - } } diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetOverallBucketsAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetOverallBucketsAction.java new file mode 100644 index 00000000000..bce6e5a124b --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetOverallBucketsAction.java @@ -0,0 +1,349 @@ +/* + * 
Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.xpack.ml.action.util.QueryPage; +import org.elasticsearch.xpack.ml.job.config.Job; +import org.elasticsearch.xpack.ml.job.messages.Messages; +import org.elasticsearch.xpack.ml.job.results.OverallBucket; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Collections; +import java.util.Objects; +import java.util.function.LongSupplier; + +/** + *

    + * This action returns summarized bucket results over multiple jobs. + * Overall buckets have the span of the largest job's bucket_span. + * Their score is calculated by finding the max anomaly score per job + * and then averaging the top N. + *

    + *

    + * Overall buckets can be optionally aggregated into larger intervals + * by setting the bucket_span parameter. When that is the case, the + * overall_score is the max of the overall buckets that are within + * the interval. + *

    + */ +public class GetOverallBucketsAction + extends Action { + + public static final GetOverallBucketsAction INSTANCE = new GetOverallBucketsAction(); + public static final String NAME = "cluster:monitor/xpack/ml/job/results/overall_buckets/get"; + + private GetOverallBucketsAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + public static final ParseField TOP_N = new ParseField("top_n"); + public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); + public static final ParseField OVERALL_SCORE = new ParseField("overall_score"); + public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); + public static final ParseField START = new ParseField("start"); + public static final ParseField END = new ParseField("end"); + public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs"); + + private static final ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); + + static { + PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID); + PARSER.declareInt(Request::setTopN, TOP_N); + PARSER.declareString(Request::setBucketSpan, BUCKET_SPAN); + PARSER.declareDouble(Request::setOverallScore, OVERALL_SCORE); + PARSER.declareBoolean(Request::setExcludeInterim, EXCLUDE_INTERIM); + PARSER.declareString((request, startTime) -> request.setStart(parseDateOrThrow( + startTime, START, System::currentTimeMillis)), START); + PARSER.declareString((request, endTime) -> request.setEnd(parseDateOrThrow( + endTime, END, System::currentTimeMillis)), END); + PARSER.declareBoolean(Request::setAllowNoJobs, ALLOW_NO_JOBS); + } + + static long parseDateOrThrow(String date, ParseField paramName, LongSupplier now) { + DateMathParser dateMathParser = 
new DateMathParser(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER); + + try { + return dateMathParser.parse(date, now); + } catch (Exception e) { + String msg = Messages.getMessage(Messages.REST_INVALID_DATETIME_PARAMS, paramName.getPreferredName(), date); + throw new ElasticsearchParseException(msg, e); + } + } + + public static Request parseRequest(String jobId, XContentParser parser) { + Request request = PARSER.apply(parser, null); + if (jobId != null) { + request.jobId = jobId; + } + return request; + } + + private String jobId; + private int topN = 1; + private TimeValue bucketSpan; + private double overallScore = 0.0; + private boolean excludeInterim = false; + private Long start; + private Long end; + private boolean allowNoJobs = true; + + Request() { + } + + public Request(String jobId) { + this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); + } + + public String getJobId() { + return jobId; + } + + public int getTopN() { + return topN; + } + + public void setTopN(int topN) { + if (topN <= 0) { + throw new IllegalArgumentException("[topN] parameter must be positive, found [" + topN + "]"); + } + this.topN = topN; + } + + public TimeValue getBucketSpan() { + return bucketSpan; + } + + public void setBucketSpan(TimeValue bucketSpan) { + this.bucketSpan = bucketSpan; + } + + public void setBucketSpan(String bucketSpan) { + this.bucketSpan = TimeValue.parseTimeValue(bucketSpan, BUCKET_SPAN.getPreferredName()); + } + + public double getOverallScore() { + return overallScore; + } + + public void setOverallScore(double overallScore) { + this.overallScore = overallScore; + } + + public boolean isExcludeInterim() { + return excludeInterim; + } + + public void setExcludeInterim(boolean excludeInterim) { + this.excludeInterim = excludeInterim; + } + + public Long getStart() { + return start; + } + + public void setStart(Long start) { + this.start = start; + } + + public void setStart(String start) { + setStart(parseDateOrThrow(start, START, 
System::currentTimeMillis)); + } + + public Long getEnd() { + return end; + } + + public void setEnd(Long end) { + this.end = end; + } + + public void setEnd(String end) { + setEnd(parseDateOrThrow(end, END, System::currentTimeMillis)); + } + + public boolean allowNoJobs() { + return allowNoJobs; + } + + public void setAllowNoJobs(boolean allowNoJobs) { + this.allowNoJobs = allowNoJobs; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + jobId = in.readString(); + topN = in.readVInt(); + bucketSpan = in.readOptionalWriteable(TimeValue::new); + overallScore = in.readDouble(); + excludeInterim = in.readBoolean(); + start = in.readOptionalLong(); + end = in.readOptionalLong(); + allowNoJobs = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(jobId); + out.writeVInt(topN); + out.writeOptionalWriteable(bucketSpan); + out.writeDouble(overallScore); + out.writeBoolean(excludeInterim); + out.writeOptionalLong(start); + out.writeOptionalLong(end); + out.writeBoolean(allowNoJobs); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Job.ID.getPreferredName(), jobId); + builder.field(TOP_N.getPreferredName(), topN); + if (bucketSpan != null) { + builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan.getStringRep()); + } + builder.field(OVERALL_SCORE.getPreferredName(), overallScore); + builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim); + if (start != null) { + builder.field(START.getPreferredName(), String.valueOf(start)); + } + if (end != null) { + builder.field(END.getPreferredName(), String.valueOf(end)); + } + builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs); + builder.endObject(); + return builder; + } + + @Override + 
public int hashCode() { + return Objects.hash(jobId, topN, bucketSpan, overallScore, excludeInterim, start, end, allowNoJobs); + } + + @Override + public boolean equals(Object other) { + if (other == null) { + return false; + } + if (getClass() != other.getClass()) { + return false; + } + Request that = (Request) other; + return Objects.equals(jobId, that.jobId) && + this.topN == that.topN && + Objects.equals(bucketSpan, that.bucketSpan) && + this.excludeInterim == that.excludeInterim && + this.overallScore == that.overallScore && + Objects.equals(start, that.start) && + Objects.equals(end, that.end) && + this.allowNoJobs == that.allowNoJobs; + } + } + + static class RequestBuilder extends ActionRequestBuilder { + + RequestBuilder(ElasticsearchClient client) { + super(client, INSTANCE, new Request()); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private QueryPage overallBuckets; + + Response() { + overallBuckets = new QueryPage<>(Collections.emptyList(), 0, OverallBucket.RESULTS_FIELD); + } + + Response(QueryPage overallBuckets) { + this.overallBuckets = overallBuckets; + } + + public QueryPage getOverallBuckets() { + return overallBuckets; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + overallBuckets = new QueryPage<>(in, OverallBucket::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + overallBuckets.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + overallBuckets.doXContentBody(builder, params); + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(overallBuckets); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Response other = 
(Response) obj; + return Objects.equals(overallBuckets, other.overallBuckets); + } + + @Override + public final String toString() { + return Strings.toString(this); + } + } + +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetRecordsAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetRecordsAction.java similarity index 81% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetRecordsAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetRecordsAction.java index a76c0cf348c..b421917970d 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetRecordsAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/GetRecordsAction.java @@ -6,35 +6,22 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; 
-import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ml.action.util.PageParams; import org.elasticsearch.xpack.ml.action.util.QueryPage; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.persistence.JobProvider; -import org.elasticsearch.xpack.ml.job.persistence.RecordsQueryBuilder; import org.elasticsearch.xpack.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.ml.job.results.Influencer; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; @@ -302,38 +289,4 @@ public class GetRecordsAction extends Action { - - private final JobProvider jobProvider; - private final JobManager jobManager; - private final Client client; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - JobProvider jobProvider, JobManager jobManager, Client client) { - super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new); - this.jobProvider = jobProvider; - this.jobManager = jobManager; - this.client = client; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - - jobManager.getJobOrThrowIfUnknown(request.getJobId()); - - RecordsQueryBuilder query = new RecordsQueryBuilder() - .includeInterim(request.excludeInterim == false) - .epochStart(request.start) - .epochEnd(request.end) - .from(request.pageParams.getFrom()) - .size(request.pageParams.getSize()) - .recordScore(request.recordScoreFilter) - .sortField(request.sort) - .sortDescending(request.descending); - jobProvider.records(request.jobId, query, page -> listener.onResponse(new Response(page)), listener::onFailure, client); - } - } - } diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/IsolateDatafeedAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/IsolateDatafeedAction.java similarity index 57% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/IsolateDatafeedAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/IsolateDatafeedAction.java index aff1d628360..e7b88bec670 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/IsolateDatafeedAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/IsolateDatafeedAction.java @@ -5,44 +5,25 @@ */ package org.elasticsearch.xpack.ml.action; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.BaseTasksRequest; import org.elasticsearch.action.support.tasks.BaseTasksResponse; -import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; -import org.elasticsearch.common.xcontent.ToXContent.Params; import 
org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.MlMetadata; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; import java.io.IOException; -import java.util.List; import java.util.Objects; /** @@ -54,7 +35,7 @@ import java.util.Objects; * task ensures the current datafeed task can complete inconsequentially while * the datafeed persistent task may be stopped or reassigned on another node. */ -public class IsolateDatafeedAction +public class IsolateDatafeedAction extends Action { public static final IsolateDatafeedAction INSTANCE = new IsolateDatafeedAction(); @@ -103,14 +84,14 @@ public class IsolateDatafeedAction Request() { } - private String getDatafeedId() { + String getDatafeedId() { return datafeedId; } @Override public boolean match(Task task) { - String expectedDescription = MlMetadata.datafeedTaskId(datafeedId); - if (task instanceof StartDatafeedAction.DatafeedTask && expectedDescription.equals(task.getDescription())){ + String expectedDescription = MLMetadataField.datafeedTaskId(datafeedId); + if (task instanceof StartDatafeedAction.DatafeedTaskMatcher && expectedDescription.equals(task.getDescription())){ return true; } return false; @@ -197,63 +178,4 @@ public class IsolateDatafeedAction } } - public static class TransportAction extends TransportTasksAction { - - @Inject - public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, - ActionFilters actionFilters, 
IndexNameExpressionResolver indexNameExpressionResolver, - ClusterService clusterService) { - super(settings, IsolateDatafeedAction.NAME, threadPool, clusterService, transportService, actionFilters, - indexNameExpressionResolver, Request::new, Response::new, MachineLearning.UTILITY_THREAD_POOL_NAME); - } - - @Override - protected void doExecute(Task task, Request request, ActionListener listener) { - final ClusterState state = clusterService.state(); - PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - PersistentTasksCustomMetaData.PersistentTask datafeedTask = MlMetadata.getDatafeedTask(request.getDatafeedId(), tasks); - - if (datafeedTask == null || datafeedTask.getExecutorNode() == null) { - // No running datafeed task to isolate - listener.onResponse(new Response()); - return; - } - - String executorNode = datafeedTask.getExecutorNode(); - DiscoveryNodes nodes = state.nodes(); - if (nodes.resolveNode(executorNode).getVersion().before(Version.V_5_5_0)) { - listener.onFailure(new ElasticsearchException("Force delete datafeed is not supported because the datafeed task " + - "is running on a node [" + executorNode + "] with a version prior to " + Version.V_5_5_0)); - return; - } - - request.setNodes(datafeedTask.getExecutorNode()); - super.doExecute(task, request, listener); - } - - @Override - protected Response newResponse(Request request, List tasks, List taskOperationFailures, - List failedNodeExceptions) { - if (taskOperationFailures.isEmpty() == false) { - throw org.elasticsearch.ExceptionsHelper - .convertToElastic(taskOperationFailures.get(0).getCause()); - } else if (failedNodeExceptions.isEmpty() == false) { - throw org.elasticsearch.ExceptionsHelper - .convertToElastic(failedNodeExceptions.get(0)); - } else { - return new Response(); - } - } - - @Override - protected void taskOperation(Request request, StartDatafeedAction.DatafeedTask datafeedTask, ActionListener listener) { - datafeedTask.isolate(); - 
listener.onResponse(new Response()); - } - - @Override - protected Response readTaskResponse(StreamInput in) throws IOException { - return new Response(in); - } - } } diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/JobTaskRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/JobTaskRequest.java new file mode 100644 index 00000000000..cb7bbf59e50 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/JobTaskRequest.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.support.tasks.BaseTasksRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.ml.job.config.Job; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import java.io.IOException; + +public class JobTaskRequest> extends BaseTasksRequest { + + String jobId; + + JobTaskRequest() { + } + + JobTaskRequest(String jobId) { + this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); + } + + public String getJobId() { + return jobId; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + jobId = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(jobId); + } + + @Override + public boolean match(Task task) { + return OpenJobAction.JobTaskMatcher.match(task, jobId); + } +} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/KillProcessAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/KillProcessAction.java new file 
mode 100644 index 00000000000..515399e031b --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/KillProcessAction.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.support.tasks.BaseTasksResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; +import java.util.Objects; + +public class KillProcessAction extends Action { + + public static final KillProcessAction INSTANCE = new KillProcessAction(); + public static final String NAME = "cluster:internal/xpack/ml/job/kill/process"; + + private KillProcessAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client, this); + } + + @Override + public Response newResponse() { + return new Response(); + } + + static class RequestBuilder extends ActionRequestBuilder { + + RequestBuilder(ElasticsearchClient client, KillProcessAction action) { + super(client, action, new Request()); + } + } + + public static class Request extends JobTaskRequest { + + public Request(String jobId) { + super(jobId); + } + + Request() { + super(); + } + } + + public static class Response extends BaseTasksResponse implements Writeable { + + private boolean killed; + + Response() { + super(null, null); + } + + Response(StreamInput in) throws IOException { + super(null, null); + readFrom(in); + } + + Response(boolean killed) { + super(null, null); 
+ this.killed = killed; + } + + public boolean isKilled() { + return killed; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + killed = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeBoolean(killed); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return killed == response.killed; + } + + @Override + public int hashCode() { + return Objects.hash(killed); + } + } + +} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/OpenJobAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/OpenJobAction.java new file mode 100644 index 00000000000..7dcb968fe5b --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/OpenJobAction.java @@ -0,0 +1,296 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.Version; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.ml.MachineLearningClientActionPlugin; +import org.elasticsearch.xpack.ml.job.config.Job; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.persistent.PersistentTaskParams; + +import java.io.IOException; +import java.util.Objects; + +public class OpenJobAction extends Action { + + public static final OpenJobAction INSTANCE = new OpenJobAction(); + public static final String NAME = "cluster:admin/xpack/ml/job/open"; + public static final String TASK_NAME = "xpack/ml/job"; + + private OpenJobAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client, this); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends MasterNodeRequest implements ToXContentObject { + + public static Request fromXContent(XContentParser parser) { + return 
parseRequest(null, parser); + } + + public static Request parseRequest(String jobId, XContentParser parser) { + JobParams jobParams = JobParams.PARSER.apply(parser, null); + if (jobId != null) { + jobParams.jobId = jobId; + } + return new Request(jobParams); + } + + private JobParams jobParams; + + public Request(JobParams jobParams) { + this.jobParams = jobParams; + } + + public Request(String jobId) { + this.jobParams = new JobParams(jobId); + } + + public Request(StreamInput in) throws IOException { + readFrom(in); + } + + Request() { + } + + public JobParams getJobParams() { + return jobParams; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + jobParams = new JobParams(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + jobParams.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + jobParams.toXContent(builder, params); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(jobParams); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || obj.getClass() != getClass()) { + return false; + } + OpenJobAction.Request other = (OpenJobAction.Request) obj; + return Objects.equals(jobParams, other.jobParams); + } + + @Override + public String toString() { + return Strings.toString(this); + } + } + + public static class JobParams implements PersistentTaskParams { + + /** TODO Remove in 7.0.0 */ + public static final ParseField IGNORE_DOWNTIME = new ParseField("ignore_downtime"); + + public static final ParseField TIMEOUT = new ParseField("timeout"); + public static ObjectParser PARSER = new ObjectParser<>(TASK_NAME, JobParams::new); + + static { + PARSER.declareString(JobParams::setJobId, Job.ID); + 
PARSER.declareBoolean((p, v) -> {}, IGNORE_DOWNTIME); + PARSER.declareString((params, val) -> + params.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); + } + + public static JobParams fromXContent(XContentParser parser) { + return parseRequest(null, parser); + } + + public static JobParams parseRequest(String jobId, XContentParser parser) { + JobParams params = PARSER.apply(parser, null); + if (jobId != null) { + params.jobId = jobId; + } + return params; + } + + private String jobId; + // A big state can take a while to restore. For symmetry with the _close endpoint any + // changes here should be reflected there too. + private TimeValue timeout = MachineLearningClientActionPlugin.STATE_PERSIST_RESTORE_TIMEOUT; + + JobParams() { + } + + public JobParams(String jobId) { + this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); + } + + public JobParams(StreamInput in) throws IOException { + jobId = in.readString(); + if (in.getVersion().onOrBefore(Version.V_5_5_0)) { + // Read `ignoreDowntime` + in.readBoolean(); + } + timeout = TimeValue.timeValueMillis(in.readVLong()); + } + + public String getJobId() { + return jobId; + } + + public void setJobId(String jobId) { + this.jobId = jobId; + } + + public TimeValue getTimeout() { + return timeout; + } + + public void setTimeout(TimeValue timeout) { + this.timeout = timeout; + } + + @Override + public String getWriteableName() { + return TASK_NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(jobId); + if (out.getVersion().onOrBefore(Version.V_5_5_0)) { + // Write `ignoreDowntime` - true by default + out.writeBoolean(true); + } + out.writeVLong(timeout.millis()); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(Job.ID.getPreferredName(), jobId); + builder.field(TIMEOUT.getPreferredName(), 
timeout.getStringRep()); + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(jobId, timeout); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || obj.getClass() != getClass()) { + return false; + } + OpenJobAction.JobParams other = (OpenJobAction.JobParams) obj; + return Objects.equals(jobId, other.jobId) && + Objects.equals(timeout, other.timeout); + } + + @Override + public String toString() { + return Strings.toString(this); + } + } + + public static class Response extends AcknowledgedResponse { + public Response() { + super(); + } + + public Response(boolean acknowledged) { + super(acknowledged); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + readAcknowledged(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + writeAcknowledged(out); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AcknowledgedResponse that = (AcknowledgedResponse) o; + return isAcknowledged() == that.isAcknowledged(); + } + + @Override + public int hashCode() { + return Objects.hash(isAcknowledged()); + } + + } + + public interface JobTaskMatcher { + + static boolean match(Task task, String expectedJobId) { + String expectedDescription = "job-" + expectedJobId; + return task instanceof JobTaskMatcher && expectedDescription.equals(task.getDescription()); + } + } + + static class RequestBuilder extends ActionRequestBuilder { + + RequestBuilder(ElasticsearchClient client, OpenJobAction action) { + super(client, action, new Request()); + } + } + +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PostCalendarEventsAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PostCalendarEventsAction.java similarity index 74% rename from 
plugin/src/main/java/org/elasticsearch/xpack/ml/action/PostCalendarEventsAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PostCalendarEventsAction.java index 4b0b645f2f9..b7932b34790 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PostCalendarEventsAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PostCalendarEventsAction.java @@ -243,68 +243,4 @@ public class PostCalendarEventsAction extends Action { - - private final Client client; - private final JobProvider jobProvider; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - Client client, JobProvider jobProvider) { - super(settings, NAME, threadPool, transportService, actionFilters, - indexNameExpressionResolver, Request::new); - this.client = client; - this.jobProvider = jobProvider; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - List events = request.getSpecialEvents(); - - ActionListener calendarExistsListener = ActionListener.wrap( - r -> { - BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); - - for (SpecialEvent event: events) { - IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE); - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - indexRequest.source(event.toXContent(builder, - new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, "true")))); - } catch (IOException e) { - throw new IllegalStateException("Failed to serialise special event", e); - } - bulkRequestBuilder.add(indexRequest); - } - - bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - - executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), - new ActionListener() { - @Override - public void onResponse(BulkResponse 
response) { - listener.onResponse(new Response(events)); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure( - ExceptionsHelper.serverError("Error indexing special event", e)); - } - }); - }, - listener::onFailure); - - checkCalendarExists(request.getCalendarId(), calendarExistsListener); - } - - private void checkCalendarExists(String calendarId, ActionListener listener) { - jobProvider.calendar(calendarId, ActionListener.wrap( - c -> listener.onResponse(true), - listener::onFailure - )); - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PostDataAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PostDataAction.java similarity index 72% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/PostDataAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PostDataAction.java index 25c5f329be1..9880bf595bf 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PostDataAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PostDataAction.java @@ -6,35 +6,23 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ml.job.config.DataDescription; -import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; -import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams; -import org.elasticsearch.xpack.ml.job.process.autodetect.params.TimeRange; import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts; import java.io.IOException; import java.util.Objects; -import java.util.Optional; public class PostDataAction extends Action { @@ -71,7 +59,7 @@ public class PostDataAction extends Action { + public static class Request extends JobTaskRequest { public static final ParseField RESET_START = new ParseField("reset_start"); public static final ParseField RESET_END = new ParseField("reset_end"); @@ -234,40 +222,4 @@ public class PostDataAction extends Action { - - @Inject - public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ClusterService clusterService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - AutodetectProcessManager processManager) { - super(settings, PostDataAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, - Request::new, Response::new, ThreadPool.Names.SAME, processManager); - // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread - } - - @Override - protected Response readTaskResponse(StreamInput in) throws IOException { - Response response = new Response(); - response.readFrom(in); - return response; - } - - @Override - protected void taskOperation(Request request, OpenJobAction.JobTask task, 
ActionListener listener) { - TimeRange timeRange = TimeRange.builder().startTime(request.getResetStart()).endTime(request.getResetEnd()).build(); - DataLoadParams params = new DataLoadParams(timeRange, Optional.ofNullable(request.getDataDescription())); - try { - processManager.processData(task, request.content.streamInput(), request.getXContentType(), params, (dataCounts, e) -> { - if (dataCounts != null) { - listener.onResponse(new Response(dataCounts)); - } else { - listener.onFailure(e); - } - }); - } catch (Exception e) { - listener.onFailure(e); - } - } - - } } diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PreviewDatafeedAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PreviewDatafeedAction.java new file mode 100644 index 00000000000..6a12bf615e8 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PreviewDatafeedAction.java @@ -0,0 +1,168 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Objects; + +public class PreviewDatafeedAction extends Action { + + public static final PreviewDatafeedAction INSTANCE = new PreviewDatafeedAction(); + public static final String NAME = "cluster:admin/xpack/ml/datafeeds/preview"; + + private PreviewDatafeedAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + private String datafeedId; + + Request() { + } + + public Request(String datafeedId) { + setDatafeedId(datafeedId); + } + + public String getDatafeedId() { + return datafeedId; + } + + public final void setDatafeedId(String datafeedId) { + this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws 
IOException { + super.readFrom(in); + datafeedId = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(datafeedId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(datafeedId); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(datafeedId, other.datafeedId); + } + } + + static class RequestBuilder extends ActionRequestBuilder { + + RequestBuilder(ElasticsearchClient client) { + super(client, INSTANCE, new Request()); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private BytesReference preview; + + Response() { + } + + Response(BytesReference preview) { + this.preview = preview; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + preview = in.readBytesReference(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeBytesReference(preview); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.rawValue(preview, XContentType.JSON); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(preview); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Response other = (Response) obj; + return Objects.equals(preview, other.preview); + } + + @Override + public final String toString() { + return 
Strings.toString(this); + } + } + +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutCalendarAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PutCalendarAction.java similarity index 58% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutCalendarAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PutCalendarAction.java index 6ba0aa5eaf3..e7cb59b484b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutCalendarAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PutCalendarAction.java @@ -6,52 +6,28 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.DocWriteRequest; -import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; import 
org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MlMetaIndex; -import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.calendars.Calendar; import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.watcher.support.Exceptions; import java.io.IOException; -import java.util.Collections; -import java.util.List; import java.util.Objects; import java.util.Set; import java.util.function.Consumer; import static org.elasticsearch.action.ValidateActions.addValidationError; -import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; public class PutCalendarAction extends Action { public static final PutCalendarAction INSTANCE = new PutCalendarAction(); @@ -202,66 +178,4 @@ public class PutCalendarAction extends Action { - - private final Client client; - private final ClusterService clusterService; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - Client client, ClusterService clusterService) { - super(settings, NAME, threadPool, transportService, actionFilters, - indexNameExpressionResolver, Request::new); - this.client = client; - this.clusterService = clusterService; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - Calendar calendar = request.getCalendar(); - - checkJobsExist(calendar.getJobIds(), listener::onFailure); - - IndexRequest indexRequest = new 
IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, calendar.documentId()); - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - indexRequest.source(calendar.toXContent(builder, - new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, "true")))); - } catch (IOException e) { - throw new IllegalStateException("Failed to serialise calendar with id [" + calendar.getId() + "]", e); - } - - // Make it an error to overwrite an existing calendar - indexRequest.opType(DocWriteRequest.OpType.CREATE); - indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - - executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, - new ActionListener() { - @Override - public void onResponse(IndexResponse indexResponse) { - listener.onResponse(new Response(calendar)); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure( - ExceptionsHelper.serverError("Error putting calendar with id [" + calendar.getId() + "]", e)); - } - }); - } - - private void checkJobsExist(List jobIds, Consumer errorHandler) { - ClusterState state = clusterService.state(); - MlMetadata mlMetadata = state.getMetaData().custom(MlMetadata.TYPE); - for (String jobId: jobIds) { - Set jobs = mlMetadata.expandJobIds(jobId, true); - if (jobs.isEmpty()) { - errorHandler.accept(ExceptionsHelper.missingJobException(jobId)); - return; - } - } - } - } } diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java new file mode 100644 index 00000000000..2aa6306f383 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java @@ -0,0 +1,160 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; + +import java.io.IOException; +import java.util.Objects; + +public class PutDatafeedAction extends Action { + + public static final PutDatafeedAction INSTANCE = new PutDatafeedAction(); + public static final String NAME = "cluster:admin/xpack/ml/datafeeds/put"; + + private PutDatafeedAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client, this); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends AcknowledgedRequest implements ToXContentObject { + + public static Request parseRequest(String datafeedId, XContentParser parser) { + DatafeedConfig.Builder datafeed = DatafeedConfig.CONFIG_PARSER.apply(parser, null); + datafeed.setId(datafeedId); + return new Request(datafeed.build()); + } + + private DatafeedConfig datafeed; + + public Request(DatafeedConfig datafeed) { + this.datafeed = datafeed; + } + + Request() { + } + + public DatafeedConfig getDatafeed() { + return datafeed; + } + + @Override + public 
ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + datafeed = new DatafeedConfig(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + datafeed.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + datafeed.toXContent(builder, params); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(datafeed, request.datafeed); + } + + @Override + public int hashCode() { + return Objects.hash(datafeed); + } + } + + public static class RequestBuilder extends MasterNodeOperationRequestBuilder { + + public RequestBuilder(ElasticsearchClient client, PutDatafeedAction action) { + super(client, action, new Request()); + } + } + + public static class Response extends AcknowledgedResponse implements ToXContentObject { + + private DatafeedConfig datafeed; + + public Response(boolean acked, DatafeedConfig datafeed) { + super(acked); + this.datafeed = datafeed; + } + + Response() { + } + + public DatafeedConfig getResponse() { + return datafeed; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + readAcknowledged(in); + datafeed = new DatafeedConfig(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + writeAcknowledged(out); + datafeed.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + datafeed.doXContentBody(builder, params); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() 
!= o.getClass()) return false; + Response response = (Response) o; + return Objects.equals(datafeed, response.datafeed); + } + + @Override + public int hashCode() { + return Objects.hash(datafeed); + } + } + +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutFilterAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PutFilterAction.java similarity index 57% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutFilterAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PutFilterAction.java index b6ed3e45060..c89fb4086ac 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutFilterAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PutFilterAction.java @@ -6,45 +6,24 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.bulk.BulkAction; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; -import 
org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MlMetaIndex; import org.elasticsearch.xpack.ml.job.config.MlFilter; import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import java.io.IOException; -import java.util.Collections; import java.util.Objects; -import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; - public class PutFilterAction extends Action { @@ -161,47 +140,5 @@ public class PutFilterAction extends Action { - - private final Client client; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - Client client) { - super(settings, NAME, threadPool, transportService, actionFilters, - indexNameExpressionResolver, Request::new); - this.client = client; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - MlFilter filter = request.getFilter(); - IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filter.documentId()); - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, "true")); - indexRequest.source(filter.toXContent(builder, params)); - } catch (IOException e) { - throw new IllegalStateException("Failed to serialise filter with id [" + filter.getId() + "]", e); - } - BulkRequestBuilder bulkRequestBuilder = 
client.prepareBulk(); - bulkRequestBuilder.add(indexRequest); - bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - - executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), - new ActionListener() { - @Override - public void onResponse(BulkResponse indexResponse) { - listener.onResponse(new Response()); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(ExceptionsHelper.serverError("Error putting filter with id [" + filter.getId() + "]", e)); - } - }); - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutJobAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PutJobAction.java similarity index 69% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutJobAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PutJobAction.java index 548544e59b8..ebc711cfd48 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutJobAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/PutJobAction.java @@ -6,34 +6,17 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; -import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.license.LicenseUtils; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.XPackPlugin; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.messages.Messages; @@ -200,48 +183,4 @@ public class PutJobAction extends Action { - - private final JobManager jobManager; - private final XPackLicenseState licenseState; - - @Inject - public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, XPackLicenseState licenseState, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, JobManager jobManager) { - super(settings, PutJobAction.NAME, transportService, clusterService, threadPool, actionFilters, - indexNameExpressionResolver, Request::new); - this.licenseState = licenseState; - this.jobManager = jobManager; - } - - @Override - protected String executor() { - return ThreadPool.Names.SAME; - } - - @Override - protected Response newResponse() { - return new Response(); - } - - @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { - jobManager.putJob(request, state, listener); - } - - @Override - protected ClusterBlockException checkBlock(Request request, 
ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); - } - - @Override - protected void doExecute(Task task, Request request, ActionListener listener) { - if (licenseState.isMachineLearningAllowed()) { - super.doExecute(task, request, listener); - } else { - listener.onFailure(LicenseUtils.newComplianceException(XPackPlugin.MACHINE_LEARNING)); - } - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/RevertModelSnapshotAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/RevertModelSnapshotAction.java similarity index 51% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/RevertModelSnapshotAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/RevertModelSnapshotAction.java index f01e9d529fe..71844b65c70 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/RevertModelSnapshotAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/RevertModelSnapshotAction.java @@ -5,50 +5,28 @@ */ package org.elasticsearch.xpack.ml.action; -import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; -import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import 
org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.config.JobState; -import org.elasticsearch.xpack.ml.job.messages.Messages; -import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; -import org.elasticsearch.xpack.ml.job.persistence.JobDataDeleter; -import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import java.io.IOException; -import java.util.Date; import java.util.Objects; -import java.util.function.Consumer; public class RevertModelSnapshotAction extends Action { @@ -250,128 +228,4 @@ extends Action { - - private final Client client; - private final JobManager jobManager; - private final JobProvider jobProvider; - private final JobDataCountsPersister jobDataCountsPersister; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver 
indexNameExpressionResolver, JobManager jobManager, JobProvider jobProvider, - ClusterService clusterService, Client client, JobDataCountsPersister jobDataCountsPersister) { - super(settings, NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, Request::new); - this.client = client; - this.jobManager = jobManager; - this.jobProvider = jobProvider; - this.jobDataCountsPersister = jobDataCountsPersister; - } - - @Override - protected String executor() { - return ThreadPool.Names.SAME; - } - - @Override - protected Response newResponse() { - return new Response(); - } - - @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { - logger.debug("Received request to revert to snapshot id '{}' for job '{}', deleting intervening results: {}", - request.getSnapshotId(), request.getJobId(), request.getDeleteInterveningResults()); - - Job job = JobManager.getJobOrThrowIfUnknown(request.getJobId(), clusterService.state()); - JobState jobState = jobManager.getJobState(job.getId()); - if (jobState.equals(JobState.CLOSED) == false) { - throw ExceptionsHelper.conflictStatusException(Messages.getMessage(Messages.REST_JOB_NOT_CLOSED_REVERT)); - } - - getModelSnapshot(request, jobProvider, modelSnapshot -> { - ActionListener wrappedListener = listener; - if (request.getDeleteInterveningResults()) { - wrappedListener = wrapDeleteOldDataListener(wrappedListener, modelSnapshot, request.getJobId()); - wrappedListener = wrapRevertDataCountsListener(wrappedListener, modelSnapshot, request.getJobId()); - } - jobManager.revertSnapshot(request, wrappedListener, modelSnapshot); - }, listener::onFailure); - } - - private void getModelSnapshot(Request request, JobProvider provider, Consumer handler, - Consumer errorHandler) { - logger.info("Reverting to snapshot '" + request.getSnapshotId() + "'"); - - provider.getModelSnapshot(request.getJobId(), request.getSnapshotId(), modelSnapshot 
-> { - if (modelSnapshot == null) { - throw new ResourceNotFoundException(Messages.getMessage(Messages.REST_NO_SUCH_MODEL_SNAPSHOT, request.getSnapshotId(), - request.getJobId())); - } - handler.accept(modelSnapshot.result); - }, errorHandler); - } - - private ActionListener wrapDeleteOldDataListener( - ActionListener listener, - ModelSnapshot modelSnapshot, String jobId) { - - // If we need to delete buckets that occurred after the snapshot, we - // wrap the listener with one that invokes the OldDataRemover on - // acknowledged responses - return ActionListener.wrap(response -> { - if (response.isAcknowledged()) { - Date deleteAfter = modelSnapshot.getLatestResultTimeStamp(); - logger.debug("Removing intervening records: last record: " + deleteAfter + ", last result: " - + modelSnapshot.getLatestResultTimeStamp()); - - logger.info("Deleting results after '" + deleteAfter + "'"); - - JobDataDeleter dataDeleter = new JobDataDeleter(client, jobId); - dataDeleter.deleteResultsFromTime(deleteAfter.getTime() + 1, new ActionListener() { - @Override - public void onResponse(Boolean success) { - listener.onResponse(response); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); - } - }, listener::onFailure); - } - - private ActionListener wrapRevertDataCountsListener( - ActionListener listener, - ModelSnapshot modelSnapshot, String jobId) { - - - return ActionListener.wrap(response -> { - if (response.isAcknowledged()) { - jobProvider.dataCounts(jobId, counts -> { - counts.setLatestRecordTimeStamp(modelSnapshot.getLatestRecordTimeStamp()); - jobDataCountsPersister.persistDataCounts(jobId, counts, new ActionListener() { - @Override - public void onResponse(Boolean aBoolean) { - listener.onResponse(response); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); - }, listener::onFailure); - } - }, listener::onFailure); - } - - @Override - protected ClusterBlockException checkBlock(Request request, 
ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); - } - } - } diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/StartDatafeedAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/StartDatafeedAction.java new file mode 100644 index 00000000000..001a765cc1a --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/StartDatafeedAction.java @@ -0,0 +1,327 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ValidateActions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.ml.job.messages.Messages; +import 
org.elasticsearch.xpack.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.persistent.PersistentTaskParams; + +import java.io.IOException; +import java.util.Objects; +import java.util.function.LongSupplier; + +public class StartDatafeedAction + extends Action { + + public static final ParseField START_TIME = new ParseField("start"); + public static final ParseField END_TIME = new ParseField("end"); + public static final ParseField TIMEOUT = new ParseField("timeout"); + + public static final StartDatafeedAction INSTANCE = new StartDatafeedAction(); + public static final String NAME = "cluster:admin/xpack/ml/datafeed/start"; + public static final String TASK_NAME = "xpack/ml/datafeed"; + + private StartDatafeedAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client, this); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends MasterNodeRequest implements ToXContentObject { + + public static Request fromXContent(XContentParser parser) { + return parseRequest(null, parser); + } + + public static Request parseRequest(String datafeedId, XContentParser parser) { + DatafeedParams params = DatafeedParams.PARSER.apply(parser, null); + if (datafeedId != null) { + params.datafeedId = datafeedId; + } + return new Request(params); + } + + private DatafeedParams params; + + public Request(String datafeedId, long startTime) { + this.params = new DatafeedParams(datafeedId, startTime); + } + + public Request(String datafeedId, String startTime) { + this.params = new DatafeedParams(datafeedId, startTime); + } + + public Request(DatafeedParams params) { + this.params = params; + } + + public Request(StreamInput in) throws IOException { + readFrom(in); + } + + Request() { + } + + public DatafeedParams getParams() { + return params; + } + + @Override + public ActionRequestValidationException validate() { + 
ActionRequestValidationException e = null; + if (params.endTime != null && params.endTime <= params.startTime) { + e = ValidateActions.addValidationError(START_TIME.getPreferredName() + " [" + + params.startTime + "] must be earlier than " + END_TIME.getPreferredName() + + " [" + params.endTime + "]", e); + } + return e; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + params = new DatafeedParams(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + params.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + this.params.toXContent(builder, params); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(params); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(params, other.params); + } + } + + public static class DatafeedParams implements PersistentTaskParams { + + public static ObjectParser PARSER = new ObjectParser<>(TASK_NAME, DatafeedParams::new); + + static { + PARSER.declareString((params, datafeedId) -> params.datafeedId = datafeedId, DatafeedConfig.ID); + PARSER.declareString((params, startTime) -> params.startTime = parseDateOrThrow( + startTime, START_TIME, System::currentTimeMillis), START_TIME); + PARSER.declareString(DatafeedParams::setEndTime, END_TIME); + PARSER.declareString((params, val) -> + params.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); + } + + static long parseDateOrThrow(String date, ParseField paramName, LongSupplier now) { + DateMathParser dateMathParser = new DateMathParser(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER); + + try { + return dateMathParser.parse(date, now); + } catch (Exception e) { + String msg = 
Messages.getMessage(Messages.REST_INVALID_DATETIME_PARAMS, paramName.getPreferredName(), date); + throw new ElasticsearchParseException(msg, e); + } + } + + public static DatafeedParams fromXContent(XContentParser parser) { + return parseRequest(null, parser); + } + + public static DatafeedParams parseRequest(String datafeedId, XContentParser parser) { + DatafeedParams params = PARSER.apply(parser, null); + if (datafeedId != null) { + params.datafeedId = datafeedId; + } + return params; + } + + public DatafeedParams(String datafeedId, long startTime) { + this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); + this.startTime = startTime; + } + + public DatafeedParams(String datafeedId, String startTime) { + this(datafeedId, parseDateOrThrow(startTime, START_TIME, System::currentTimeMillis)); + } + + public DatafeedParams(StreamInput in) throws IOException { + datafeedId = in.readString(); + startTime = in.readVLong(); + endTime = in.readOptionalLong(); + timeout = TimeValue.timeValueMillis(in.readVLong()); + } + + DatafeedParams() { + } + + private String datafeedId; + private long startTime; + private Long endTime; + private TimeValue timeout = TimeValue.timeValueSeconds(20); + + public String getDatafeedId() { + return datafeedId; + } + + public long getStartTime() { + return startTime; + } + + public Long getEndTime() { + return endTime; + } + + public void setEndTime(String endTime) { + setEndTime(parseDateOrThrow(endTime, END_TIME, System::currentTimeMillis)); + } + + public void setEndTime(Long endTime) { + this.endTime = endTime; + } + + public TimeValue getTimeout() { + return timeout; + } + + public void setTimeout(TimeValue timeout) { + this.timeout = timeout; + } + + @Override + public String getWriteableName() { + return TASK_NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(datafeedId); + out.writeVLong(startTime); + out.writeOptionalLong(endTime); + 
out.writeVLong(timeout.millis()); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); + builder.field(START_TIME.getPreferredName(), String.valueOf(startTime)); + if (endTime != null) { + builder.field(END_TIME.getPreferredName(), String.valueOf(endTime)); + } + builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(datafeedId, startTime, endTime, timeout); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + DatafeedParams other = (DatafeedParams) obj; + return Objects.equals(datafeedId, other.datafeedId) && + Objects.equals(startTime, other.startTime) && + Objects.equals(endTime, other.endTime) && + Objects.equals(timeout, other.timeout); + } + } + + public static class Response extends AcknowledgedResponse { + public Response() { + super(); + } + + public Response(boolean acknowledged) { + super(acknowledged); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + readAcknowledged(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + writeAcknowledged(out); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AcknowledgedResponse that = (AcknowledgedResponse) o; + return isAcknowledged() == that.isAcknowledged(); + } + + @Override + public int hashCode() { + return Objects.hash(isAcknowledged()); + } + + } + + static class RequestBuilder extends ActionRequestBuilder { + + RequestBuilder(ElasticsearchClient client, StartDatafeedAction action) { + super(client, action, new Request()); + } + } + + public interface DatafeedTaskMatcher { 
+ + } + +} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/StopDatafeedAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/StopDatafeedAction.java new file mode 100644 index 00000000000..d2615c0e188 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/StopDatafeedAction.java @@ -0,0 +1,245 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.Version; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.tasks.BaseTasksRequest; +import org.elasticsearch.action.support.tasks.BaseTasksResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.ml.MLMetadataField; +import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Objects; + +public class StopDatafeedAction + extends Action { + + public static final StopDatafeedAction INSTANCE = new StopDatafeedAction(); + public static final String NAME = 
"cluster:admin/xpack/ml/datafeed/stop"; + public static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueMinutes(5); + + private StopDatafeedAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client, this); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends BaseTasksRequest implements ToXContentObject { + + public static final ParseField TIMEOUT = new ParseField("timeout"); + public static final ParseField FORCE = new ParseField("force"); + public static final ParseField ALLOW_NO_DATAFEEDS = new ParseField("allow_no_datafeeds"); + + public static ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); + + static { + PARSER.declareString((request, datafeedId) -> request.datafeedId = datafeedId, DatafeedConfig.ID); + PARSER.declareString((request, val) -> + request.setStopTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); + PARSER.declareBoolean(Request::setForce, FORCE); + PARSER.declareBoolean(Request::setAllowNoDatafeeds, ALLOW_NO_DATAFEEDS); + } + + public static Request fromXContent(XContentParser parser) { + return parseRequest(null, parser); + } + + public static Request parseRequest(String datafeedId, XContentParser parser) { + Request request = PARSER.apply(parser, null); + if (datafeedId != null) { + request.datafeedId = datafeedId; + } + return request; + } + + private String datafeedId; + private String[] resolvedStartedDatafeedIds; + private TimeValue stopTimeout = DEFAULT_TIMEOUT; + private boolean force = false; + private boolean allowNoDatafeeds = true; + + public Request(String datafeedId) { + this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); + this.resolvedStartedDatafeedIds = new String[] { datafeedId }; + } + + Request() { + } + + String getDatafeedId() { + return datafeedId; + } + + String[] 
getResolvedStartedDatafeedIds() { + return resolvedStartedDatafeedIds; + } + + void setResolvedStartedDatafeedIds(String[] resolvedStartedDatafeedIds) { + this.resolvedStartedDatafeedIds = resolvedStartedDatafeedIds; + } + + public TimeValue getStopTimeout() { + return stopTimeout; + } + + public void setStopTimeout(TimeValue stopTimeout) { + this.stopTimeout = ExceptionsHelper.requireNonNull(stopTimeout, TIMEOUT.getPreferredName()); + } + + public boolean isForce() { + return force; + } + + public void setForce(boolean force) { + this.force = force; + } + + public boolean allowNoDatafeeds() { + return allowNoDatafeeds; + } + + public void setAllowNoDatafeeds(boolean allowNoDatafeeds) { + this.allowNoDatafeeds = allowNoDatafeeds; + } + + @Override + public boolean match(Task task) { + for (String id : resolvedStartedDatafeedIds) { + String expectedDescription = MLMetadataField.datafeedTaskId(id); + if (task instanceof StartDatafeedAction.DatafeedTaskMatcher && expectedDescription.equals(task.getDescription())){ + return true; + } + } + return false; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + datafeedId = in.readString(); + resolvedStartedDatafeedIds = in.readStringArray(); + stopTimeout = new TimeValue(in); + force = in.readBoolean(); + if (in.getVersion().onOrAfter(Version.V_6_1_0)) { + allowNoDatafeeds = in.readBoolean(); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(datafeedId); + out.writeStringArray(resolvedStartedDatafeedIds); + stopTimeout.writeTo(out); + out.writeBoolean(force); + if (out.getVersion().onOrAfter(Version.V_6_1_0)) { + out.writeBoolean(allowNoDatafeeds); + } + } + + @Override + public int hashCode() { + return Objects.hash(datafeedId, stopTimeout, force, allowNoDatafeeds); + } + + @Override + public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); + builder.field(TIMEOUT.getPreferredName(), stopTimeout.getStringRep()); + builder.field(FORCE.getPreferredName(), force); + builder.field(ALLOW_NO_DATAFEEDS.getPreferredName(), allowNoDatafeeds); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(datafeedId, other.datafeedId) && + Objects.equals(stopTimeout, other.stopTimeout) && + Objects.equals(force, other.force) && + Objects.equals(allowNoDatafeeds, other.allowNoDatafeeds); + } + } + + public static class Response extends BaseTasksResponse implements Writeable { + + private boolean stopped; + + public Response(boolean stopped) { + super(null, null); + this.stopped = stopped; + } + + public Response(StreamInput in) throws IOException { + super(null, null); + readFrom(in); + } + + public Response() { + super(null, null); + } + + public boolean isStopped() { + return stopped; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + stopped = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeBoolean(stopped); + } + } + + static class RequestBuilder extends ActionRequestBuilder { + + RequestBuilder(ElasticsearchClient client, StopDatafeedAction action) { + super(client, action, new Request()); + } + } + +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateCalendarJobAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/UpdateCalendarJobAction.java similarity index 71% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateCalendarJobAction.java rename to 
plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/UpdateCalendarJobAction.java index 7753cfb4923..619664ca757 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateCalendarJobAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/UpdateCalendarJobAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.calendars.Calendar; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; @@ -126,43 +127,5 @@ public class UpdateCalendarJobAction extends Action { - - private final ClusterService clusterService; - private final JobProvider jobProvider; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - ClusterService clusterService, JobProvider jobProvider) { - super(settings, NAME, threadPool, transportService, actionFilters, - indexNameExpressionResolver, Request::new); - this.clusterService = clusterService; - this.jobProvider = jobProvider; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - ClusterState state = clusterService.state(); - MlMetadata mlMetadata = state.getMetaData().custom(MlMetadata.TYPE); - for (String jobToAdd: request.getJobIdsToAdd()) { - if (mlMetadata.isGroupOrJob(jobToAdd) == false) { - listener.onFailure(ExceptionsHelper.missingJobException(jobToAdd)); - return; - } - } - - for (String jobToRemove: request.getJobIdsToRemove()) { - if (mlMetadata.isGroupOrJob(jobToRemove) == false) { - listener.onFailure(ExceptionsHelper.missingJobException(jobToRemove)); - return; - } - } - - 
jobProvider.updateCalendar(request.getCalendarId(), request.getJobIdsToAdd(), request.getJobIdsToRemove(), - c -> listener.onResponse(new PutCalendarAction.Response(c)), listener::onFailure); - } - } } diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/UpdateDatafeedAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/UpdateDatafeedAction.java new file mode 100644 index 00000000000..3b293b6ff97 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/UpdateDatafeedAction.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.ml.datafeed.DatafeedUpdate; + +import java.io.IOException; +import java.util.Objects; + +public class UpdateDatafeedAction extends Action { + + public static final UpdateDatafeedAction INSTANCE = new UpdateDatafeedAction(); + public static final String NAME = "cluster:admin/xpack/ml/datafeeds/update"; + + private UpdateDatafeedAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client, this); + } + + @Override 
+ public PutDatafeedAction.Response newResponse() { + return new PutDatafeedAction.Response(); + } + + public static class Request extends AcknowledgedRequest implements ToXContentObject { + + public static Request parseRequest(String datafeedId, XContentParser parser) { + DatafeedUpdate.Builder update = DatafeedUpdate.PARSER.apply(parser, null); + update.setId(datafeedId); + return new Request(update.build()); + } + + private DatafeedUpdate update; + + public Request(DatafeedUpdate update) { + this.update = update; + } + + Request() { + } + + public DatafeedUpdate getUpdate() { + return update; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + update = new DatafeedUpdate(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + update.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + update.toXContent(builder, params); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(update, request.update); + } + + @Override + public int hashCode() { + return Objects.hash(update); + } + } + + public static class RequestBuilder extends MasterNodeOperationRequestBuilder { + + public RequestBuilder(ElasticsearchClient client, UpdateDatafeedAction action) { + super(client, action, new Request()); + } + } + +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateJobAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/UpdateJobAction.java similarity index 62% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateJobAction.java rename to 
plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/UpdateJobAction.java index 97083a4c456..18c93a9e96b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateJobAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/UpdateJobAction.java @@ -6,30 +6,16 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; -import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.config.JobUpdate; import java.io.IOException; @@ -132,42 +118,4 @@ public class UpdateJobAction extends Action { - - private final JobManager jobManager; - - @Inject - public 
TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - JobManager jobManager) { - super(settings, UpdateJobAction.NAME, transportService, clusterService, threadPool, actionFilters, - indexNameExpressionResolver, UpdateJobAction.Request::new); - this.jobManager = jobManager; - } - - @Override - protected String executor() { - return ThreadPool.Names.SAME; - } - - @Override - protected PutJobAction.Response newResponse() { - return new PutJobAction.Response(); - } - - @Override - protected void masterOperation(Request request, ClusterState state, - ActionListener listener) throws Exception { - if (request.getJobId().equals(MetaData.ALL)) { - throw new IllegalArgumentException("Job Id " + MetaData.ALL + " cannot be for update"); - } - - jobManager.updateJob(request.getJobId(), request.getJobUpdate(), request, listener); - } - - @Override - protected ClusterBlockException checkBlock(UpdateJobAction.Request request, ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateModelSnapshotAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/UpdateModelSnapshotAction.java similarity index 60% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateModelSnapshotAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/UpdateModelSnapshotAction.java index 12904d8a159..abec19eb041 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateModelSnapshotAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/UpdateModelSnapshotAction.java @@ -5,53 +5,29 @@ */ package org.elasticsearch.xpack.ml.action; -import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.Action; 
-import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.bulk.BulkAction; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.StatusToXContentObject; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.messages.Messages; -import org.elasticsearch.xpack.ml.job.persistence.ElasticsearchMappings; -import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; -import 
org.elasticsearch.xpack.ml.job.results.Result; +import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshotField; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.Objects; -import java.util.function.Consumer; - -import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; public class UpdateModelSnapshotAction extends Action { @@ -79,7 +55,7 @@ public class UpdateModelSnapshotAction extends Action request.jobId = jobId, Job.ID); - PARSER.declareString((request, snapshotId) -> request.snapshotId = snapshotId, ModelSnapshot.SNAPSHOT_ID); + PARSER.declareString((request, snapshotId) -> request.snapshotId = snapshotId, ModelSnapshotField.SNAPSHOT_ID); PARSER.declareString(Request::setDescription, ModelSnapshot.DESCRIPTION); PARSER.declareBoolean(Request::setRetain, ModelSnapshot.RETAIN); } @@ -105,7 +81,7 @@ public class UpdateModelSnapshotAction extends Action { - - private final JobProvider jobProvider; - private final Client client; - - @Inject - public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, JobProvider jobProvider, Client client) { - super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new); - this.jobProvider = jobProvider; - this.client = client; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - logger.debug("Received request to update model snapshot [{}] for job [{}]", request.getSnapshotId(), request.getJobId()); - jobProvider.getModelSnapshot(request.getJobId(), request.getSnapshotId(), modelSnapshot -> { - if (modelSnapshot == null) { - listener.onFailure(new ResourceNotFoundException(Messages.getMessage( - Messages.REST_NO_SUCH_MODEL_SNAPSHOT, request.getSnapshotId(), 
request.getJobId()))); - } else { - Result updatedSnapshot = applyUpdate(request, modelSnapshot); - indexModelSnapshot(updatedSnapshot, b -> { - // The quantiles can be large, and totally dominate the output - - // it's clearer to remove them - listener.onResponse(new Response(new ModelSnapshot.Builder(updatedSnapshot.result).setQuantiles(null).build())); - }, listener::onFailure); - } - }, listener::onFailure); - } - - private static Result applyUpdate(Request request, Result target) { - ModelSnapshot.Builder updatedSnapshotBuilder = new ModelSnapshot.Builder(target.result); - if (request.getDescription() != null) { - updatedSnapshotBuilder.setDescription(request.getDescription()); - } - if (request.getRetain() != null) { - updatedSnapshotBuilder.setRetain(request.getRetain()); - } - return new Result(target.index, updatedSnapshotBuilder.build()); - } - - private void indexModelSnapshot(Result modelSnapshot, Consumer handler, Consumer errorHandler) { - IndexRequest indexRequest = new IndexRequest(modelSnapshot.index, ElasticsearchMappings.DOC_TYPE, - ModelSnapshot.documentId(modelSnapshot.result)); - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - modelSnapshot.result.toXContent(builder, ToXContent.EMPTY_PARAMS); - indexRequest.source(builder); - } catch (IOException e) { - errorHandler.accept(e); - return; - } - BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); - bulkRequestBuilder.add(indexRequest); - bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), - new ActionListener() { - @Override - public void onResponse(BulkResponse indexResponse) { - handler.accept(true); - } - - @Override - public void onFailure(Exception e) { - errorHandler.accept(e); - } - }); - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateProcessAction.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/UpdateProcessAction.java similarity index 70% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateProcessAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/UpdateProcessAction.java index 9ecbc67091f..8b9838909c6 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateProcessAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/UpdateProcessAction.java @@ -7,28 +7,17 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.Version; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.calendars.SpecialEvent; import org.elasticsearch.xpack.ml.job.config.JobUpdate; import org.elasticsearch.xpack.ml.job.config.ModelPlotConfig; -import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; -import org.elasticsearch.xpack.ml.job.process.autodetect.UpdateParams; import java.io.IOException; import java.util.List; @@ -65,7 +54,7 @@ public class 
UpdateProcessAction extends private boolean isUpdated; - private Response() { + Response() { super(null, null); this.isUpdated = true; } @@ -118,7 +107,7 @@ public class UpdateProcessAction extends } } - public static class Request extends TransportJobTaskAction.JobTaskRequest { + public static class Request extends JobTaskRequest { private ModelPlotConfig modelPlotConfig; private List detectorUpdates; @@ -195,40 +184,4 @@ public class UpdateProcessAction extends } } - public static class TransportAction extends TransportJobTaskAction { - - @Inject - public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ClusterService clusterService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - AutodetectProcessManager processManager) { - super(settings, NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, - Request::new, Response::new, ThreadPool.Names.SAME, processManager); - // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread - } - - @Override - protected Response readTaskResponse(StreamInput in) throws IOException { - Response response = new Response(); - response.readFrom(in); - return response; - } - - @Override - protected void taskOperation(Request request, OpenJobAction.JobTask task, ActionListener listener) { - try { - processManager.writeUpdateProcessMessage(task, - new UpdateParams(request.getModelPlotConfig(), - request.getDetectorUpdates(), request.isUpdateSpecialEvents()), - e -> { - if (e == null) { - listener.onResponse(new Response()); - } else { - listener.onFailure(e); - } - }); - } catch (Exception e) { - listener.onFailure(e); - } - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/ValidateDetectorAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/ValidateDetectorAction.java similarity index 78% rename from 
plugin/src/main/java/org/elasticsearch/xpack/ml/action/ValidateDetectorAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/ValidateDetectorAction.java index bc417798fa6..96a0728fabe 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/ValidateDetectorAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/ValidateDetectorAction.java @@ -6,24 +6,16 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ml.job.config.Detector; import java.io.IOException; @@ -143,19 +135,4 @@ extends Action { - - @Inject - public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, ValidateDetectorAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, - Request::new); - } - - 
@Override - protected void doExecute(Request request, ActionListener listener) { - listener.onResponse(new Response(true)); - } - - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/ValidateJobConfigAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/ValidateJobConfigAction.java similarity index 80% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/ValidateJobConfigAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/ValidateJobConfigAction.java index 4f406b50a88..46d664f1b15 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/ValidateJobConfigAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/ValidateJobConfigAction.java @@ -6,22 +6,14 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.messages.Messages; @@ -149,19 +141,4 @@ extends Action { - - @Inject - public TransportAction(Settings settings, TransportService 
transportService, ThreadPool threadPool, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, ValidateJobConfigAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, - Request::new); - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - listener.onResponse(new Response(true)); - } - - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/util/PageParams.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/util/PageParams.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/util/PageParams.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/util/PageParams.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/util/QueryPage.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/util/QueryPage.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/util/QueryPage.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/action/util/QueryPage.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/Calendar.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/calendars/Calendar.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/Calendar.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/calendars/Calendar.java index 5a99fc6c6ef..f061efe5ebf 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/Calendar.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/calendars/Calendar.java @@ -25,8 +25,8 @@ public class Calendar implements ToXContentObject, Writeable { public static final String CALENDAR_TYPE = "calendar"; public static final ParseField TYPE = new ParseField("type"); - public static final ParseField ID = new ParseField("calendar_id"); public static final 
ParseField JOB_IDS = new ParseField("job_ids"); + public static final ParseField ID = new ParseField("calendar_id"); private static final String DOCUMENT_ID_PREFIX = "calendar_"; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/SpecialEvent.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/calendars/SpecialEvent.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/SpecialEvent.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/calendars/SpecialEvent.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/ChunkingConfig.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/datafeed/ChunkingConfig.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/ChunkingConfig.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/datafeed/ChunkingConfig.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfig.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfig.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfig.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfig.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidator.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidator.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidator.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidator.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedState.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedState.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedState.java rename to 
plugin/core/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedState.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdate.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdate.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdate.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdate.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractor.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractor.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractor.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractor.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/ExtractorUtils.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/ExtractorUtils.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/ExtractorUtils.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/ExtractorUtils.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfig.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfig.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfig.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfig.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java rename to 
plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Condition.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/Condition.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Condition.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/Condition.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Connective.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/Connective.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Connective.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/Connective.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/DataDescription.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/DataDescription.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/DataDescription.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/DataDescription.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/DefaultDetectorDescription.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/DefaultDetectorDescription.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/DefaultDetectorDescription.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/DefaultDetectorDescription.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/DetectionRule.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/DetectionRule.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/DetectionRule.java rename to 
plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/DetectionRule.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Detector.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/Detector.java similarity index 99% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Detector.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/Detector.java index 726fef8c91a..1569836f0d3 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Detector.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/Detector.java @@ -465,7 +465,7 @@ public class Detector implements ToXContentObject, Writeable { * Segments the analysis along another field to have completely * independent baselines for each instance of partitionfield * - * @return The Partition Field + * @return The Partition ThrottlerField */ public String getPartitionFieldName() { return partitionFieldName; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Job.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/Job.java similarity index 99% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Job.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/Job.java index 78998d86a11..6f64e309ec3 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Job.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/Job.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.xpack.ml.MlParserType; import org.elasticsearch.xpack.ml.job.messages.Messages; -import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import 
org.elasticsearch.xpack.ml.utils.MlStrings; import org.elasticsearch.xpack.ml.utils.time.TimeUtils; @@ -54,7 +54,7 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO public static final String ANOMALY_DETECTOR_JOB_TYPE = "anomaly_detector"; /* - * Field names used in serialization + * ThrottlerField names used in serialization */ public static final ParseField ID = new ParseField("job_id"); public static final ParseField JOB_TYPE = new ParseField("job_type"); @@ -268,7 +268,7 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO * @return The job's index name */ public String getResultsIndexName() { - return AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + resultsIndexName; + return AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + resultsIndexName; } /** @@ -1118,8 +1118,8 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO ExceptionsHelper.requireNonNull(createTime, CREATE_TIME.getPreferredName()); if (Strings.isNullOrEmpty(resultsIndexName)) { - resultsIndexName = AnomalyDetectorsIndex.RESULTS_INDEX_DEFAULT; - } else if (!resultsIndexName.equals(AnomalyDetectorsIndex.RESULTS_INDEX_DEFAULT)) { + resultsIndexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; + } else if (!resultsIndexName.equals(AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT)) { // User-defined names are prepended with "custom" // Conditional guards against multiple prepending due to updates instead of first creation resultsIndexName = resultsIndexName.startsWith("custom-") diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/JobState.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/JobState.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/JobState.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/JobState.java diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/JobTaskStatus.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/JobTaskStatus.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/JobTaskStatus.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/JobTaskStatus.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/JobUpdate.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/JobUpdate.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/JobUpdate.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/JobUpdate.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/MlFilter.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/MlFilter.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/MlFilter.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/MlFilter.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/ModelPlotConfig.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/ModelPlotConfig.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/ModelPlotConfig.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/ModelPlotConfig.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Operator.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/Operator.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Operator.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/Operator.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleAction.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleCondition.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleCondition.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleCondition.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleCondition.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleConditionType.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleConditionType.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleConditionType.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleConditionType.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/groups/GroupOrJob.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/groups/GroupOrJob.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/groups/GroupOrJob.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/groups/GroupOrJob.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/groups/GroupOrJobLookup.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/groups/GroupOrJobLookup.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/groups/GroupOrJobLookup.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/groups/GroupOrJobLookup.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/messages/Messages.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/messages/Messages.java 
similarity index 97% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/messages/Messages.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/messages/Messages.java index 8eecd9b18ff..97d275de3f1 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/messages/Messages.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/messages/Messages.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.ml.job.messages; -import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MachineLearningClientActionPlugin; import java.text.MessageFormat; import java.util.Locale; @@ -110,7 +110,8 @@ public final class Messages { public static final String JOB_CONFIG_FIELD_VALUE_TOO_LOW = "{0} cannot be less than {1,number}. Value = {2,number}"; public static final String JOB_CONFIG_MODEL_MEMORY_LIMIT_TOO_LOW = "model_memory_limit must be at least 1 MiB. Value = {0,number}"; public static final String JOB_CONFIG_MODEL_MEMORY_LIMIT_GREATER_THAN_MAX = - "model_memory_limit [{0}] must be less than the value of the " + MachineLearning.MAX_MODEL_MEMORY_LIMIT.getKey() + + "model_memory_limit [{0}] must be less than the value of the " + + MachineLearningClientActionPlugin.MAX_MODEL_MEMORY_LIMIT.getKey() + " setting [{1}]"; public static final String JOB_CONFIG_FUNCTION_INCOMPATIBLE_PRESUMMARIZED = "The ''{0}'' function cannot be used in jobs that will take pre-summarized input"; @@ -122,9 +123,10 @@ public final class Messages { public static final String JOB_CONFIG_INVALID_CREATE_SETTINGS = "The job is configured with fields [{0}] that are illegal to set at job creation"; public static final String JOB_CONFIG_INVALID_FIELDNAME_CHARS = - "Invalid field name ''{0}''. Field names including over, by and partition fields cannot contain any of these characters: {1}"; + "Invalid field name ''{0}''. 
Field names including over, by and partition " + + "fields cannot contain any of these characters: {1}"; public static final String JOB_CONFIG_INVALID_FIELDNAME = - "Invalid field name ''{0}''. Field names including over, by and partition fields cannot be ''{1}''"; + "Invalid field name ''{0}''. Field names including over, by and partition fields cannot be ''{1}''"; public static final String JOB_CONFIG_INVALID_TIMEFORMAT = "Invalid Time format string ''{0}''"; public static final String JOB_CONFIG_MISSING_ANALYSISCONFIG = "An analysis_config must be set"; public static final String JOB_CONFIG_MISSING_DATA_DESCRIPTION = "A data_description must be set"; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/AnomalyDetectorsIndex.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/AnomalyDetectorsIndex.java similarity index 78% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/AnomalyDetectorsIndex.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/AnomalyDetectorsIndex.java index b52edcc07d2..a68d33a452f 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/AnomalyDetectorsIndex.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/AnomalyDetectorsIndex.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.ml.job.persistence; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MlMetadata; /** @@ -13,15 +14,11 @@ import org.elasticsearch.xpack.ml.MlMetadata; */ public final class AnomalyDetectorsIndex { - public static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-"; - private static final String STATE_INDEX_NAME = ".ml-state"; - public static final String RESULTS_INDEX_DEFAULT = "shared"; - private AnomalyDetectorsIndex() { } public static String jobResultsIndexPrefix() { - return RESULTS_INDEX_PREFIX; + return 
AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX; } /** @@ -30,7 +27,7 @@ public final class AnomalyDetectorsIndex { * @return The read alias */ public static String jobResultsAliasedName(String jobId) { - return RESULTS_INDEX_PREFIX + jobId; + return AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + jobId; } /** @@ -41,7 +38,7 @@ public final class AnomalyDetectorsIndex { public static String resultsWriteAlias(String jobId) { // ".write" rather than simply "write" to avoid the danger of clashing // with the read alias of a job whose name begins with "write-" - return RESULTS_INDEX_PREFIX + ".write-" + jobId; + return AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + ".write-" + jobId; } /** @@ -50,7 +47,7 @@ public final class AnomalyDetectorsIndex { * @return The index name */ public static String getPhysicalIndexFromState(ClusterState state, String jobId) { - MlMetadata meta = state.getMetaData().custom(MlMetadata.TYPE); + MlMetadata meta = state.getMetaData().custom(MLMetadataField.TYPE); return meta.getJobs().get(jobId).getResultsIndexName(); } @@ -59,6 +56,6 @@ public final class AnomalyDetectorsIndex { * @return The index name */ public static String jobStateIndexName() { - return STATE_INDEX_NAME; + return AnomalyDetectorsIndexFields.STATE_INDEX_NAME; } } diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/AnomalyDetectorsIndexFields.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/AnomalyDetectorsIndexFields.java new file mode 100644 index 00000000000..e2db2428527 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/AnomalyDetectorsIndexFields.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.job.persistence; + +public final class AnomalyDetectorsIndexFields { + + public static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-"; + public static final String STATE_INDEX_NAME = ".ml-state"; + public static final String RESULTS_INDEX_DEFAULT = "shared"; + + private AnomalyDetectorsIndexFields() {} +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIterator.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIterator.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIterator.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIterator.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java similarity index 100% rename from 
plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedResultsIterator.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedResultsIterator.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedResultsIterator.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedResultsIterator.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BucketsQueryBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BucketsQueryBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BucketsQueryBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BucketsQueryBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/CalendarQueryBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/CalendarQueryBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/CalendarQueryBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/CalendarQueryBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/ElasticsearchMappings.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/ElasticsearchMappings.java similarity index 99% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/ElasticsearchMappings.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/ElasticsearchMappings.java index 6b2494ba140..6138e5a3160 100644 --- 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/ElasticsearchMappings.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/ElasticsearchMappings.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; +import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshotField; import org.elasticsearch.xpack.ml.job.results.AnomalyCause; import org.elasticsearch.xpack.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.ml.job.results.Bucket; @@ -608,7 +609,7 @@ public class ElasticsearchMappings { builder.startObject(ModelSnapshot.DESCRIPTION.getPreferredName()) .field(TYPE, TEXT) .endObject() - .startObject(ModelSnapshot.SNAPSHOT_ID.getPreferredName()) + .startObject(ModelSnapshotField.SNAPSHOT_ID.getPreferredName()) .field(TYPE, KEYWORD) .endObject() .startObject(ModelSnapshot.SNAPSHOT_DOC_COUNT.getPreferredName()) diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/InfluencersQueryBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/InfluencersQueryBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/InfluencersQueryBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/InfluencersQueryBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java similarity index 96% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java index e1dcbeb9b15..269ea86fe33 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java @@ -50,6 +50,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -81,10 +82,12 @@ import org.elasticsearch.xpack.ml.job.process.autodetect.state.CategorizerState; import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; +import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshotField; import org.elasticsearch.xpack.ml.job.process.autodetect.state.Quantiles; import org.elasticsearch.xpack.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.ml.job.results.Bucket; import org.elasticsearch.xpack.ml.job.results.CategoryDefinition; +import org.elasticsearch.xpack.ml.job.results.ForecastRequestStats; import org.elasticsearch.xpack.ml.job.results.Influencer; import org.elasticsearch.xpack.ml.job.results.ModelPlot; import org.elasticsearch.xpack.ml.job.results.Result; @@ -475,6 +478,18 @@ public 
class JobProvider { } } + private T parseGetHit(GetResponse getResponse, BiFunction objectParser, + Consumer errorHandler) { + BytesReference source = getResponse.getSourceAsBytesRef(); + + try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, source)) { + return objectParser.apply(parser, null); + } catch (IOException e) { + errorHandler.accept(new ElasticsearchParseException("failed to parse " + getResponse.getType(), e)); + return null; + } + } + public static IndicesOptions addIgnoreUnavailable(IndicesOptions indicesOptions) { return IndicesOptions.fromOptions(true, indicesOptions.allowNoIndices(), indicesOptions.expandWildcardsOpen(), indicesOptions.expandWildcardsClosed(), indicesOptions); @@ -792,7 +807,7 @@ public class JobProvider { Consumer errorHandler) { ResultsFilterBuilder fb = new ResultsFilterBuilder(); if (snapshotId != null && !snapshotId.isEmpty()) { - fb.term(ModelSnapshot.SNAPSHOT_ID.getPreferredName(), snapshotId); + fb.term(ModelSnapshotField.SNAPSHOT_ID.getPreferredName(), snapshotId); } QueryBuilder qb = fb.timeRange(Result.TIMESTAMP.getPreferredName(), startEpochMs, endEpochMs).build(); @@ -904,6 +919,19 @@ public class JobProvider { ), client::search); } + private void getResult(String jobId, String resultDescription, GetRequest get, BiFunction objectParser, + Consumer> handler, Consumer errorHandler, Supplier notFoundSupplier) { + + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, get, ActionListener.wrap(getDocResponse -> { + if (getDocResponse.isExists()) { + handler.accept(new Result<>(getDocResponse.getIndex(), parseGetHit(getDocResponse, objectParser, errorHandler))); + } else { + LOGGER.trace("No {} for job with id {}", resultDescription, jobId); + handler.accept(new Result<>(null, notFoundSupplier.get())); + } + }, errorHandler), client::get); + } + private SearchRequestBuilder createLatestModelSizeStatsSearch(String indexName) { return 
client.prepareSearch(indexName) .setSize(1) @@ -1043,6 +1071,16 @@ public class JobProvider { , client::search); } + public void getForecastRequestStats(String jobId, String forecastId, Consumer handler, + Consumer errorHandler) { + String indexName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); + GetRequest getRequest = new GetRequest(indexName, ElasticsearchMappings.DOC_TYPE, + ForecastRequestStats.documentId(jobId, forecastId)); + + getResult(jobId, ForecastRequestStats.RESULTS_FIELD.getPreferredName(), getRequest, ForecastRequestStats.PARSER, + result -> handler.accept(result.result), errorHandler, () -> null); + } + public void updateCalendar(String calendarId, Set jobIdsToAdd, Set jobIdsToRemove, Consumer handler, Consumer errorHandler) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobStorageDeletionTask.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobStorageDeletionTask.java similarity index 98% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobStorageDeletionTask.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobStorageDeletionTask.java index 67558c0283d..a2749064309 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobStorageDeletionTask.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobStorageDeletionTask.java @@ -46,6 +46,10 @@ import java.util.function.Consumer; import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; +/* + Moving this class to plugin-core caused a *lot* of server side logic to be pulled in to plugin-core. This should be considered as needing + refactoring to move it back to core. See DeleteJobAction for its use. 
+*/ public class JobStorageDeletionTask extends Task { private final Logger logger; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/RecordsQueryBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/RecordsQueryBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/RecordsQueryBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/RecordsQueryBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/ResultsFilterBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/ResultsFilterBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/ResultsFilterBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/ResultsFilterBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/SpecialEventsQueryBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/SpecialEventsQueryBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/SpecialEventsQueryBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/persistence/SpecialEventsQueryBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushAcknowledgement.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushAcknowledgement.java similarity index 99% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushAcknowledgement.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushAcknowledgement.java index 2d2c9e7c77c..5659efe3f58 100644 --- 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushAcknowledgement.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushAcknowledgement.java @@ -26,7 +26,7 @@ import java.util.Objects; */ public class FlushAcknowledgement implements ToXContentObject, Writeable { /** - * Field Names + * Field Names */ public static final ParseField TYPE = new ParseField("flush"); public static final ParseField ID = new ParseField("id"); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/AutodetectParams.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/AutodetectParams.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/AutodetectParams.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/AutodetectParams.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/CategorizerState.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/CategorizerState.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/CategorizerState.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/CategorizerState.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/DataCounts.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/DataCounts.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/DataCounts.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/DataCounts.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelSizeStats.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelSizeStats.java similarity index 99% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelSizeStats.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelSizeStats.java index 2f5d96ea986..a6b565d3e4e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelSizeStats.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelSizeStats.java @@ -35,7 +35,7 @@ public class ModelSizeStats implements ToXContentObject, Writeable { public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); /** - * Field Names + * Field Names */ public static final ParseField MODEL_BYTES_FIELD = new ParseField("model_bytes"); public static final ParseField TOTAL_BY_FIELD_COUNT_FIELD = new ParseField("total_by_field_count"); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelSnapshot.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelSnapshot.java similarity index 98% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelSnapshot.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelSnapshot.java index 38492850c73..7da9b2259c8 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelSnapshot.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelSnapshot.java @@ -28,17 +28,15 @@ import java.util.Date; import java.util.List; import java.util.Objects; - /** * ModelSnapshot Result POJO */ public class ModelSnapshot implements ToXContentObject, Writeable { /** - * Field Names + * Field Names */ public static final ParseField TIMESTAMP = new 
ParseField("timestamp"); public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id"); public static final ParseField SNAPSHOT_DOC_COUNT = new ParseField("snapshot_doc_count"); public static final ParseField LATEST_RECORD_TIME = new ParseField("latest_record_time_stamp"); public static final ParseField LATEST_RESULT_TIME = new ParseField("latest_result_time_stamp"); @@ -66,7 +64,7 @@ public class ModelSnapshot implements ToXContentObject, Writeable { throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" + TIMESTAMP.getPreferredName() + "]"); }, TIMESTAMP, ValueType.VALUE); PARSER.declareString(Builder::setDescription, DESCRIPTION); - PARSER.declareString(Builder::setSnapshotId, SNAPSHOT_ID); + PARSER.declareString(Builder::setSnapshotId, ModelSnapshotField.SNAPSHOT_ID); PARSER.declareInt(Builder::setSnapshotDocCount, SNAPSHOT_DOC_COUNT); PARSER.declareObject(Builder::setModelSizeStats, ModelSizeStats.PARSER, ModelSizeStats.RESULT_TYPE_FIELD); PARSER.declareField(Builder::setLatestRecordTimeStamp, p -> { @@ -170,7 +168,7 @@ public class ModelSnapshot implements ToXContentObject, Writeable { builder.field(DESCRIPTION.getPreferredName(), description); } if (snapshotId != null) { - builder.field(SNAPSHOT_ID.getPreferredName(), snapshotId); + builder.field(ModelSnapshotField.SNAPSHOT_ID.getPreferredName(), snapshotId); } builder.field(SNAPSHOT_DOC_COUNT.getPreferredName(), snapshotDocCount); if (modelSizeStats != null) { diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelSnapshotField.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelSnapshotField.java new file mode 100644 index 00000000000..25767179936 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelSnapshotField.java @@ -0,0 +1,15 @@ +/* + * 
Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.job.process.autodetect.state; + +import org.elasticsearch.common.ParseField; + +public final class ModelSnapshotField { + + public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id"); + + private ModelSnapshotField() {} +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelState.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelState.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelState.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/ModelState.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/Quantiles.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/Quantiles.java similarity index 99% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/Quantiles.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/Quantiles.java index f0696b4ad0c..9fcf29ceb1d 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/Quantiles.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/state/Quantiles.java @@ -25,7 +25,7 @@ import java.util.Objects; public class Quantiles implements ToXContentObject, Writeable { /** - * Field Names + * Field Names */ public static final ParseField TIMESTAMP = new ParseField("timestamp"); public static final ParseField QUANTILE_STATE = new ParseField("quantile_state"); diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/RecordWriter.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/RecordWriter.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/RecordWriter.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/RecordWriter.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/AnomalyCause.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/AnomalyCause.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/AnomalyCause.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/AnomalyCause.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/AnomalyRecord.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/AnomalyRecord.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/AnomalyRecord.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/AnomalyRecord.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/Bucket.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/Bucket.java similarity index 99% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/Bucket.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/Bucket.java index 1cda0abb4c8..71ae54f0ae5 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/Bucket.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/Bucket.java @@ -32,7 +32,7 @@ import java.util.Optional; */ public class Bucket implements ToXContentObject, Writeable { /* - * Field Names + * Field Names */ private static final ParseField JOB_ID = Job.ID; diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/BucketInfluencer.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/BucketInfluencer.java similarity index 99% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/BucketInfluencer.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/BucketInfluencer.java index a2ff5ce3b4e..f6d0686181e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/BucketInfluencer.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/BucketInfluencer.java @@ -31,7 +31,7 @@ public class BucketInfluencer implements ToXContentObject, Writeable { public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); /** - * Field names + * Field names */ public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name"); public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initial_anomaly_score"); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/CategoryDefinition.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/CategoryDefinition.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/CategoryDefinition.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/CategoryDefinition.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/Forecast.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/Forecast.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/Forecast.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/Forecast.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/ForecastRequestStats.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/ForecastRequestStats.java similarity index 
99% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/ForecastRequestStats.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/ForecastRequestStats.java index 5bd9a9f90e6..475fec1bcab 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/ForecastRequestStats.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/ForecastRequestStats.java @@ -198,6 +198,10 @@ public class ForecastRequestStats implements ToXContentObject, Writeable { return forecastId; } + public static String documentId(String jobId, String forecastId) { + return jobId + "_model_forecast_request_stats_" + forecastId; + } + /** * Return the document ID used for indexing. As there is 1 and only 1 document * per forecast request, the id has no dynamic parts. @@ -205,7 +209,7 @@ public class ForecastRequestStats implements ToXContentObject, Writeable { * @return id */ public String getId() { - return jobId + "_model_forecast_request_stats_" + forecastId; + return documentId(jobId, forecastId); } public void setRecordCount(long recordCount) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/Influence.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/Influence.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/Influence.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/Influence.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/Influencer.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/Influencer.java similarity index 99% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/Influencer.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/Influencer.java index 55452ba3d5e..ec725a96576 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/Influencer.java +++ 
b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/Influencer.java @@ -31,7 +31,7 @@ public class Influencer implements ToXContentObject, Writeable { public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); /* - * Field names + * Field names */ public static final ParseField PROBABILITY = new ParseField("probability"); public static final ParseField SEQUENCE_NUM = new ParseField("sequence_num"); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/ModelPlot.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/ModelPlot.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/ModelPlot.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/ModelPlot.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/OverallBucket.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/OverallBucket.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/OverallBucket.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/OverallBucket.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/PartitionScore.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/PartitionScore.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/PartitionScore.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/PartitionScore.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/ReservedFieldNames.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/ReservedFieldNames.java similarity index 98% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/ReservedFieldNames.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/ReservedFieldNames.java 
index d65c9a68612..f84f1c9d653 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/ReservedFieldNames.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/ReservedFieldNames.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; +import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshotField; import java.util.Arrays; import java.util.HashSet; @@ -150,7 +151,7 @@ public final class ReservedFieldNames { ModelSizeStats.LOG_TIME_FIELD.getPreferredName(), ModelSnapshot.DESCRIPTION.getPreferredName(), - ModelSnapshot.SNAPSHOT_ID.getPreferredName(), + ModelSnapshotField.SNAPSHOT_ID.getPreferredName(), ModelSnapshot.SNAPSHOT_DOC_COUNT.getPreferredName(), ModelSnapshot.LATEST_RECORD_TIME.getPreferredName(), ModelSnapshot.LATEST_RESULT_TIME.getPreferredName(), diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/Result.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/Result.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/job/results/Result.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/job/results/Result.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/notifications/AuditMessage.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/notifications/AuditMessage.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/notifications/AuditMessage.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/notifications/AuditMessage.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/notifications/Level.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/notifications/Level.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/notifications/Level.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/notifications/Level.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/utils/ExceptionsHelper.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/ExceptionsHelper.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/utils/ExceptionsHelper.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/ExceptionsHelper.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/utils/Intervals.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/Intervals.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/utils/Intervals.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/Intervals.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/utils/MlStrings.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/MlStrings.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/utils/MlStrings.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/MlStrings.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/utils/NameResolver.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/NameResolver.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/utils/NameResolver.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/NameResolver.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/utils/ToXContentParams.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/ToXContentParams.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/utils/ToXContentParams.java rename to 
plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/ToXContentParams.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/utils/time/DateTimeFormatterTimestampConverter.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/time/DateTimeFormatterTimestampConverter.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/utils/time/DateTimeFormatterTimestampConverter.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/time/DateTimeFormatterTimestampConverter.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/utils/time/TimeUtils.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/time/TimeUtils.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/utils/time/TimeUtils.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/time/TimeUtils.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/utils/time/TimestampConverter.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/time/TimestampConverter.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/utils/time/TimestampConverter.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ml/utils/time/TimestampConverter.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/MonitoredSystem.java b/plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/MonitoredSystem.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/monitoring/MonitoredSystem.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/MonitoredSystem.java diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringField.java b/plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringField.java new file mode 100644 index 00000000000..32924dfae88 --- /dev/null +++ 
b/plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringField.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.unit.TimeValue; + +import static org.elasticsearch.common.settings.Setting.timeSetting; + +public final class MonitoringField { + + /** + * The minimum amount of time allowed for the history duration. + */ + public static final TimeValue HISTORY_DURATION_MINIMUM = TimeValue.timeValueHours(24); + /** + * The default retention duration of the monitoring history data. + *

+ * <p>
+ * Expected values:
+ * <ul>
+ * <li>Default: 7 days</li>
+ * <li>Minimum: 1 day</li>
+ * </ul>
+ * + * @see MonitoringField#HISTORY_DURATION_MINIMUM + */ + public static final Setting<TimeValue> HISTORY_DURATION = timeSetting("xpack.monitoring.history.duration", + TimeValue.timeValueHours(7 * 24), // default value (7 days) + HISTORY_DURATION_MINIMUM, // minimum value + Setting.Property.Dynamic, Setting.Property.NodeScope); + + private MonitoringField() {} +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkDoc.java b/plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkDoc.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkDoc.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkDoc.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestBuilder.java rename to
plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringIndex.java b/plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringIndex.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringIndex.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/action/MonitoringIndex.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringDoc.java b/plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringDoc.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringDoc.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringDoc.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringTemplateUtils.java b/plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringTemplateUtils.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringTemplateUtils.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringTemplateUtils.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/AllocatedPersistentTask.java b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/AllocatedPersistentTask.java similarity index 
100% rename from plugin/src/main/java/org/elasticsearch/xpack/persistent/AllocatedPersistentTask.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/persistent/AllocatedPersistentTask.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/CompletionPersistentTaskAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/CompletionPersistentTaskAction.java similarity index 98% rename from plugin/src/main/java/org/elasticsearch/xpack/persistent/CompletionPersistentTaskAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/persistent/CompletionPersistentTaskAction.java index 85ab3b736bf..c88a01bafd4 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/persistent/CompletionPersistentTaskAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/CompletionPersistentTaskAction.java @@ -157,7 +157,8 @@ public class CompletionPersistentTaskAction extends Action listener) { + protected final void masterOperation(final Request request, ClusterState state, + final ActionListener listener) { persistentTasksClusterService.completePersistentTask(request.taskId, request.allocationId, request.exception, new ActionListener>() { @Override diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/NodePersistentTasksExecutor.java b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/NodePersistentTasksExecutor.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/persistent/NodePersistentTasksExecutor.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/persistent/NodePersistentTasksExecutor.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTaskParams.java b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTaskParams.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTaskParams.java rename to 
plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTaskParams.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTaskResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTaskResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTaskResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTaskResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksClusterService.java b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksClusterService.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksClusterService.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksClusterService.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksCustomMetaData.java b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksCustomMetaData.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksCustomMetaData.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksCustomMetaData.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksExecutor.java b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksExecutor.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksExecutor.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksExecutor.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksExecutorRegistry.java b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksExecutorRegistry.java similarity index 100% 
rename from plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksExecutorRegistry.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksExecutorRegistry.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksNodeService.java b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksNodeService.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksNodeService.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksNodeService.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksService.java b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksService.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksService.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksService.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/RemovePersistentTaskAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/RemovePersistentTaskAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/persistent/RemovePersistentTaskAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/persistent/RemovePersistentTaskAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/StartPersistentTaskAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/StartPersistentTaskAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/persistent/StartPersistentTaskAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/persistent/StartPersistentTaskAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/UpdatePersistentTaskStatusAction.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/UpdatePersistentTaskStatusAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/persistent/UpdatePersistentTaskStatusAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/persistent/UpdatePersistentTaskStatusAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/package-info.java b/plugin/core/src/main/java/org/elasticsearch/xpack/persistent/package-info.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/persistent/package-info.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/persistent/package-info.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityExtension.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/SecurityExtension.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/SecurityExtension.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/SecurityExtension.java diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/security/SecurityField.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/SecurityField.java new file mode 100644 index 00000000000..5ae55fd8c73 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/SecurityField.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security; + +import org.elasticsearch.xpack.XpackField; + +public final class SecurityField { + + private SecurityField() {} + + public static String setting(String setting) { + assert setting != null && setting.startsWith(".") == false; + return settingPrefix() + setting; + } + + public static String settingPrefix() { + return XpackField.featureSettingPrefix(XpackField.SECURITY) + "."; + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheResponse.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/realm/ClearRealmCacheResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheResponse.java similarity index 100% rename from 
plugin/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/ClearRolesCacheResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/DeleteRoleResponse.java diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/GetRolesResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleAction.java 
rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/role/PutRoleResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingAction.java similarity index 78% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingAction.java index 782c0a4c995..b370df618d2 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingAction.java +++ 
b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingAction.java @@ -7,13 +7,10 @@ package org.elasticsearch.xpack.security.action.rolemapping; import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.xpack.security.action.role.DeleteRoleRequest; -import org.elasticsearch.xpack.security.action.role.DeleteRoleRequestBuilder; -import org.elasticsearch.xpack.security.action.role.DeleteRoleResponse; /** * Action for deleting a role-mapping from the - * {@link org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore} + * org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ public class DeleteRoleMappingAction extends Action { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingRequest.java similarity index 93% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingRequest.java index 33b9b51e692..0665b1ea4c2 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingRequest.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingRequest.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import static org.elasticsearch.action.ValidateActions.addValidationError; /** - * A request delete a role-mapping from the {@link org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore} + * A request delete a role-mapping from the org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ public class DeleteRoleMappingRequest extends ActionRequest 
implements WriteRequest { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingRequestBuilder.java similarity index 92% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingRequestBuilder.java index 6f7ac0d9f07..651b3c883d5 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingRequestBuilder.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingRequestBuilder.java @@ -11,7 +11,7 @@ import org.elasticsearch.client.ElasticsearchClient; /** * A builder for requests to delete a role-mapping from the - * {@link org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore} + * org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ public class DeleteRoleMappingRequestBuilder extends ActionRequestBuilder diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingResponse.java similarity index 95% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingResponse.java index 320a396e55b..b05fe3ba6b9 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingResponse.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/DeleteRoleMappingResponse.java @@ -15,7 +15,7 @@ import 
org.elasticsearch.common.xcontent.XContentBuilder; /** * Response for a role-mapping being deleted from the - * {@link org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore} + * org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ public class DeleteRoleMappingResponse extends ActionResponse implements ToXContentObject { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsAction.java similarity index 93% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsAction.java index ade88d640aa..544240e3c30 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.client.ElasticsearchClient; /** * Action to retrieve one or more role-mappings from X-Pack security - * @see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore + * see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ public class GetRoleMappingsAction extends Action { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsRequest.java similarity index 95% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsRequest.java index c01a1a6e939..ba39d141b39 100644 --- 
a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsRequest.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsRequest.java @@ -18,7 +18,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * Request to retrieve role-mappings from X-Pack security * - * @see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore + * see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ public class GetRoleMappingsRequest extends ActionRequest { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsRequestBuilder.java similarity index 91% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsRequestBuilder.java index 2b8b293914a..f563e5df423 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsRequestBuilder.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsRequestBuilder.java @@ -11,7 +11,7 @@ import org.elasticsearch.client.ElasticsearchClient; /** * Builder for a request to retrieve role-mappings from X-Pack security * - * @see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore + * see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ public class GetRoleMappingsRequestBuilder extends ActionRequestBuilder { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsResponse.java 
similarity index 94% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsResponse.java index d46765372be..fde9500e003 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsResponse.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/GetRoleMappingsResponse.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.security.authc.support.mapper.ExpressionRoleMappi /** * Response to {@link GetRoleMappingsAction get role-mappings API}. * - * @see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore + * see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ public class GetRoleMappingsResponse extends ActionResponse { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingAction.java similarity index 84% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingAction.java index aee7bbb585f..6c1fd8a7433 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingAction.java @@ -7,9 +7,6 @@ package org.elasticsearch.xpack.security.action.rolemapping; import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.xpack.security.action.role.PutRoleRequest; -import org.elasticsearch.xpack.security.action.role.PutRoleRequestBuilder; -import 
org.elasticsearch.xpack.security.action.role.PutRoleResponse; /** * Action for adding a role to the security index diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingRequest.java similarity index 98% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingRequest.java index 6f014333c60..35b1cdd85c1 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingRequest.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingRequest.java @@ -27,7 +27,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * Request object for adding/updating a role-mapping to the native store * - * @see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore + * see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ public class PutRoleMappingRequest extends ActionRequest implements WriteRequest { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingRequestBuilder.java similarity index 96% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingRequestBuilder.java index e6e05c58a8d..bbdd596284f 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingRequestBuilder.java +++ 
b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingRequestBuilder.java @@ -20,7 +20,7 @@ import org.elasticsearch.xpack.security.authc.support.mapper.expressiondsl.RoleM /** * Builder for requests to add/update a role-mapping to the native store * - * @see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore + * see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ public class PutRoleMappingRequestBuilder extends ActionRequestBuilder implements diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingResponse.java similarity index 94% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingResponse.java index fafd5f15602..560e5fb9f18 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingResponse.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/PutRoleMappingResponse.java @@ -17,7 +17,7 @@ import java.io.IOException; /** * Response when adding/updating a role-mapping. 
* - * @see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore + * see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ public class PutRoleMappingResponse extends ActionResponse implements ToXContentObject { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenResponse.java rename to 
plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/CreateTokenResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/token/InvalidateTokenResponse.java diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/AuthenticateResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordAction.java similarity index 100% rename from 
plugin/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserAction.java diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/DeleteUserResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersRequest.java similarity index 100% rename from 
plugin/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/GetUsersResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequest.java diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserRequestBuilder.java similarity index 100% rename from 
plugin/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/PutUserResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequestBuilder.java diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/UserRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/UserRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/action/user/UserRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/action/user/UserRequest.java diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationField.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationField.java new file mode 100644 index 00000000000..b9769f5ba30 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationField.java @@ -0,0 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc; + +public final class AuthenticationField { + + public static final String AUTHENTICATION_KEY = "_xpack_security_authentication"; + + private AuthenticationField() {} +} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceField.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceField.java new file mode 100644 index 00000000000..1c9e167deda --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceField.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authc; + +import org.elasticsearch.common.settings.Setting; + +import static org.elasticsearch.xpack.security.SecurityField.setting; + +public final class AuthenticationServiceField { + + public static final Setting RUN_AS_ENABLED = + Setting.boolSetting(setting("authc.run_as.enabled"), true, Setting.Property.NodeScope); + public static final String RUN_AS_USER_HEADER = "es-security-runas-user"; + + private AuthenticationServiceField() {} +} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ClientReservedRealm.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ClientReservedRealm.java new file mode 100644 index 00000000000..4e5e72d0c3b --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ClientReservedRealm.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.esnative; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.security.user.AnonymousUser; +import org.elasticsearch.xpack.security.user.UsernamesField; + +public class ClientReservedRealm { + + public static boolean isReserved(String username, Settings settings) { + assert username != null; + switch (username) { + case UsernamesField.ELASTIC_NAME: + case UsernamesField.KIBANA_NAME: + case UsernamesField.LOGSTASH_NAME: + return XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(settings); + default: + return AnonymousUser.isAnonymousUsername(username, settings); + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/BCrypt.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/BCrypt.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/BCrypt.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/BCrypt.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/CharArrays.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/CharArrays.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/CharArrays.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/CharArrays.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/Hasher.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/Hasher.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/Hasher.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/Hasher.java diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMapping.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMapping.java similarity index 96% rename from plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMapping.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMapping.java index 4da46b90de3..2ee3e2963f1 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMapping.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMapping.java @@ -27,7 +27,7 @@ import org.elasticsearch.xpack.security.authc.support.mapper.expressiondsl.RoleM import org.elasticsearch.xpack.security.authc.support.mapper.expressiondsl.ExpressionParser; /** - * A representation of a single role-mapping for use in {@link NativeRoleMappingStore}. + * A representation of a single role-mapping for use in NativeRoleMappingStore. * Logically, this represents a set of roles that should be applied to any user where a boolean * expression evaluates to true. * @@ -52,7 +52,7 @@ public class ExpressionRoleMapping implements ToXContentObject, Writeable { BiConsumer ignored = (b, v) -> { }; // skip the doc_type and type fields in case we're parsing directly from the index - PARSER.declareString(ignored, new ParseField(NativeRoleMappingStore.DOC_TYPE_FIELD)); + PARSER.declareString(ignored, new ParseField(NativeRoleMappingStoreField.DOC_TYPE_FIELD)); PARSER.declareString(ignored, new ParseField(UPGRADE_API_TYPE_FIELD)); } @@ -99,7 +99,7 @@ public class ExpressionRoleMapping implements ToXContentObject, Writeable { /** * The expression that determines whether the roles in this mapping should be applied to any given user. 
* If the expression {@link RoleMapperExpression#match(Map) matches} a - * {@link org.elasticsearch.xpack.security.authc.support.UserRoleMapper.UserData user}, then the user should be assigned this mapping's + * org.elasticsearch.xpack.security.authc.support.UserRoleMapper.UserData user, then the user should be assigned this mapping's * {@link #getRoles() roles} */ public RoleMapperExpression getExpression() { @@ -180,7 +180,7 @@ public class ExpressionRoleMapping implements ToXContentObject, Writeable { builder.field(Fields.METADATA.getPreferredName(), metadata); if (includeDocType) { - builder.field(NativeRoleMappingStore.DOC_TYPE_FIELD, NativeRoleMappingStore.DOC_TYPE_ROLE_MAPPING); + builder.field(NativeRoleMappingStoreField.DOC_TYPE_FIELD, NativeRoleMappingStoreField.DOC_TYPE_ROLE_MAPPING); } return builder.endObject(); } diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreField.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreField.java new file mode 100644 index 00000000000..ef363eeaab1 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreField.java @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.support.mapper; + +public final class NativeRoleMappingStoreField { + + public static final String DOC_TYPE_FIELD = "doc_type"; + public static final String DOC_TYPE_ROLE_MAPPING = "role-mapping"; + public static final String ID_PREFIX = DOC_TYPE_ROLE_MAPPING + "_"; + public static final String SECURITY_GENERIC_TYPE = "doc"; + + private NativeRoleMappingStoreField() {} +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/AllExpression.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/AllExpression.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/AllExpression.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/AllExpression.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/AnyExpression.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/AnyExpression.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/AnyExpression.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/AnyExpression.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/ExceptExpression.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/ExceptExpression.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/ExceptExpression.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/ExceptExpression.java diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/ExpressionParser.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/ExpressionParser.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/ExpressionParser.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/ExpressionParser.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/FieldExpression.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/FieldExpression.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/FieldExpression.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/FieldExpression.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/RoleMapperExpression.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/RoleMapperExpression.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/RoleMapperExpression.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/expressiondsl/RoleMapperExpression.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/RoleDescriptor.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authz/RoleDescriptor.java similarity index 99% rename from plugin/src/main/java/org/elasticsearch/xpack/security/authz/RoleDescriptor.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/authz/RoleDescriptor.java index ec0e57c3aeb..b8fecef5a7e 100644 --- 
a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/RoleDescriptor.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authz/RoleDescriptor.java @@ -313,11 +313,11 @@ public class RoleDescriptor implements ToXContentObject { } if (indexPrivileges != null) { if (Arrays.stream(indexPrivileges).anyMatch(IndicesPrivileges::isUsingFieldLevelSecurity)) { - throw new ElasticsearchParseException("Field [{}] is not supported in a has_privileges request", + throw new ElasticsearchParseException("ThrottlerField [{}] is not supported in a has_privileges request", RoleDescriptor.Fields.FIELD_PERMISSIONS); } if (Arrays.stream(indexPrivileges).anyMatch(IndicesPrivileges::isUsingDocumentLevelSecurity)) { - throw new ElasticsearchParseException("Field [{}] is not supported in a has_privileges request", Fields.QUERY); + throw new ElasticsearchParseException("ThrottlerField [{}] is not supported in a has_privileges request", Fields.QUERY); } } return new RoleDescriptor(description, clusterPrivileges, indexPrivileges, null); diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/security/authz/store/ClientReservedRoles.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authz/store/ClientReservedRoles.java new file mode 100644 index 00000000000..b31777d2e6a --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/authz/store/ClientReservedRoles.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authz.store; + +import org.elasticsearch.xpack.security.SecurityExtension; +import org.elasticsearch.xpack.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.security.support.MetadataUtils; +import org.elasticsearch.xpack.security.user.UsernamesField; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.ServiceLoader; + +public class ClientReservedRoles { + + public static final RoleDescriptor SUPERUSER_ROLE_DESCRIPTOR = new RoleDescriptor("superuser", + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").build()}, + new String[] { "*" }, + MetadataUtils.DEFAULT_RESERVED_METADATA); + static final Map RESERVED_ROLES = initializeReservedRoles(); + + static Map initializeReservedRoles() { + Map roles = new HashMap<>(); + + roles.put("superuser", SUPERUSER_ROLE_DESCRIPTOR); + + // Services are loaded through SPI, and are defined in their META-INF/services + for(SecurityExtension ext : ServiceLoader.load(SecurityExtension.class, SecurityExtension.class.getClassLoader())) { + roles.putAll(ext.getReservedRoles()); + } + + return Collections.unmodifiableMap(roles); + } + + public static boolean isReserved(String role) { + return RESERVED_ROLES.containsKey(role) || UsernamesField.SYSTEM_ROLE.equals(role) || UsernamesField.XPACK_ROLE.equals(role); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/common/xcontent/XContentUtils.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/common/xcontent/XContentUtils.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/common/xcontent/XContentUtils.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/common/xcontent/XContentUtils.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/support/Automatons.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/security/support/Automatons.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/support/Automatons.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/support/Automatons.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/support/Exceptions.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/support/Exceptions.java similarity index 81% rename from plugin/src/main/java/org/elasticsearch/xpack/security/support/Exceptions.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/support/Exceptions.java index 08845541069..5995cda0386 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/support/Exceptions.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/support/Exceptions.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.security.support; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XpackField; public class Exceptions { @@ -16,13 +16,13 @@ public class Exceptions { public static ElasticsearchSecurityException authenticationError(String msg, Throwable cause, Object... args) { ElasticsearchSecurityException e = new ElasticsearchSecurityException(msg, RestStatus.UNAUTHORIZED, cause, args); - e.addHeader("WWW-Authenticate", "Basic realm=\"" + XPackPlugin.SECURITY + "\" charset=\"UTF-8\""); + e.addHeader("WWW-Authenticate", "Basic realm=\"" + XpackField.SECURITY + "\" charset=\"UTF-8\""); return e; } public static ElasticsearchSecurityException authenticationError(String msg, Object... 
args) { ElasticsearchSecurityException e = new ElasticsearchSecurityException(msg, RestStatus.UNAUTHORIZED, args); - e.addHeader("WWW-Authenticate", "Basic realm=\"" + XPackPlugin.SECURITY + "\" charset=\"UTF-8\""); + e.addHeader("WWW-Authenticate", "Basic realm=\"" + XpackField.SECURITY + "\" charset=\"UTF-8\""); return e; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/support/MetadataUtils.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/support/MetadataUtils.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/security/support/MetadataUtils.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/support/MetadataUtils.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/support/Validation.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/support/Validation.java similarity index 93% rename from plugin/src/main/java/org/elasticsearch/xpack/security/support/Validation.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/support/Validation.java index 497638bf049..e5ce013aed8 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/support/Validation.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/support/Validation.java @@ -7,10 +7,9 @@ package org.elasticsearch.xpack.security.support; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; -import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; +import org.elasticsearch.xpack.security.authc.esnative.ClientReservedRealm; +import org.elasticsearch.xpack.security.authz.store.ClientReservedRoles; -import java.util.HashSet; import java.util.Locale; import java.util.Set; @@ -76,7 +75,7 @@ public final class Validation { if (!isValidUserOrRoleName(username)) { return new Error(String.format(Locale.ROOT, 
INVALID_NAME_MESSAGE, "User")); } - if (allowReserved == false && ReservedRealm.isReserved(username, settings)) { + if (allowReserved == false && ClientReservedRealm.isReserved(username, settings)) { return new Error("Username [" + username + "] is reserved and may not be used."); } return null; @@ -100,7 +99,7 @@ public final class Validation { if (!isValidUserOrRoleName(roleName)) { return new Error(String.format(Locale.ROOT, INVALID_NAME_MESSAGE, "Role")); } - if (allowReserved == false && ReservedRolesStore.isReserved(roleName)) { + if (allowReserved == false && ClientReservedRoles.isReserved(roleName)) { return new Error("Role [" + roleName + "] is reserved and may not be used."); } return null; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/user/AnonymousUser.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/user/AnonymousUser.java similarity index 96% rename from plugin/src/main/java/org/elasticsearch/xpack/security/user/AnonymousUser.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/user/AnonymousUser.java index 1b6545fcf4d..8130e91d55b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/user/AnonymousUser.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/user/AnonymousUser.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.security.support.MetadataUtils; import java.util.Collections; import java.util.List; -import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.SecurityField.setting; /** * The user object for the anonymous user. 
diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/user/User.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/user/User.java similarity index 85% rename from plugin/src/main/java/org/elasticsearch/xpack/security/user/User.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/security/user/User.java index 1fecc61aa75..2e656cefea3 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/user/User.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/user/User.java @@ -176,19 +176,7 @@ public class User implements ToXContentObject { return builder.endObject(); } - public static User readFrom(StreamInput input) throws IOException { - final boolean isInternalUser = input.readBoolean(); - final String username = input.readString(); - if (isInternalUser) { - if (SystemUser.is(username)) { - return SystemUser.INSTANCE; - } else if (XPackUser.is(username)) { - return XPackUser.INSTANCE; - } else if (XPackSecurityUser.is(username)) { - return XPackSecurityUser.INSTANCE; - } - throw new IllegalStateException("user [" + username + "] is not an internal user"); - } + public static User partialReadFrom(String username, StreamInput input) throws IOException { String[] roles = input.readStringArray(); Map metadata = input.readMap(); String fullName = input.readOptionalString(); @@ -209,32 +197,28 @@ public class User implements ToXContentObject { } } + public static User readFrom(StreamInput input) throws IOException { + final boolean isInternalUser = input.readBoolean(); + assert isInternalUser == false: "should always return false. 
Internal users should use the InternalUserSerializationHelper"; + final String username = input.readString(); + return partialReadFrom(username, input); + } + public static void writeTo(User user, StreamOutput output) throws IOException { - if (SystemUser.is(user)) { + if (user.authenticatedUser == null) { + // no backcompat necessary, since there is no inner user + writeUser(user, output); + } else if (output.getVersion().onOrBefore(Version.V_5_4_0)) { + // backcompat: write runas user as the "inner" user + writeUser(user.authenticatedUser, output); output.writeBoolean(true); - output.writeString(SystemUser.NAME); - } else if (XPackUser.is(user)) { - output.writeBoolean(true); - output.writeString(XPackUser.NAME); - } else if (XPackSecurityUser.is(user)) { - output.writeBoolean(true); - output.writeString(XPackSecurityUser.NAME); + writeUser(user, output); } else { - if (user.authenticatedUser == null) { - // no backcompat necessary, since there is no inner user - writeUser(user, output); - } else if (output.getVersion().onOrBefore(Version.V_5_4_0)) { - // backcompat: write runas user as the "inner" user - writeUser(user.authenticatedUser, output); - output.writeBoolean(true); - writeUser(user, output); - } else { - writeUser(user, output); - output.writeBoolean(true); - writeUser(user.authenticatedUser, output); - } - output.writeBoolean(false); // last user written, regardless of bwc, does not have an inner user + writeUser(user, output); + output.writeBoolean(true); + writeUser(user.authenticatedUser, output); } + output.writeBoolean(false); // last user written, regardless of bwc, does not have an inner user } /** Write just the given {@link User}, but not the inner {@link #authenticatedUser}. 
*/ @@ -260,3 +244,4 @@ public class User implements ToXContentObject { ParseField TYPE = new ParseField("type"); } } + diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/security/user/UsernamesField.java b/plugin/core/src/main/java/org/elasticsearch/xpack/security/user/UsernamesField.java new file mode 100644 index 00000000000..b5c2ba90c36 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/security/user/UsernamesField.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.user; + +public final class UsernamesField { + public static final String ELASTIC_NAME = "elastic"; + public static final String ELASTIC_ROLE = "superuser"; + public static final String KIBANA_NAME = "kibana"; + public static final String KIBANA_ROLE = "kibana_system"; + public static final String SYSTEM_NAME = "_system"; + public static final String SYSTEM_ROLE = "_system"; + public static final String XPACK_SECURITY_NAME = "_xpack_security"; + public static final String XPACK_SECURITY_ROLE = "superuser"; + public static final String XPACK_NAME = "_xpack"; + public static final String XPACK_ROLE = "_xpack"; + public static final String LOGSTASH_NAME = "logstash_system"; + public static final String LOGSTASH_ROLE = "logstash_system"; + + private UsernamesField() {} +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLClientAuth.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ssl/SSLClientAuth.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLClientAuth.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ssl/SSLClientAuth.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLConfigurationSettings.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/ssl/SSLConfigurationSettings.java similarity index 99% rename from plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLConfigurationSettings.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ssl/SSLConfigurationSettings.java index 4bb2d530d47..e8e828dbd04 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLConfigurationSettings.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ssl/SSLConfigurationSettings.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; /** - * Bridges {@link SSLConfiguration} into the {@link Settings} framework, using {@link Setting} objects. + * Bridges SSLConfiguration into the {@link Settings} framework, using {@link Setting} objects. */ public class SSLConfigurationSettings { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/VerificationMode.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ssl/VerificationMode.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/ssl/VerificationMode.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ssl/VerificationMode.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/action/GetCertificateInfoAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ssl/action/GetCertificateInfoAction.java similarity index 69% rename from plugin/src/main/java/org/elasticsearch/xpack/ssl/action/GetCertificateInfoAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ssl/action/GetCertificateInfoAction.java index cab927da963..83250884595 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ssl/action/GetCertificateInfoAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ssl/action/GetCertificateInfoAction.java @@ -5,32 +5,22 @@ */ package org.elasticsearch.xpack.ssl.action; -import java.io.IOException; -import 
java.security.GeneralSecurityException; -import java.util.ArrayList; -import java.util.Collection; - import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ssl.SSLService; import org.elasticsearch.xpack.ssl.cert.CertificateInfo; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; + /** * Action to obtain information about X.509 (SSL/TLS) certificates that are being used by X-Pack. * The primary use case is for tracking the expiry dates of certificates. 
@@ -115,27 +105,4 @@ public class GetCertificateInfoAction } } - public static class TransportAction extends HandledTransportAction { - - private final SSLService sslService; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - SSLService sslService) { - super(settings, GetCertificateInfoAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, - Request::new); - this.sslService = sslService; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - try { - Collection certificates = sslService.getLoadedCertificates(); - listener.onResponse(new Response(certificates)); - } catch (GeneralSecurityException | IOException e) { - listener.onFailure(e); - } - } - } } \ No newline at end of file diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/cert/CertificateInfo.java b/plugin/core/src/main/java/org/elasticsearch/xpack/ssl/cert/CertificateInfo.java similarity index 97% rename from plugin/src/main/java/org/elasticsearch/xpack/ssl/cert/CertificateInfo.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/ssl/cert/CertificateInfo.java index 676114d568e..ff8b8df00de 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ssl/cert/CertificateInfo.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/ssl/cert/CertificateInfo.java @@ -5,20 +5,18 @@ */ package org.elasticsearch.xpack.ssl.cert; -import java.io.IOException; -import java.security.cert.X509Certificate; -import java.util.Objects; - import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import 
org.elasticsearch.xpack.ssl.CertUtils; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; +import java.io.IOException; +import java.security.cert.X509Certificate; +import java.util.Objects; + /** * Simple model of an X.509 certificate that is known to X-Pack */ diff --git a/plugin/src/main/java/org/elasticsearch/xpack/template/TemplateUtils.java b/plugin/core/src/main/java/org/elasticsearch/xpack/template/TemplateUtils.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/template/TemplateUtils.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/template/TemplateUtils.java diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceFields.java b/plugin/core/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceFields.java new file mode 100644 index 00000000000..280b4e3d0b2 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceFields.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.upgrade; + +import org.elasticsearch.action.support.IndicesOptions; + +public final class IndexUpgradeServiceFields { + + public static final IndicesOptions UPGRADE_INDEX_OPTIONS = IndicesOptions.strictSingleIndexNoExpandForbidClosed(); + + private IndexUpgradeServiceFields() {} +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/UpgradeActionRequired.java b/plugin/core/src/main/java/org/elasticsearch/xpack/upgrade/UpgradeActionRequired.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/upgrade/UpgradeActionRequired.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/upgrade/UpgradeActionRequired.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeAction.java similarity index 65% rename from plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeAction.java index 6c3158a35a9..58486680817 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeAction.java @@ -6,37 +6,24 @@ package org.elasticsearch.xpack.upgrade.actions; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; import org.elasticsearch.action.support.master.MasterNodeReadRequest; -import org.elasticsearch.action.support.master.TransportMasterNodeAction; import 
org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.upgrade.IndexUpgradeService; import java.io.IOException; import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; -import static org.elasticsearch.xpack.upgrade.IndexUpgradeService.UPGRADE_INDEX_OPTIONS; +import static org.elasticsearch.xpack.upgrade.IndexUpgradeServiceFields.UPGRADE_INDEX_OPTIONS; public class IndexUpgradeAction extends Action { @@ -173,47 +160,4 @@ public class IndexUpgradeAction extends Action { - - private final IndexUpgradeService indexUpgradeService; - - @Inject - public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, ActionFilters actionFilters, - IndexUpgradeService indexUpgradeService, - IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, IndexUpgradeAction.NAME, transportService, clusterService, threadPool, actionFilters, - Request::new, indexNameExpressionResolver); - this.indexUpgradeService = indexUpgradeService; - } - - @Override - protected String executor() { - return ThreadPool.Names.GENERIC; - } - - @Override - protected 
BulkByScrollResponse newResponse() { - return new BulkByScrollResponse(); - } - - @Override - protected ClusterBlockException checkBlock(Request request, ClusterState state) { - // Cluster is not affected but we look up repositories in metadata - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); - } - - @Override - protected final void masterOperation(Task task, Request request, ClusterState state, - ActionListener listener) { - TaskId taskId = new TaskId(clusterService.localNode().getId(), task.getId()); - indexUpgradeService.upgrade(taskId, request.index(), state, listener); - } - - @Override - protected final void masterOperation(Request request, ClusterState state, ActionListener listener) { - throw new UnsupportedOperationException("the task parameter is required"); - } - - } } \ No newline at end of file diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoAction.java similarity index 70% rename from plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoAction.java index 35e9aa4b660..9a77165d35a 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoAction.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoAction.java @@ -6,33 +6,17 @@ package org.elasticsearch.xpack.upgrade.actions; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.IndicesRequest; -import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; import 
org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; import org.elasticsearch.action.support.master.MasterNodeReadRequest; -import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.license.LicenseUtils; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.XPackPlugin; -import org.elasticsearch.xpack.upgrade.IndexUpgradeService; import org.elasticsearch.xpack.upgrade.UpgradeActionRequired; import java.io.IOException; @@ -214,49 +198,4 @@ public class IndexUpgradeInfoAction extends Action { - - private final IndexUpgradeService indexUpgradeService; - private final XPackLicenseState licenseState; - - - @Inject - public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, ActionFilters actionFilters, - IndexUpgradeService indexUpgradeService, - IndexNameExpressionResolver indexNameExpressionResolver, - XPackLicenseState licenseState) { - super(settings, IndexUpgradeInfoAction.NAME, transportService, clusterService, threadPool, actionFilters, - Request::new, indexNameExpressionResolver); - this.indexUpgradeService = 
indexUpgradeService; - this.licenseState = licenseState; - } - - @Override - protected String executor() { - return ThreadPool.Names.GENERIC; - } - - @Override - protected Response newResponse() { - return new Response(); - } - - @Override - protected ClusterBlockException checkBlock(Request request, ClusterState state) { - // Cluster is not affected but we look up repositories in metadata - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); - } - - @Override - protected final void masterOperation(final Request request, ClusterState state, final ActionListener listener) { - if (licenseState.isUpgradeAllowed()) { - Map results = - indexUpgradeService.upgradeInfo(request.indices(), request.indicesOptions(), state); - listener.onResponse(new Response(results)); - } else { - listener.onFailure(LicenseUtils.newComplianceException(XPackPlugin.UPGRADE)); - } - } - } } \ No newline at end of file diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/WatcherField.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/WatcherField.java new file mode 100644 index 00000000000..598caf1e7bf --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/WatcherField.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.watcher; + +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.Setting; + +import java.io.InputStream; + +public final class WatcherField { + + public static final Setting ENCRYPTION_KEY_SETTING = + SecureSetting.secureFile("xpack.watcher.encryption_key", null); + + private WatcherField() {} +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherMetaData.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/WatcherMetaData.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherMetaData.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/WatcherMetaData.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherState.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/WatcherState.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherState.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/WatcherState.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/Action.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/Action.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/Action.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/Action.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionFactory.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionFactory.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionFactory.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionFactory.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionRegistry.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionRegistry.java similarity index 95% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionRegistry.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionRegistry.java index 2312a49b7ed..74b6a9098c3 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionRegistry.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionRegistry.java @@ -9,8 +9,8 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.watcher.condition.ConditionRegistry; +import org.elasticsearch.xpack.watcher.support.WatcherUtils; import org.elasticsearch.xpack.watcher.transform.TransformRegistry; -import org.elasticsearch.xpack.watcher.watch.Watch; import java.io.IOException; import java.time.Clock; @@ -53,7 +53,7 @@ public class ActionRegistry { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { id = parser.currentName(); - if (Watch.isValidId(id) == false) { + if (WatcherUtils.isValidId(id) == false) { throw new ElasticsearchParseException("could not parse action [{}] for watch [{}]. 
id contains whitespace", id, watchId); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionStatus.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionStatus.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionStatus.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionStatus.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapper.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapper.java similarity index 66% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapper.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapper.java index 61773a634ba..2a98a80bf7e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapper.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapper.java @@ -17,13 +17,15 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.watcher.actions.throttler.ActionThrottler; import org.elasticsearch.xpack.watcher.actions.throttler.Throttler; +import org.elasticsearch.xpack.watcher.actions.throttler.ThrottlerField; import org.elasticsearch.xpack.watcher.condition.Condition; +import org.elasticsearch.xpack.watcher.condition.ExecutableCondition; import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; import org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils; import org.elasticsearch.xpack.watcher.transform.ExecutableTransform; import org.elasticsearch.xpack.watcher.transform.Transform; import org.elasticsearch.xpack.watcher.watch.Payload; -import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchField; import org.joda.time.DateTime; import 
org.joda.time.DateTimeZone; @@ -37,14 +39,14 @@ public class ActionWrapper implements ToXContentObject { private String id; @Nullable - private final Condition condition; + private final ExecutableCondition condition; @Nullable private final ExecutableTransform transform; private final ActionThrottler throttler; private final ExecutableAction action; public ActionWrapper(String id, ActionThrottler throttler, - @Nullable Condition condition, + @Nullable ExecutableCondition condition, @Nullable ExecutableTransform transform, ExecutableAction action) { this.id = id; @@ -58,7 +60,7 @@ public class ActionWrapper implements ToXContentObject { return id; } - public Condition condition() { + public ExecutableCondition condition() { return condition; } @@ -88,8 +90,8 @@ public class ActionWrapper implements ToXContentObject { * @param ctx The current watch's context * @return Never {@code null} */ - public ActionWrapper.Result execute(WatchExecutionContext ctx) { - ActionWrapper.Result result = ctx.actionsResults().get(id); + public ActionWrapperResult execute(WatchExecutionContext ctx) { + ActionWrapperResult result = ctx.actionsResults().get(id); if (result != null) { return result; } @@ -97,9 +99,9 @@ public class ActionWrapper implements ToXContentObject { Throttler.Result throttleResult = throttler.throttle(id, ctx); if (throttleResult.throttle()) { if (throttleResult.type() == Throttler.Type.ACK) { - return new ActionWrapper.Result(id, new Action.Result.Acknowledged(action.type(), throttleResult.reason())); + return new ActionWrapperResult(id, new Action.Result.Acknowledged(action.type(), throttleResult.reason())); } else { - return new ActionWrapper.Result(id, new Action.Result.Throttled(action.type(), throttleResult.reason())); + return new ActionWrapperResult(id, new Action.Result.Throttled(action.type(), throttleResult.reason())); } } } @@ -109,14 +111,14 @@ public class ActionWrapper implements ToXContentObject { conditionResult = condition.execute(ctx); if 
(conditionResult.met() == false) { ctx.watch().status().actionStatus(id).resetAckStatus(DateTime.now(DateTimeZone.UTC)); - return new ActionWrapper.Result(id, conditionResult, null, + return new ActionWrapperResult(id, conditionResult, null, new Action.Result.ConditionFailed(action.type(), "condition not met. skipping")); } } catch (RuntimeException e) { action.logger().error( (Supplier) () -> new ParameterizedMessage( "failed to execute action [{}/{}]. failed to execute condition", ctx.watch().id(), id), e); - return new ActionWrapper.Result(id, new Action.Result.ConditionFailed(action.type(), + return new ActionWrapperResult(id, new Action.Result.ConditionFailed(action.type(), "condition failed. skipping: {}", e.getMessage())); } } @@ -129,23 +131,23 @@ public class ActionWrapper implements ToXContentObject { action.logger().error("failed to execute action [{}/{}]. failed to transform payload. {}", ctx.watch().id(), id, transformResult.reason()); String msg = "Failed to transform payload"; - return new ActionWrapper.Result(id, conditionResult, transformResult, new Action.Result.Failure(action.type(), msg)); + return new ActionWrapperResult(id, conditionResult, transformResult, new Action.Result.Failure(action.type(), msg)); } payload = transformResult.payload(); } catch (Exception e) { action.logger().error( (Supplier) () -> new ParameterizedMessage( "failed to execute action [{}/{}]. 
failed to transform payload.", ctx.watch().id(), id), e); - return new ActionWrapper.Result(id, conditionResult, null, new Action.Result.FailureWithException(action.type(), e)); + return new ActionWrapperResult(id, conditionResult, null, new Action.Result.FailureWithException(action.type(), e)); } } try { Action.Result actionResult = action.execute(id, ctx, payload); - return new ActionWrapper.Result(id, conditionResult, transformResult, actionResult); + return new ActionWrapperResult(id, conditionResult, transformResult, actionResult); } catch (Exception e) { action.logger().error( (Supplier) () -> new ParameterizedMessage("failed to execute action [{}/{}]", ctx.watch().id(), id), e); - return new ActionWrapper.Result(id, new Action.Result.FailureWithException(action.type(), e)); + return new ActionWrapperResult(id, new Action.Result.FailureWithException(action.type(), e)); } } @@ -172,11 +174,11 @@ public class ActionWrapper implements ToXContentObject { builder.startObject(); TimeValue throttlePeriod = throttler.throttlePeriod(); if (throttlePeriod != null) { - builder.timeValueField(Throttler.Field.THROTTLE_PERIOD.getPreferredName(), - Throttler.Field.THROTTLE_PERIOD_HUMAN.getPreferredName(), throttlePeriod); + builder.timeValueField(ThrottlerField.THROTTLE_PERIOD.getPreferredName(), + ThrottlerField.THROTTLE_PERIOD_HUMAN.getPreferredName(), throttlePeriod); } if (condition != null) { - builder.startObject(Watch.Field.CONDITION.getPreferredName()) + builder.startObject(WatchField.CONDITION.getPreferredName()) .field(condition.type(), condition, params) .endObject(); } @@ -194,7 +196,7 @@ public class ActionWrapper implements ToXContentObject { assert parser.currentToken() == XContentParser.Token.START_OBJECT; - Condition condition = null; + ExecutableCondition condition = null; ExecutableTransform transform = null; TimeValue throttlePeriod = null; ExecutableAction action = null; @@ -205,15 +207,15 @@ public class ActionWrapper implements ToXContentObject { if 
(token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else { - if (Watch.Field.CONDITION.match(currentFieldName)) { + if (WatchField.CONDITION.match(currentFieldName)) { condition = actionRegistry.getConditionRegistry().parseExecutable(watchId, parser); } else if (Transform.TRANSFORM.match(currentFieldName)) { transform = actionRegistry.getTransformRegistry().parse(watchId, parser); - } else if (Throttler.Field.THROTTLE_PERIOD.match(currentFieldName)) { + } else if (ThrottlerField.THROTTLE_PERIOD.match(currentFieldName)) { throttlePeriod = timeValueMillis(parser.longValue()); - } else if (Throttler.Field.THROTTLE_PERIOD_HUMAN.match(currentFieldName)) { + } else if (ThrottlerField.THROTTLE_PERIOD_HUMAN.match(currentFieldName)) { try { - throttlePeriod = WatcherDateTimeUtils.parseTimeValue(parser, Throttler.Field.THROTTLE_PERIOD_HUMAN.toString()); + throttlePeriod = WatcherDateTimeUtils.parseTimeValue(parser, ThrottlerField.THROTTLE_PERIOD_HUMAN.toString()); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse action [{}/{}]. 
failed to parse field [{}] as time value", pe, watchId, actionId, currentFieldName); @@ -237,80 +239,4 @@ public class ActionWrapper implements ToXContentObject { return new ActionWrapper(actionId, throttler, condition, transform, action); } - public static class Result implements ToXContentObject { - - private final String id; - @Nullable - private final Condition.Result condition; - @Nullable - private final Transform.Result transform; - private final Action.Result action; - - public Result(String id, Action.Result action) { - this(id, null, null, action); - } - - public Result(String id, @Nullable Condition.Result condition, @Nullable Transform.Result transform, Action.Result action) { - this.id = id; - this.condition = condition; - this.transform = transform; - this.action = action; - } - - public String id() { - return id; - } - - public Condition.Result condition() { - return condition; - } - - public Transform.Result transform() { - return transform; - } - - public Action.Result action() { - return action; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Result result = (Result) o; - - return Objects.equals(id, result.id) && - Objects.equals(condition, result.condition) && - Objects.equals(transform, result.transform) && - Objects.equals(action, result.action); - } - - @Override - public int hashCode() { - return Objects.hash(id, condition, transform, action); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Field.ID.getPreferredName(), id); - builder.field(Field.TYPE.getPreferredName(), action.type()); - builder.field(Field.STATUS.getPreferredName(), action.status().value()); - if (condition != null) { - builder.field(Watch.Field.CONDITION.getPreferredName(), condition, params); - } - if (transform != null) { - 
builder.field(Transform.TRANSFORM.getPreferredName(), transform, params); - } - action.toXContent(builder, params); - return builder.endObject(); - } - } - - interface Field { - ParseField ID = new ParseField("id"); - ParseField TYPE = new ParseField("type"); - ParseField STATUS = new ParseField("status"); - } } diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperField.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperField.java new file mode 100644 index 00000000000..385fe651c00 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperField.java @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.watcher.actions; + +import org.elasticsearch.common.ParseField; + +public final class ActionWrapperField { + public static final ParseField ID = new ParseField("id"); + public static final ParseField TYPE = new ParseField("type"); + public static final ParseField STATUS = new ParseField("status"); + + private ActionWrapperField() {} +} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperResult.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperResult.java new file mode 100644 index 00000000000..47ea2cabc37 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperResult.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.watcher.actions; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.watcher.condition.Condition; +import org.elasticsearch.xpack.watcher.transform.Transform; +import org.elasticsearch.xpack.watcher.watch.WatchField; + +import java.io.IOException; +import java.util.Objects; + +public class ActionWrapperResult implements ToXContentObject { + + private final String id; + @Nullable + private final Condition.Result condition; + @Nullable + private final Transform.Result transform; + private final Action.Result action; + + public ActionWrapperResult(String id, Action.Result action) { + this(id, null, null, action); + } + + public ActionWrapperResult(String id, @Nullable Condition.Result condition, @Nullable Transform.Result transform, + Action.Result action) { + this.id = id; + this.condition = condition; + this.transform = transform; + this.action = action; + } + + public String id() { + return id; + } + + public Condition.Result condition() { + return condition; + } + + public Transform.Result transform() { + return transform; + } + + public Action.Result action() { + return action; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + ActionWrapperResult result = (ActionWrapperResult) o; + + return Objects.equals(id, result.id) && + Objects.equals(condition, result.condition) && + Objects.equals(transform, result.transform) && + Objects.equals(action, result.action); + } + + @Override + public int hashCode() { + return Objects.hash(id, condition, transform, action); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(ActionWrapperField.ID.getPreferredName(), id); + 
builder.field(ActionWrapperField.TYPE.getPreferredName(), action.type()); + builder.field(ActionWrapperField.STATUS.getPreferredName(), action.status().value()); + if (condition != null) { + builder.field(WatchField.CONDITION.getPreferredName(), condition, params); + } + if (transform != null) { + builder.field(Transform.TRANSFORM.getPreferredName(), transform, params); + } + action.toXContent(builder, params); + return builder.endObject(); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/ExecutableAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ExecutableAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/ExecutableAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/ExecutableAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/AckThrottler.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/AckThrottler.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/AckThrottler.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/AckThrottler.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottler.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottler.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottler.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottler.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/PeriodThrottler.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/PeriodThrottler.java similarity index 100% rename from 
plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/PeriodThrottler.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/PeriodThrottler.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/Throttler.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/Throttler.java similarity index 91% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/Throttler.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/Throttler.java index 9cbdb2073dc..9080314f1cb 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/Throttler.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/Throttler.java @@ -62,8 +62,4 @@ public interface Throttler { } } - interface Field { - ParseField THROTTLE_PERIOD = new ParseField("throttle_period_in_millis"); - ParseField THROTTLE_PERIOD_HUMAN = new ParseField("throttle_period"); - } } diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/ThrottlerField.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/ThrottlerField.java new file mode 100644 index 00000000000..77833475f95 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/actions/throttler/ThrottlerField.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.watcher.actions.throttler; + +import org.elasticsearch.common.ParseField; + +public final class ThrottlerField { + public static final ParseField THROTTLE_PERIOD = new ParseField("throttle_period_in_millis"); + public static final ParseField THROTTLE_PERIOD_HUMAN = new ParseField("throttle_period"); + + private ThrottlerField() {} +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/client/WatchSourceBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/client/WatchSourceBuilder.java similarity index 88% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/client/WatchSourceBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/client/WatchSourceBuilder.java index 10ac2d3549d..409cd1e406d 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/client/WatchSourceBuilder.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/client/WatchSourceBuilder.java @@ -15,7 +15,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.watcher.actions.Action; -import org.elasticsearch.xpack.watcher.actions.throttler.Throttler; +import org.elasticsearch.xpack.watcher.actions.throttler.ThrottlerField; import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; import org.elasticsearch.xpack.watcher.condition.Condition; import org.elasticsearch.xpack.watcher.input.Input; @@ -24,7 +24,7 @@ import org.elasticsearch.xpack.watcher.support.Exceptions; import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource; import org.elasticsearch.xpack.watcher.transform.Transform; import org.elasticsearch.xpack.watcher.trigger.Trigger; -import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchField; import java.io.IOException; import java.util.HashMap; @@ 
-132,37 +132,37 @@ public class WatchSourceBuilder implements ToXContentObject { if (trigger == null) { throw Exceptions.illegalState("failed to build watch source. no trigger defined"); } - builder.startObject(Watch.Field.TRIGGER.getPreferredName()) + builder.startObject(WatchField.TRIGGER.getPreferredName()) .field(trigger.type(), trigger, params) .endObject(); - builder.startObject(Watch.Field.INPUT.getPreferredName()) + builder.startObject(WatchField.INPUT.getPreferredName()) .field(input.type(), input, params) .endObject(); - builder.startObject(Watch.Field.CONDITION.getPreferredName()) + builder.startObject(WatchField.CONDITION.getPreferredName()) .field(condition.type(), condition, params) .endObject(); if (transform != null) { - builder.startObject(Watch.Field.TRANSFORM.getPreferredName()) + builder.startObject(WatchField.TRANSFORM.getPreferredName()) .field(transform.type(), transform, params) .endObject(); } if (defaultThrottlePeriod != null) { - builder.timeValueField(Watch.Field.THROTTLE_PERIOD.getPreferredName(), - Watch.Field.THROTTLE_PERIOD_HUMAN.getPreferredName(), defaultThrottlePeriod); + builder.timeValueField(WatchField.THROTTLE_PERIOD.getPreferredName(), + WatchField.THROTTLE_PERIOD_HUMAN.getPreferredName(), defaultThrottlePeriod); } - builder.startObject(Watch.Field.ACTIONS.getPreferredName()); + builder.startObject(WatchField.ACTIONS.getPreferredName()); for (Map.Entry entry : actions.entrySet()) { builder.field(entry.getKey(), entry.getValue(), params); } builder.endObject(); if (metadata != null) { - builder.field(Watch.Field.METADATA.getPreferredName(), metadata); + builder.field(WatchField.METADATA.getPreferredName(), metadata); } return builder.endObject(); @@ -190,7 +190,7 @@ public class WatchSourceBuilder implements ToXContentObject { @Nullable private final Transform transform; TransformedAction(String id, Action action, @Nullable TimeValue throttlePeriod, - @Nullable Condition condition, @Nullable Transform transform) { + @Nullable 
Condition condition, @Nullable Transform transform) { this.id = id; this.throttlePeriod = throttlePeriod; this.condition = condition; @@ -202,11 +202,11 @@ public class WatchSourceBuilder implements ToXContentObject { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); if (throttlePeriod != null) { - builder.timeValueField(Throttler.Field.THROTTLE_PERIOD.getPreferredName(), - Throttler.Field.THROTTLE_PERIOD_HUMAN.getPreferredName(), throttlePeriod); + builder.timeValueField(ThrottlerField.THROTTLE_PERIOD.getPreferredName(), + ThrottlerField.THROTTLE_PERIOD_HUMAN.getPreferredName(), throttlePeriod); } if (condition != null) { - builder.startObject(Watch.Field.CONDITION.getPreferredName()) + builder.startObject(WatchField.CONDITION.getPreferredName()) .field(condition.type(), condition, params) .endObject(); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/common/secret/Secret.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/common/secret/Secret.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/common/secret/Secret.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/common/secret/Secret.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/common/xcontent/XContentUtils.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/common/xcontent/XContentUtils.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/common/xcontent/XContentUtils.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/common/xcontent/XContentUtils.java diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/condition/AlwaysCondition.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/condition/AlwaysCondition.java new file mode 100644 index 00000000000..e33254da448 --- /dev/null +++ 
b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/condition/AlwaysCondition.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.watcher.condition; + +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; + +public class AlwaysCondition implements Condition { + public static final String TYPE = "always"; + public static final Condition INSTANCE = new AlwaysCondition(); + + protected AlwaysCondition() { } + + @Override + public boolean equals(Object obj) { + return obj instanceof AlwaysCondition; + } + + @Override + public int hashCode() { + // All instances has to produce the same hashCode because they are all equal + return 0; + } + + @Override + public String type() { + return TYPE; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject().endObject(); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/Condition.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/condition/Condition.java similarity index 77% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/Condition.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/condition/Condition.java index 286467cde44..70c44d73076 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/Condition.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/condition/Condition.java @@ -6,41 +6,21 @@ package org.elasticsearch.xpack.watcher.condition; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import 
org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; import java.io.IOException; import java.util.Locale; import java.util.Map; -public abstract class Condition implements ToXContentObject { - - protected final String type; - - protected Condition(String type) { - this.type = type; - } +public interface Condition extends ToXContentObject { /** * @return the type of this condition */ - public final String type() { - return type; - } + String type(); - /** - * Executes this condition - */ - public abstract Result execute(WatchExecutionContext ctx); - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.startObject().endObject(); - } - - public static class Result implements ToXContentObject { // don't make this final - we can't mock final classes :( + class Result implements ToXContentObject { // don't make this final - we can't mock final classes :( public Map getResolvedValues() { return resolveValues; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/ConditionFactory.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/condition/ConditionFactory.java similarity index 87% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/ConditionFactory.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/condition/ConditionFactory.java index fbb41378d94..402b9b55a0f 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/ConditionFactory.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/condition/ConditionFactory.java @@ -20,6 +20,6 @@ public interface ConditionFactory { * @param watchId The id of the watch * @param parser The parsing that contains the condition content */ - Condition parse(Clock clock, String watchId, XContentParser parser) throws IOException; + ExecutableCondition parse(Clock clock, String watchId, 
XContentParser parser) throws IOException; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/ConditionRegistry.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/condition/ConditionRegistry.java similarity index 93% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/ConditionRegistry.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/condition/ConditionRegistry.java index bf55c051f7a..fc96933c0e2 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/ConditionRegistry.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/condition/ConditionRegistry.java @@ -35,8 +35,8 @@ public class ConditionRegistry { * @param watchId The id of the watch * @param parser The parsing that contains the condition content */ - public Condition parseExecutable(String watchId, XContentParser parser) throws IOException { - Condition condition = null; + public ExecutableCondition parseExecutable(String watchId, XContentParser parser) throws IOException { + ExecutableCondition condition = null; ConditionFactory factory; String type = null; diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/condition/ExecutableCondition.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/condition/ExecutableCondition.java new file mode 100644 index 00000000000..06f2fa43e14 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/condition/ExecutableCondition.java @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.watcher.condition; + +import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; + +public interface ExecutableCondition extends Condition { + + /** + * Executes this condition + */ + Result execute(WatchExecutionContext ctx); +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/crypto/CryptoService.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/crypto/CryptoService.java similarity index 93% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/crypto/CryptoService.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/crypto/CryptoService.java index ebada74163c..81e03dbfb72 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/crypto/CryptoService.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/crypto/CryptoService.java @@ -11,8 +11,9 @@ import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.security.SecurityField; import org.elasticsearch.xpack.security.authc.support.CharArrays; -import org.elasticsearch.xpack.watcher.Watcher; +import org.elasticsearch.xpack.watcher.WatcherField; import javax.crypto.BadPaddingException; import javax.crypto.Cipher; @@ -29,8 +30,6 @@ import java.util.Arrays; import java.util.Base64; import java.util.List; -import static org.elasticsearch.xpack.security.Security.setting; - /** * Service that provides cryptographic methods based on a shared system key */ @@ -53,11 +52,11 @@ public class CryptoService extends AbstractComponent { private static final int DEFAULT_KEY_LENGTH = 128; private static final Setting ENCRYPTION_ALGO_SETTING = - new Setting<>(setting("encryption.algorithm"), s -> DEFAULT_ENCRYPTION_ALGORITHM, s -> s, Property.NodeScope); + new Setting<>(SecurityField.setting("encryption.algorithm"), s -> 
DEFAULT_ENCRYPTION_ALGORITHM, s -> s, Property.NodeScope); private static final Setting ENCRYPTION_KEY_LENGTH_SETTING = - Setting.intSetting(setting("encryption_key.length"), DEFAULT_KEY_LENGTH, Property.NodeScope); + Setting.intSetting(SecurityField.setting("encryption_key.length"), DEFAULT_KEY_LENGTH, Property.NodeScope); private static final Setting ENCRYPTION_KEY_ALGO_SETTING = - new Setting<>(setting("encryption_key.algorithm"), DEFAULT_KEY_ALGORITH, s -> s, Property.NodeScope); + new Setting<>(SecurityField.setting("encryption_key.algorithm"), DEFAULT_KEY_ALGORITH, s -> s, Property.NodeScope); private final SecureRandom secureRandom = new SecureRandom(); private final String encryptionAlgorithm; @@ -78,7 +77,7 @@ public class CryptoService extends AbstractComponent { throw new IllegalArgumentException("invalid key length [" + keyLength + "]. value must be a multiple of 8"); } - SecretKey systemKey = readSystemKey(Watcher.ENCRYPTION_KEY_SETTING.get(settings)); + SecretKey systemKey = readSystemKey(WatcherField.ENCRYPTION_KEY_SETTING.get(settings)); try { encryptionKey = encryptionKey(systemKey, keyLength, keyAlgorithm); } catch (NoSuchAlgorithmException nsae) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ActionExecutionMode.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/ActionExecutionMode.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ActionExecutionMode.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/ActionExecutionMode.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionPhase.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionPhase.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionPhase.java rename to 
plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionPhase.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionState.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionState.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionState.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionState.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/QueuedWatch.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/QueuedWatch.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/QueuedWatch.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/QueuedWatch.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionContext.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionContext.java similarity index 96% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionContext.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionContext.java index 0b58649ab4f..0fb2bcd7385 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionContext.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionContext.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.watcher.execution; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.xpack.watcher.actions.ActionWrapper; +import org.elasticsearch.xpack.watcher.actions.ActionWrapperResult; import org.elasticsearch.xpack.watcher.condition.Condition; import 
org.elasticsearch.xpack.watcher.history.WatchRecord; import org.elasticsearch.xpack.watcher.input.Input; @@ -41,7 +41,7 @@ public abstract class WatchExecutionContext { private Input.Result inputResult; private Condition.Result conditionResult; private Transform.Result transformResult; - private ConcurrentMap actionsResults = ConcurrentCollections.newConcurrentMap(); + private ConcurrentMap actionsResults = ConcurrentCollections.newConcurrentMap(); private String nodeId; public WatchExecutionContext(String watchId, DateTime executionTime, TriggerEvent triggerEvent, TimeValue defaultThrottlePeriod) { @@ -192,13 +192,13 @@ public abstract class WatchExecutionContext { phase = ExecutionPhase.ACTIONS; } - public void onActionResult(ActionWrapper.Result result) { + public void onActionResult(ActionWrapperResult result) { assert !phase.sealed(); actionsResults.put(result.id(), result); watch().status().onActionResult(result.id(), executionTime, result.action()); } - public Map actionsResults() { + public Map actionsResults() { return Collections.unmodifiableMap(actionsResults); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionResult.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionResult.java similarity index 91% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionResult.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionResult.java index 0addb8e61e3..352d37c93dc 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionResult.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionResult.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import 
org.elasticsearch.xpack.watcher.actions.ActionWrapper; +import org.elasticsearch.xpack.watcher.actions.ActionWrapperResult; import org.elasticsearch.xpack.watcher.condition.Condition; import org.elasticsearch.xpack.watcher.input.Input; import org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils; @@ -26,7 +26,7 @@ public class WatchExecutionResult implements ToXContentObject { @Nullable private final Input.Result inputResult; @Nullable private final Condition.Result conditionResult; @Nullable private final Transform.Result transformResult; - private final Map actionsResults; + private final Map actionsResults; public WatchExecutionResult(WatchExecutionContext context, long executionDurationMs) { this(context.executionTime(), executionDurationMs, context.inputResult(), context.conditionResult(), context.transformResult(), @@ -35,7 +35,7 @@ public class WatchExecutionResult implements ToXContentObject { private WatchExecutionResult(DateTime executionTime, long executionDurationMs, Input.Result inputResult, Condition.Result conditionResult, @Nullable Transform.Result transformResult, - Map actionsResults) { + Map actionsResults) { this.executionTime = executionTime; this.inputResult = inputResult; this.conditionResult = conditionResult; @@ -64,7 +64,7 @@ public class WatchExecutionResult implements ToXContentObject { return transformResult; } - public Map actionsResults() { + public Map actionsResults() { return actionsResults; } @@ -85,7 +85,7 @@ public class WatchExecutionResult implements ToXContentObject { builder.field(Transform.TRANSFORM.getPreferredName(), transformResult, params); } builder.startArray(Field.ACTIONS.getPreferredName()); - for (ActionWrapper.Result result : actionsResults.values()) { + for (ActionWrapperResult result : actionsResults.values()) { result.toXContent(builder, params); } builder.endArray(); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionSnapshot.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionSnapshot.java similarity index 95% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionSnapshot.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionSnapshot.java index 584a66fb46e..a8f02ccab5e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionSnapshot.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/WatchExecutionSnapshot.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.xpack.watcher.actions.ActionWrapper; +import org.elasticsearch.xpack.watcher.actions.ActionWrapperResult; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -37,10 +37,10 @@ public class WatchExecutionSnapshot implements Streamable, ToXContentObject { executionTime = context.executionTime(); phase = context.executionPhase(); if (phase == ExecutionPhase.ACTIONS) { - Map actionResults = context.actionsResults(); + Map actionResults = context.actionsResults(); executedActions = new String[actionResults.size()]; int i = 0; - for (ActionWrapper.Result actionResult : actionResults.values()) { + for (ActionWrapperResult actionResult : actionResults.values()) { executedActions[i++] = actionResult.id(); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/Wid.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/Wid.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/Wid.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/execution/Wid.java diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/watcher/history/WatchRecord.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/history/WatchRecord.java similarity index 94% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/history/WatchRecord.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/history/WatchRecord.java index fdb29b2a5f0..2726ef564fe 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/history/WatchRecord.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/history/WatchRecord.java @@ -13,8 +13,8 @@ import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.watcher.actions.Action; -import org.elasticsearch.xpack.watcher.actions.ActionWrapper; -import org.elasticsearch.xpack.watcher.condition.Condition; +import org.elasticsearch.xpack.watcher.actions.ActionWrapperResult; +import org.elasticsearch.xpack.watcher.condition.ExecutableCondition; import org.elasticsearch.xpack.watcher.execution.ExecutionState; import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; import org.elasticsearch.xpack.watcher.execution.WatchExecutionResult; @@ -23,6 +23,7 @@ import org.elasticsearch.xpack.watcher.input.ExecutableInput; import org.elasticsearch.xpack.watcher.support.xcontent.WatcherParams; import org.elasticsearch.xpack.watcher.trigger.TriggerEvent; import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchField; import java.io.IOException; import java.util.Collection; @@ -53,12 +54,12 @@ public abstract class WatchRecord implements ToXContentObject { protected final Map vars; @Nullable protected final ExecutableInput input; - @Nullable protected final Condition condition; + @Nullable protected final ExecutableCondition condition; @Nullable protected final Map metadata; @Nullable protected 
final WatchExecutionResult executionResult; private WatchRecord(Wid id, TriggerEvent triggerEvent, ExecutionState state, Map vars, ExecutableInput input, - Condition condition, Map metadata, Watch watch, WatchExecutionResult executionResult, + ExecutableCondition condition, Map metadata, Watch watch, WatchExecutionResult executionResult, String nodeId) { this.id = id; this.triggerEvent = triggerEvent; @@ -100,7 +101,7 @@ public abstract class WatchRecord implements ToXContentObject { return ExecutionState.FAILED; } if (executionResult.conditionResult().met()) { - final Collection values = executionResult.actionsResults().values(); + final Collection values = executionResult.actionsResults().values(); // acknowledged as state wins because the user had explicitely set this, where as throttled may happen due to execution if (values.stream().anyMatch((r) -> r.action().status() == Action.Result.Status.ACKNOWLEDGED)) { return ExecutionState.ACKNOWLEDGED; @@ -163,12 +164,12 @@ public abstract class WatchRecord implements ToXContentObject { } if (input != null) { - builder.startObject(Watch.Field.INPUT.getPreferredName()) + builder.startObject(WatchField.INPUT.getPreferredName()) .field(input.type(), input, params) .endObject(); } if (condition != null) { - builder.startObject(Watch.Field.CONDITION.getPreferredName()) + builder.startObject(WatchField.CONDITION.getPreferredName()) .field(condition.type(), condition, params) .endObject(); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/input/ExecutableInput.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/input/ExecutableInput.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/input/ExecutableInput.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/input/ExecutableInput.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/input/Input.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/input/Input.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/input/Input.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/input/Input.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/input/none/NoneInput.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/input/none/NoneInput.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/input/none/NoneInput.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/input/none/NoneInput.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/Exceptions.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/Exceptions.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/support/Exceptions.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/Exceptions.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherDateTimeUtils.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherDateTimeUtils.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherDateTimeUtils.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherDateTimeUtils.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherUtils.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherUtils.java similarity index 90% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherUtils.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherUtils.java index 4c6d20a4f04..d8d5e81597a 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherUtils.java +++ 
b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherUtils.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.watcher.support; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentHelper; @@ -15,11 +16,14 @@ import java.io.IOException; import java.lang.reflect.Array; import java.util.HashMap; import java.util.Map; +import java.util.regex.Pattern; import static org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils.formatDate; public final class WatcherUtils { + private static final Pattern NO_WS_PATTERN = Pattern.compile("\\S+"); + private WatcherUtils() { } @@ -71,4 +75,8 @@ public final class WatcherUtils { } result.put(key, String.valueOf(value)); } + + public static boolean isValidId(String id) { + return Strings.isEmpty(id) == false && NO_WS_PATTERN.matcher(id).matches(); + } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/ObjectPath.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/ObjectPath.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/ObjectPath.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/ObjectPath.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/WatcherParams.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/WatcherParams.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/WatcherParams.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/WatcherParams.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/WatcherXContentParser.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/WatcherXContentParser.java similarity index 97% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/WatcherXContentParser.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/WatcherXContentParser.java index 7f8ccb2f285..66ab179983c 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/WatcherXContentParser.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/WatcherXContentParser.java @@ -23,7 +23,7 @@ import java.util.Map; /** * A xcontent parser that is used by watcher. This is a special parser that is * aware of watcher services. In particular, it's aware of the used {@link Clock} - * and the {@link CryptoService}. The former (clock) may be used when the current time + * and the CryptoService. The former (clock) may be used when the current time * is required during the parse phase of construct. The latter (crypto service) is used * to encode secret values (e.g. passwords, security tokens, etc..) to {@link Secret}s. 
* {@link Secret}s are encrypted values that are stored in memory and are decrypted @@ -60,7 +60,7 @@ public class WatcherXContentParser implements XContentParser { public static Clock clock(XContentParser parser) { if (parser instanceof WatcherXContentParser) { - return ((WatcherXContentParser) parser).clock; + return ((WatcherXContentParser) parser).getClock(); } return Clock.systemUTC(); } @@ -75,6 +75,8 @@ public class WatcherXContentParser implements XContentParser { this.cryptoService = cryptoService; } + public Clock getClock() { return clock; } + @Override public XContentType contentType() { return parser.contentType(); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/XContentSource.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/XContentSource.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/XContentSource.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/support/xcontent/XContentSource.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transform/ExecutableTransform.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transform/ExecutableTransform.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transform/ExecutableTransform.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transform/ExecutableTransform.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transform/Transform.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transform/Transform.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transform/Transform.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transform/Transform.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transform/TransformFactory.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transform/TransformFactory.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transform/TransformFactory.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transform/TransformFactory.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transform/TransformRegistry.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transform/TransformRegistry.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transform/TransformRegistry.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transform/TransformRegistry.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transform/chain/ChainTransform.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transform/chain/ChainTransform.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transform/chain/ChainTransform.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transform/chain/ChainTransform.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transform/chain/ChainTransformFactory.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transform/chain/ChainTransformFactory.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transform/chain/ChainTransformFactory.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transform/chain/ChainTransformFactory.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transform/chain/ExecutableChainTransform.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transform/chain/ExecutableChainTransform.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transform/chain/ExecutableChainTransform.java rename to 
plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transform/chain/ExecutableChainTransform.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchRequest.java similarity index 94% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchRequest.java index 4544db90a0f..a3b3dcc3e4f 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchRequest.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchRequest.java @@ -11,7 +11,7 @@ import org.elasticsearch.action.ValidateActions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.support.WatcherUtils; import java.io.IOException; import java.util.Locale; @@ -72,7 +72,7 @@ public class AckWatchRequest extends ActionRequest { ActionRequestValidationException validationException = null; if (watchId == null){ validationException = ValidateActions.addValidationError("watch id is missing", validationException); - } else if (Watch.isValidId(watchId) == false) { + } else if (WatcherUtils.isValidId(watchId) == false) 
{ validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException); } if (actionIds != null) { @@ -80,7 +80,7 @@ public class AckWatchRequest extends ActionRequest { if (actionId == null) { validationException = ValidateActions.addValidationError( String.format(Locale.ROOT, "action id may not be null"), validationException); - } else if (Watch.isValidId(actionId) == false) { + } else if (WatcherUtils.isValidId(actionId) == false) { validationException = ValidateActions.addValidationError( String.format(Locale.ROOT, "action id [%s] contains whitespace", actionId), validationException); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/AckWatchResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchAction.java rename to 
plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchRequest.java similarity index 95% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchRequest.java index 1559240836a..74ffdd5d9fb 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchRequest.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchRequest.java @@ -10,7 +10,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.support.WatcherUtils; import java.io.IOException; @@ -64,7 +64,7 @@ public class ActivateWatchRequest extends ActionRequest { ActionRequestValidationException validationException = null; if (watchId == null){ validationException = ValidateActions.addValidationError("watch id is missing", validationException); - } else if (Watch.isValidId(watchId) == false) { + } else if (WatcherUtils.isValidId(watchId) == false) { validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException); } return validationException; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchRequestBuilder.java 
b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/ActivateWatchResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchRequest.java similarity index 94% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchRequest.java index 284f22964a1..c824f0ece4e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchRequest.java +++ 
b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchRequest.java @@ -11,7 +11,7 @@ import org.elasticsearch.action.ValidateActions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.support.WatcherUtils; import java.io.IOException; @@ -50,7 +50,7 @@ public class DeleteWatchRequest extends ActionRequest { ActionRequestValidationException validationException = null; if (id == null){ validationException = ValidateActions.addValidationError("watch id is missing", validationException); - } else if (Watch.isValidId(id) == false) { + } else if (WatcherUtils.isValidId(id) == false) { validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException); } return validationException; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/DeleteWatchResponse.java diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchRequest.java similarity index 97% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchRequest.java index 92d74098e31..094ee355127 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchRequest.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchRequest.java @@ -15,8 +15,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder; import org.elasticsearch.xpack.watcher.execution.ActionExecutionMode; +import org.elasticsearch.xpack.watcher.support.WatcherUtils; import org.elasticsearch.xpack.watcher.trigger.TriggerEvent; -import org.elasticsearch.xpack.watcher.watch.Watch; import java.io.IOException; import java.util.HashMap; @@ -246,14 +246,14 @@ public class ExecuteWatchRequest extends ActionRequest { validationException = ValidateActions.addValidationError("a watch execution request must either have a watch id or an inline " + "watch source, but both are missing", validationException); } - if (id != null 
&& Watch.isValidId(id) == false) { + if (id != null && WatcherUtils.isValidId(id) == false) { validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException); } for (String actionId : actionModes.keySet()) { if (actionId == null) { validationException = ValidateActions.addValidationError( String.format(Locale.ROOT, "action id may not be null"), validationException); - } else if (Watch.isValidId(actionId) == false) { + } else if (WatcherUtils.isValidId(actionId) == false) { validationException = ValidateActions.addValidationError( String.format(Locale.ROOT, "action id [%s] contains whitespace", actionId), validationException); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/ExecuteWatchResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchAction.java similarity index 100% rename from 
plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchRequest.java similarity index 94% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchRequest.java index 3b7b7748493..db5cc7e83be 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchRequest.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchRequest.java @@ -10,7 +10,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.support.WatcherUtils; import java.io.IOException; @@ -52,7 +52,7 @@ public class GetWatchRequest extends ActionRequest { ActionRequestValidationException validationException = null; if (id == null) { validationException = ValidateActions.addValidationError("watch id is missing", validationException); - } else if (Watch.isValidId(id) == false) { + } else if (WatcherUtils.isValidId(id) == false) { validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchRequestBuilder.java similarity index 
100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/GetWatchResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchAction.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchRequest.java similarity index 97% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchRequest.java index e14bfaf6f88..57343721a06 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchRequest.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchRequest.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import 
org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder; -import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.support.WatcherUtils; import java.io.IOException; @@ -121,7 +121,7 @@ public class PutWatchRequest extends ActionRequest { ActionRequestValidationException validationException = null; if (id == null) { validationException = ValidateActions.addValidationError("watch id is missing", validationException); - } else if (Watch.isValidId(id) == false) { + } else if (WatcherUtils.isValidId(id) == false) { validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException); } if (source == null) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/PutWatchResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceAction.java similarity index 100% rename from 
plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/service/WatcherServiceResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsAction.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsAction.java similarity index 100% rename from 
plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsAction.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsAction.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsRequest.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsRequest.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsRequest.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsRequest.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsRequestBuilder.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsRequestBuilder.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsRequestBuilder.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsRequestBuilder.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsResponse.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsResponse.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsResponse.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/WatcherStatsResponse.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/trigger/Trigger.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/trigger/Trigger.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/trigger/Trigger.java rename to 
plugin/core/src/main/java/org/elasticsearch/xpack/watcher/trigger/Trigger.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/trigger/TriggerEvent.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/trigger/TriggerEvent.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/trigger/TriggerEvent.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/trigger/TriggerEvent.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/watch/Payload.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/watch/Payload.java similarity index 100% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/watch/Payload.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/watch/Payload.java diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/watch/Watch.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/watch/Watch.java new file mode 100644 index 00000000000..2e9727690b9 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/watch/Watch.java @@ -0,0 +1,164 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.watcher.watch; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.watcher.actions.ActionStatus; +import org.elasticsearch.xpack.watcher.actions.ActionWrapper; +import org.elasticsearch.xpack.watcher.condition.ExecutableCondition; +import org.elasticsearch.xpack.watcher.input.ExecutableInput; +import org.elasticsearch.xpack.watcher.transform.ExecutableTransform; +import org.elasticsearch.xpack.watcher.trigger.Trigger; +import org.joda.time.DateTime; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +public class Watch implements ToXContentObject { + + public static final String INCLUDE_STATUS_KEY = "include_status"; + public static final String INDEX = ".watches"; + public static final String DOC_TYPE = "doc"; + + private final String id; + private final Trigger trigger; + private final ExecutableInput input; + private final ExecutableCondition condition; + @Nullable private final ExecutableTransform transform; + private final List actions; + @Nullable private final TimeValue throttlePeriod; + @Nullable private final Map metadata; + private final WatchStatus status; + + private transient long version = Versions.MATCH_ANY; + + public Watch(String id, Trigger trigger, ExecutableInput input, ExecutableCondition condition, @Nullable ExecutableTransform transform, + @Nullable TimeValue throttlePeriod, List actions, @Nullable Map metadata, + WatchStatus status) { + this.id = id; + this.trigger = trigger; + this.input = input; + this.condition = condition; + this.transform = transform; + this.actions = actions; + this.throttlePeriod = throttlePeriod; + this.metadata = metadata; + this.status = status; + } + + public String id() { + return id; + } + + public 
Trigger trigger() { + return trigger; + } + + public ExecutableInput input() { return input;} + + public ExecutableCondition condition() { + return condition; + } + + public ExecutableTransform transform() { + return transform; + } + + public TimeValue throttlePeriod() { + return throttlePeriod; + } + + public List actions() { + return actions; + } + + public Map metadata() { + return metadata; + } + + public WatchStatus status() { + return status; + } + + public long version() { + return version; + } + + public void version(long version) { + this.version = version; + } + + /** + * Sets the state of this watch to in/active + * + * @return {@code true} if the status of this watch changed, {@code false} otherwise. + */ + public boolean setState(boolean active, DateTime now) { + return status.setActive(active, now); + } + + /** + * Acks this watch. + * + * @return {@code true} if the status of this watch changed, {@code false} otherwise. + */ + public boolean ack(DateTime now, String... actions) { + return status.onAck(now, actions); + } + + public boolean acked(String actionId) { + ActionStatus actionStatus = status.actionStatus(actionId); + return actionStatus.ackStatus().state() == ActionStatus.AckStatus.State.ACKED; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + Watch watch = (Watch) o; + return watch.id.equals(id); + } + + @Override + public int hashCode() { + return id.hashCode(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(WatchField.TRIGGER.getPreferredName()).startObject().field(trigger.type(), trigger, params).endObject(); + builder.field(WatchField.INPUT.getPreferredName()).startObject().field(input.type(), input, params).endObject(); + builder.field(WatchField.CONDITION.getPreferredName()).startObject().field(condition.type(), condition, 
params).endObject(); + if (transform != null) { + builder.field(WatchField.TRANSFORM.getPreferredName()).startObject().field(transform.type(), transform, params).endObject(); + } + if (throttlePeriod != null) { + builder.timeValueField(WatchField.THROTTLE_PERIOD.getPreferredName(), + WatchField.THROTTLE_PERIOD_HUMAN.getPreferredName(), throttlePeriod); + } + builder.startObject(WatchField.ACTIONS.getPreferredName()); + for (ActionWrapper action : actions) { + builder.field(action.id(), action, params); + } + builder.endObject(); + if (metadata != null) { + builder.field(WatchField.METADATA.getPreferredName(), metadata); + } + if (params.paramAsBoolean(INCLUDE_STATUS_KEY, false)) { + builder.field(WatchField.STATUS.getPreferredName(), status, params); + } + builder.endObject(); + return builder; + } + +} diff --git a/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchField.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchField.java new file mode 100644 index 00000000000..2ce1a11c4a4 --- /dev/null +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchField.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.watcher.watch; + +import org.elasticsearch.common.ParseField; + +public final class WatchField { + public static final ParseField TRIGGER = new ParseField("trigger"); + public static final ParseField INPUT = new ParseField("input"); + public static final ParseField CONDITION = new ParseField("condition"); + public static final ParseField ACTIONS = new ParseField("actions"); + public static final ParseField TRANSFORM = new ParseField("transform"); + public static final ParseField THROTTLE_PERIOD = new ParseField("throttle_period_in_millis"); + public static final ParseField THROTTLE_PERIOD_HUMAN = new ParseField("throttle_period"); + public static final ParseField METADATA = new ParseField("metadata"); + public static final ParseField STATUS = new ParseField("status"); + public static final String ALL_ACTIONS_ID = "_all"; + + private WatchField() {} +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchStatus.java b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchStatus.java similarity index 98% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchStatus.java rename to plugin/core/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchStatus.java index e6bec1b8ed4..07e6434d3ec 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchStatus.java +++ b/plugin/core/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchStatus.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.watcher.actions.Action; import org.elasticsearch.xpack.watcher.actions.ActionStatus; -import org.elasticsearch.xpack.watcher.actions.throttler.AckThrottler; import org.elasticsearch.xpack.watcher.execution.ExecutionState; import org.elasticsearch.xpack.watcher.support.xcontent.WatcherParams; import 
org.elasticsearch.xpack.watcher.support.xcontent.WatcherXContentParser; @@ -156,7 +155,7 @@ public class WatchStatus implements ToXContentObject, Streamable { * Notifies this status that the givne actions were acked. If the current state of one of these actions is * {@link org.elasticsearch.xpack.watcher.actions.ActionStatus.AckStatus.State#ACKABLE ACKABLE}, * then we'll it'll change to {@link org.elasticsearch.xpack.watcher.actions.ActionStatus.AckStatus.State#ACKED ACKED} - * (when set to {@link org.elasticsearch.xpack.watcher.actions.ActionStatus.AckStatus.State#ACKED ACKED}, the {@link AckThrottler} + * (when set to {@link org.elasticsearch.xpack.watcher.actions.ActionStatus.AckStatus.State#ACKED ACKED}, the AckThrottler * will throttle the execution of the action. * * @return {@code true} if the state of changed due to the ack, {@code false} otherwise. @@ -165,7 +164,7 @@ public class WatchStatus implements ToXContentObject, Streamable { boolean changed = false; boolean containsAll = false; for (String actionId : actionIds) { - if (actionId.equals(Watch.ALL_ACTIONS_ID)) { + if (actionId.equals(WatchField.ALL_ACTIONS_ID)) { containsAll = true; break; } diff --git a/plugin/licenses/x-pack-core-LICENSE.txt b/plugin/licenses/x-pack-core-LICENSE.txt new file mode 100644 index 00000000000..9202eab5b2f --- /dev/null +++ b/plugin/licenses/x-pack-core-LICENSE.txt @@ -0,0 +1,120 @@ +COMMERCIAL SOFTWARE END USER LICENSE AGREEMENT + + READ THIS COMMERCIAL SOFTWARE END USER LICENSE AGREEMENT CAREFULLY, WHICH CONSTITUTES A LEGALLY BINDING AGREEMENT AND GOVERNS YOUR USE OF ELASTIC’S PROPRIETARY SOFTWARE. BY INSTALLING AND/OR USING SUCH SOFTWARE, YOU ARE INDICATING THAT YOU AGREE TO THE TERMS AND CONDITIONS SET FORTH IN THIS AGREEMENT. IF YOU DO NOT AGREE WITH SUCH TERMS AND CONDITIONS, YOU MAY NOT INSTALL OR USE ANY OF THE SOFTWARE. 
IF YOU ARE INSTALLING OR USING THE SOFTWARE ON BEHALF OF YOUR EMPLOYER OR ANOTHER ENTITY, YOU REPRESENT AND WARRANT THAT YOU HAVE THE ACTUAL AUTHORITY TO AGREE TO THE TERMS AND CONDITIONS ON BEHALF OF SUCH EMPLOYER OR OTHER ENTITY. + + This COMMERCIAL SOFTWARE END USER LICENSE AGREEMENT (this “Agreement") is entered into by and between the applicable Elastic entity referenced in Attachment 1 hereto (“Elastic”) and the person, or entity on behalf of whom you are acting, as applicable (“You” or “Customer”) that has downloaded any of Elastic’s proprietary software to which this Agreement is attached or in connection with which this Agreement is presented to You (collectively, the “Software”). This Agreement is effective upon the earliest date of the commencement of any License granted pursuant to Section 1.1. below (as applicable, the “Effective Date”). + +1. SOFTWARE LICENSE AND RESTRICTIONS +1.1 License Grants. +(a) Trial Version License. Subject to the terms and conditions of this Agreement, Elastic agrees to grant, and does hereby grant to You, for a period of thirty (30) days from the date on which You first install the Software (the “Trial Term”), a License to use the Eligible Features and Functions of the Software that are applicable to the Trial Version of the Software.   
You understand and agree that upon the expiration of a Trial Term, You will no longer be able to use the Software, unless you either (i) purchase a Subscription, in which case You will receive a License under Section 1.1(b) below to use the Eligible Features and Functions of the Software that are applicable to the Subscription level that You purchase, (ii) complete the Registration of Your use of the Software with Elastic, in which case, if available, You will receive a License under Section 1.1(c) below to the Basic Version of the Software or (iii) obtain from Elastic written consent (e-mail sufficient) to extend the Trial Term, which may be granted by Elastic in its sole and absolute discretion. +(b) Subscription License. If you enter into a Subscription Agreement with Elastic, then, subject to the terms and conditions of this Agreement and complete payment of any and all applicable Subscription fees, Elastic agrees to grant, and does hereby grant to You during the applicable Subscription Term, and for the restricted scope of this Agreement, a License to use the Eligible Features and Functions of the Software that are applicable to the Subscription level that You have purchased, for the number of Nodes and for the specific Project for which you have purchased a Subscription. The level of Subscription, the number of Nodes and specific Project for which you have purchased such Subscription, are set forth on the applicable ordering document entered into by Elastic and You for the purchase of the applicable Subscription (“Order Form”). +(c) Basic Version License. 
Subject to the terms and conditions of this Agreement, the availability of such a License for the applicable Software and any applicable limitation on the number of Nodes, and in consideration of the Registration of Your use of the Software, Elastic agrees to grant, and does hereby grant to You, for a period of one (1) year from the date of Registration, a License to use the Eligible Features and Functions of the Software that are applicable to the Basic Version of the Software. The foregoing license may be renewed annually upon the mutual agreement of the parties. +1.2 Reservation of Rights; Restrictions. As between Elastic and You, Elastic owns all right title and interest in and to the Software and any derivative works thereof, and except as expressly set forth in Section 1.1 above, no other license to the Software is granted to You by implication, estoppel or otherwise. You agree not to: (i) reverse engineer or decompile, decrypt, disassemble or otherwise reduce any Software or any portion thereof to human-readable form, except and only to the extent any such restriction is prohibited by applicable law, (ii) deploy the Software on more Nodes than are permitted under the applicable License grant in Section 1.1 above, (iii) where You have purchased a Subscription, use the Software in connection with any Project other than the Project for which You have purchased such Subscription, as identified on the applicable Order Form, (iv) prepare derivative works from, modify, copy or use the Software in any manner except as expressly permitted in this Agreement; (v) except as expressly permitted in Section 1.1 above, transfer, sell, rent, lease, distribute, sublicense, loan or otherwise transfer the Software in whole or in part to any third party; (vi) except as may be expressly permitted on an applicable Order Form or in another agreement between the parties, use the Software for providing time-sharing services, any software-as-a-service offering (“SaaS”), service bureau 
services or as part of an application services provider or other service offering; (vii) circumvent the limitations on use of the Software that are imposed or preserved by any License Key, (viii) alter or remove any Marks and Notices in the Software; (ix) deploy the Commercial Software on or in connection with any third party infrastructure as a service that includes any Elastic-branded software as a service; or (x) make available to any third party any analysis of the results of operation of the Software, including benchmarking results, without the prior written consent of Elastic. The Software may contain or be provided with open source libraries, components, utilities and other open source software (collectively, “Open Source Software”), which Open Source Software may have applicable license terms as identified on a website designated by Elastic or otherwise provided with the Software or Documentation. Notwithstanding anything to the contrary herein, use of the Open Source Software shall be subject to the license terms and conditions applicable to such Open Source Software, to the extent required by the applicable licensor (which terms shall not restrict the license rights granted to You hereunder, but may contain additional rights). +1.3 Audit Rights. You agree that, unless such right is waived in writing by Elastic, Elastic shall have the right, upon fifteen (15) days’ notice to You, to audit Your use of the Software for compliance with any limitations on Your use of the Software that are set forth herein. You agree to provide Elastic with the necessary access to the Software to conduct such an audit either (i) remotely, or (ii) if remote performance is not possible, at Your facilities, during normal business hours and no more than one (1) time in any twelve (12) month period. 
In the event any such audit reveals that You have used the Software in excess of the applicable quantitative limitations, You agree to promptly pay to Elastic an amount equal to the difference between the fees actually paid and the fees that You should have paid to remain in compliance with such quantitative limitations. This Section 1.3 shall survive for a period of two (2) years from the termination or expiration of this Agreement. +1.4 Cluster Metadata. You understand and agree that once deployed, and on a daily basis, the Software may provide metadata to Elastic about Your cluster statistics and associates that metadata with Your IP address. However, no other information is provided to Elastic by the Software, including any information about the data You process or store in connection with Your use of the Software. Instructions for disabling this feature are contained in the Software, however leaving this feature active enables Elastic to gather cluster statistics and provide an improved level of support to You. +2. TERM AND TERMINATION +2.1 Term. Unless earlier terminated under Section 2.2 below, this Agreement shall commence on the Effective Date, and shall continue in force for the term of the last to expire applicable license set forth in Section 1.1 above. +2.2 Termination. Either party may, upon written notice to the other party, terminate this Agreement for material breach by the other party automatically and without any other formality, if such party has failed to cure such material breach within thirty (30) days of receiving written notice of such material breach from the non-breaching party. Notwithstanding the foregoing, this Agreement shall automatically terminate in the event that You intentionally breach the scope of a license granted in Section 1.1 of this Agreement, provided that Elastic reserves the right to retroactively waive such automatic termination upon written notice to You. +2.3 Post Termination or Expiration. 
Upon termination or expiration of this Agreement, for any reason, You shall promptly cease the use of the Software and Documentation and destroy (and certify to Elastic in writing the fact of such destruction), or return to Elastic, all copies of the Software and Documentation then in Your possession or under Your control. +2.4 Survival. Sections 2.3, 2.4, 3, 4, 5 and 6 (as any such Sections may be modified by Attachment 1, if applicable) shall survive any termination or expiration of this Agreement. +3. LIMITED WARRANTY AND DISCLAIMER OF WARRANTIES +3.1 Limited Performance Warranty. Subject to You purchasing a Subscription, Elastic warrants that during the applicable Subscription Term, the Software will perform in all material respects in accordance with the Documentation. In the event of a breach of the foregoing warranty, Elastic’s sole obligation, and Your exclusive remedy shall be for Elastic to (i) correct any failure(s) of the Software to perform in all material respects in accordance with the Documentation or (ii) if Elastic is unable to provide such a correction within thirty (30) days of receipt of notice of the applicable non-conformity, You may elect to terminate this Agreement and the associated Subscription, and Elastic will promptly refund to You any pre-paid, unused fees paid by You to Elastic for the applicable Subscription. The warranty set forth in this Section 3.1 does not apply if the applicable Software or any portion thereof: (a) has been altered, except by or on behalf Elastic; (b) has not been used, installed, operated, repaired, or maintained in accordance with this Agreement and/or the Documentation; (c) has been subjected to abnormal physical or electrical stress, misuse, negligence, or accident; or (d) is used on equipment, products, or systems not meeting specifications identified by Elastic in the Documentation. 
Additionally, the warranties set forth herein only apply when notice of a warranty claim is provided to Elastic within the applicable warranty period specified herein and do not apply to any bug, defect or error caused by or attributable to software or hardware not supplied by Elastic. +3.2 Malicious Code. Elastic represents and warrants that prior to making it available for delivery to You, Elastic will use standard industry practices including, without limitation, the use of an updated commercial anti-virus program, to test the Software for Malicious Code and remove any Malicious Code it discovers. In the event of a breach of the foregoing warranty, Elastic’s sole obligation, and Your exclusive remedy shall be for Elastic to replace the Software with Software that does not contain any Malicious Code. +3.3 Warranty Disclaimer. EXCEPT AS EXPRESSLY SET FORTH IN THIS SECTION 3, TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS” WITHOUT WARRANTY OF ANY KIND, AND ELASTIC AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR STATUTORY REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, ELASTIC AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT WITH RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF THE FOREGOING. FURTHER, ELASTIC DOES NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT THE USE OF THE SOFTWARE WILL BE UNINTERRUPTED. +4. LIMITATION OF LIABILITY +The provisions of this Section 4, including to the extent modified by an applicable provision in Attachment 1 hereto, apply if You have not purchased a Subscription. 
If you have purchased a Subscription, then the limitations of liability set forth in the applicable Subscription Agreement will apply in lieu of those set forth in this Section 4, including to the extent modified by an applicable provision in Attachment 1 hereto. +4.1 Disclaimer of Certain Damages. IN NO EVENT SHALL YOU OR ELASTIC OR ITS LICENSORS BE LIABLE FOR ANY LOSS OF PROFITS, LOSS OF USE, BUSINESS INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY INDIRECT, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND IN CONNECTION WITH OR ARISING OUT OF THE USE OR INABILITY TO USE THE SOFTWARE, OR THE PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A BREACH OF CONTRACT OR TORTIOUS CONDUCT, INCLUDING NEGLIGENCE, EVEN IF THE RESPONSIBLE PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. THE LIMITATIONS OF LIABILITY SET FORTH IN THIS SECTION 4.1 SHALL NOT APPLY TO A BREACH THROUGH GROSS NEGLIGENCE OR INTENTIONAL MISCONDUCT BY YOU OF THE SCOPE OF THE LICENSE GRANTED IN SECTION 1.1 OR TO ANY OTHER LIABILITY THAT CANNOT BE EXCLUDED OR LIMITED UNDER APPLICABLE LAW. +4.2 Damages Cap. IN NO EVENT SHALL ELASTIC’S OR ITS LICENSORS’ AGGREGATE, CUMULATIVE LIABILITY UNDER THIS AGREEMENT EXCEED ONE THOUSAND DOLLARS ($1,000). +4.3 YOU AGREE THAT THE FOREGOING LIMITATIONS, EXCLUSIONS AND DISCLAIMERS ARE A REASONABLE ALLOCATION OF THE RISK BETWEEN THE PARTIES AND WILL APPLY TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, EVEN IF ANY REMEDY FAILS IN ITS ESSENTIAL PURPOSE. +5. MISCELLANEOUS +This Agreement, including Attachment 1 hereto, which is hereby incorporated herein by this reference, as well as any applicable Order Form and Subscription Agreement, completely and exclusively state the entire agreement of the parties regarding the subject matter herein, and it supersedes, and its terms govern, all prior proposals, agreements, or other communications between the parties, oral or written, regarding such subject matter. 
In the event of any conflict between the terms and conditions of any of the foregoing documents, the conflict shall be resolved based on the following order of precedence: (i) an applicable Order Form (but only for the transaction thereunder), (ii) an applicable Subscription Agreement, (iii) the Support Services Policy and (iv) this Agreement. For the avoidance of doubt, the parties hereby expressly acknowledge and agree that if You issue any purchase order or similar document in connection with the purchase of a Subscription and/or obtaining of License to the Software, You will do so only for Your internal, administrative purposes and not with the intent to provide any contractual terms. This Agreement may not be modified except by a subsequently dated, written amendment that expressly amends this Agreement and which is signed on behalf of Elastic and You, by duly authorized representatives. If any provision hereof is held unenforceable, this Agreement will continue without said provision and be interpreted to reflect the original intent of the parties. +6. DEFINITIONS +The following terms have the meanings ascribed: +6.1 “Affiliate” means, with respect to a party, any entity that controls, is controlled by, or which is under common control with, such party, where “control” means ownership of at least fifty percent (50%) of the outstanding voting shares of the entity, or the contractual right to establish policy for, and manage the operations of, the entity. +6.2 “Basic Version” means that version of the Software available for use without the purchase of a Subscription, but which does require Registration. +6.3 “Contractor” means any third party contractor performing services on Your behalf. +6.4 “Documentation” means the published end user documentation provided by Elastic with the Software. 
+6.5 “Eligible Features and Functions” means those features and functions of the Software that are eligible for use with respect to the particular version of the Software licensed by You or the Subscription level purchased by You. A list of the Eligible Features and Functions that correspond to each version of the Software and Subscription levels may be found at https://www.elastic.co/subscriptions. +6.6 “License” means a limited, non-exclusive, non-transferable, fully paid up, right and license (without the right to grant or authorize sublicenses) solely for Your internal business operations to (i) install and use, in object code format, the Software, (ii) use, and distribute internally a reasonable number of copies of the Documentation, provided that You must include on such copies all Marks and Notices; (iii) permit Contractors and Your Affiliates to use the Software and Documentation as set forth in (i) and (ii) above, provided that such use by Contractors must be solely for Your benefit, and You shall be responsible for all acts and omissions of such Contractors and Affiliates in connection with their use of the Software that are contrary to the terms and conditions of this Agreement. +6.7 “License Key” means an alphanumeric code that enables the Eligible Features and Functions of the Software. +6.8 “Malicious Code” means any code that is designed to harm, or otherwise disrupt in any unauthorized manner, the operation of Your computer programs or computer systems or destroy or damage data. For clarity, Malicious Code shall not include any software bugs or errors handled through Support Services, or any standard features or functions of the Software and/or any License Key that are intended to enforce the temporal and/or other limitations on the scope of the use of the Software to the scope of the License granted to You. 
+6.9 “Marks and Notices” means all Elastic trademarks, trade names, logos and notices present on the Documentation as originally provided by Elastic. +6.10 “Node” means an instance of Software on a single physical server or virtual machine, provided that all client Nodes are excluded from calculating Subscription fees based on the number of Nodes. +6.11 “Project” means a specific use case for the Software, with Nodes being deployed for use in a logical grouping of functionality to support such use case. +6.12 “Registration” means Elastic’s then-current process under which You may register Your use of the Software with Elastic by providing certain information to Elastic regarding You and Your use of the Software. +6.13 “Subscription” means the right to receive Support Services and a License to the Software. +6.14 “Subscription Agreement” means a legally enforceable agreement between You and Elastic, under which You purchase a Subscription. +6.15 “Subscription Level” means the level of Subscription purchased by You. The Subscription Level purchased by You determines the specific Support Services that You are entitled to receive, and the specific Eligible Features and functions that You are entitled to use. +6.16 “Subscription Term” means the period of time for which You have purchased a Subscription. +6.17 “Trial Version” means that version of the Software available for use without the purchase of a Subscription and without Registration. + +ATTACHMENT 1 +ADDITIONAL TERMS AND CONDITIONS + +A. The following additional terms and conditions apply to all Customers with principal offices in the United States of America: + +(1) Applicable Elasticsearch Entity. The entity providing the license is Elasticsearch, Inc., a Delaware corporation. + +(2) Government Rights. The Software product is "Commercial Computer Software," as that term is defined in 48 C.F.R. 2.101, and as the term is used in 48 C.F.R. 
Part 12, and is a Commercial Item comprised of "commercial computer software" and "commercial computer software documentation". If acquired by or on behalf of a civilian agency, the U.S. Government acquires this commercial computer software and/or commercial computer software documentation subject to the terms of this Agreement, as specified in 48 C.F.R. 12.212 (Computer Software) and 12.211 (Technical Data) of the Federal Acquisition Regulation ("FAR") and its successors. If acquired by or on behalf of any agency within the Department of Defense ("DOD"), the U.S. Government acquires this commercial computer software and/or commercial computer software documentation subject to the terms of the Elastic Software End User License Agreement as specified in 48 C.F.R. 227.7202-3 and 48 C.F.R. 227.7202-4 of the DOD FAR Supplement ("DFARS") and its successors, and consistent with 48 C.F.R. 227.7202. This U.S. Government Rights clause, consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202 is in lieu of, and supersedes, any other FAR, DFARS, or other clause or provision that addresses Government rights in computer software, computer software documentation or technical data related to the Software under this Agreement and in any Subcontract under which this commercial computer software and commercial computer software documentation is acquired or licensed. +(3) Export Control. You acknowledge that the goods, software and technology acquired from Elastic are subject to U.S. export control laws and regulations, including but not limited to the International Traffic In Arms Regulations (“ITAR”) (22 C.F.R. Parts 120-130 (2010)); the Export Administration Regulations ("EAR") (15 C.F.R. Parts 730-774 (2010)); the U.S. antiboycott regulations in the EAR and U.S. Department of the Treasury regulations; the economic sanctions regulations and guidelines of the U.S. Department of the Treasury, Office of Foreign Assets Control, and the USA Patriot Act (Title III of Pub. L. 
107-56, signed into law October 26, 2001), as amended.  You are now and will remain in the future compliant with all such export control laws and regulations, and will not export, re-export, otherwise transfer any Elastic goods, software or technology or disclose any Elastic software or technology to any person contrary to such laws or regulations.  You acknowledge that remote access to the Software may in certain circumstances be considered a re-export of Software, and accordingly, may not be granted in contravention of U.S. export control laws and regulations. +(4) Governing Law, Jurisdiction and Venue. +(a) Customers in California. If Customer is located in California (as determined by the Customer address on the applicable Order Form, or for a trial license under 1.1(a), the location of the person who installed the Software), this Agreement will be governed by the laws of the State of California, without regard to its conflict of laws principles, and all suits hereunder will be brought solely in Federal Court for the Northern District of California, or if that court lacks subject matter jurisdiction, in any California State Court located in Santa Clara County. +(b) Customers Outside of California. If Customer is located anywhere other than California (as determined by the Customer address on the applicable Order Form, or for a trial license under 1.1(a), the location of the person who installed the Software), this Agreement will be governed by the laws of the State of Delaware, without regard to its conflict of laws principles, and all suits hereunder will be brought solely in Federal Court for the District of Delaware, or if that court lacks subject matter jurisdiction, in any Delaware State Court located in Wilmington, Delaware. +(c) All Customers. This Agreement shall not be governed by the 1980 UN Convention on Contracts for the International Sale of Goods. 
The parties hereby irrevocably waive any and all claims and defenses either might otherwise have in any action or proceeding in any of the applicable courts set forth in (a) or (b) above, based upon any alleged lack of personal jurisdiction, improper venue, forum non conveniens, or any similar claim or defense. +(d) Equitable Relief. A breach or threatened breach, by either party of Section 4 may cause irreparable harm for which the non-breaching party shall be entitled to seek injunctive relief without being required to post a bond. + +B. The following additional terms and conditions apply to all Customers with principal offices in Canada: + +(1) Applicable Elasticsearch Entity. The entity providing the license is Elasticsearch B.C. Ltd., a corporation incorporated under laws of the Province of British Columbia. + +(2) Export Control. You acknowledge that the goods, software and technology acquired from Elastic are subject to the restrictions and controls set out in Section A(3) above as well as those imposed by the Export and Import Permits Act (Canada) and the regulations thereunder and that you will comply with all applicable laws and regulations. Without limitation, You acknowledge that the Software, or any portion thereof, will not be exported: (a) to any country on Canada's Area Control List; (b) to any country subject to UN Security Council embargo or action; or (c) contrary to Canada's Export Control List Item 5505. You are now and will remain in the future compliant with all such export control laws and regulations, and will not export, re-export, otherwise transfer any Elastic goods, software or technology or disclose any Elastic software or technology to any person contrary to such laws or regulations.  
You will not export or re-export the Software, or any portion thereof, directly or indirectly, in violation of the Canadian export administration laws and regulations to any country or end user, or to any end user who you know or have reason to know will utilize them in the design, development or production of nuclear, chemical or biological weapons. You further acknowledge that the Software product may include technical data subject to such Canadian export regulations. Elastic does not represent that the Software is appropriate or available for use in all countries. Elastic prohibits accessing materials from countries or states where contents are illegal. You are using the Software on your own initiative and you are responsible for compliance with all applicable laws. You hereby agree to indemnify Elastic and its Affiliates from any claims, actions, liability or expenses (including reasonable lawyers' fees) resulting from Your failure to act in accordance with the acknowledgements, agreements, and representations in this Section B(2). + (3) Governing Law and Dispute Resolution. This Agreement shall be governed by the Province of Ontario and the federal laws of Canada applicable therein without regard to conflict of laws provisions. The parties hereby irrevocably waive any and all claims and defenses either might otherwise have in any such action or proceeding in any of such courts based upon any alleged lack of personal jurisdiction, improper venue, forum non conveniens or any similar claim or defense. Any dispute, claim or controversy arising out of or relating to this Agreement or the existence, breach, termination, enforcement, interpretation or validity thereof, including the determination of the scope or applicability of this agreement to arbitrate, (each, a “Dispute”), which the parties are unable to resolve after good faith negotiations, shall be submitted first to the upper management level of the parties. 
The parties, through their upper management level representatives shall meet within thirty (30) days of the Dispute being referred to them and if the parties are unable to resolve such Dispute within thirty (30) days of meeting, the parties agree to seek to resolve the Dispute through mediation with ADR Chambers in the City of Toronto, Ontario, Canada before pursuing any other proceedings. The costs of the mediator shall be shared equally by the parties. If the Dispute has not been resolved within thirty (30) days of the notice to desire to mediate, any party may terminate the mediation and proceed to arbitration and the matter shall be referred to and finally resolved by arbitration at ADR Chambers pursuant to the general ADR Chambers Rules for Arbitration in the City of Toronto, Ontario, Canada. The arbitration shall proceed in accordance with the provisions of the Arbitration Act (Ontario). The arbitral panel shall consist of three (3) arbitrators, selected as follows: each party shall appoint one (1) arbitrator; and those two (2) arbitrators shall discuss and select a chairman. If the two (2) party-appointed arbitrators are unable to agree on the chairman, the chairman shall be selected in accordance with the applicable rules of the arbitration body. Each arbitrator shall be independent of each of the parties. The arbitrators shall have the authority to grant specific performance and to allocate between the parties the costs of arbitration (including service fees, arbitrator fees and all other fees related to the arbitration) in such equitable manner as the arbitrators may determine. The prevailing party in any arbitration shall be entitled to receive reimbursement of its reasonable expenses incurred in connection therewith. Judgment upon the award so rendered may be entered in a court having jurisdiction or application may be made to such court for judicial acceptance of any award and an order of enforcement, as the case may be. 
Notwithstanding the foregoing, Elastic shall have the right to institute an action in a court of proper jurisdiction for preliminary injunctive relief pending a final decision by the arbitrator, provided that a permanent injunction and damages shall only be awarded by the arbitrator. The language to be used in the arbitral proceedings shall be English. + (4) Language. Any translation of this Agreement is done for local requirements and in the event of a dispute between the English and any non-English version, the English version of this Agreement shall govern. At the request of the parties, the official language of this Agreement and all communications and documents relating hereto is the English language, and the English-language version shall govern all interpretation of the Agreement.  À la demande des parties, la langue officielle de la présente convention ainsi que toutes communications et tous documents s'y rapportant est la langue anglaise, et la version anglaise est celle qui régit toute interprétation de la présente convention. +(5) Warranty Disclaimer. For Customers with principal offices in the Province of Québec, the following new sentence is to be added to the end of Section 3.3: “SOME JURISDICTIONS DO NOT ALLOW LIMITATIONS OR EXCLUSIONS OF CERTAIN TYPES OF DAMAGES AND/OR WARRANTIES AND CONDITIONS. THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS SET FORTH IN THIS AGREEMENT SHALL NOT APPLY IF AND ONLY IF AND TO THE EXTENT THAT THE LAWS OF A COMPETENT JURISDICTION REQUIRE LIABILITIES BEYOND AND DESPITE THESE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS.” +(6) Limitation of Liability. For Customers with principal offices in the Province of Québec, the following new sentence is to be added to the end of Section 4.1: “SOME JURISDICTIONS DO NOT ALLOW LIMITATIONS OR EXCLUSIONS OF CERTAIN TYPES OF DAMAGES AND/OR WARRANTIES AND CONDITIONS.  
THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS SET FORTH IN THIS AGREEMENT SHALL NOT APPLY IF AND ONLY IF AND TO THE EXTENT THAT THE LAWS OF A COMPETENT JURISDICTION REQUIRE LIABILITIES BEYOND AND DESPITE THESE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS.” + +C. The following additional terms and conditions apply to all Customers with principal offices outside of the United States of America and Canada: + +(1) Applicable Elasticsearch Entity. The entity providing the license in Germany is Elasticsearch Gmbh; in France is Elasticsearch SARL, in the United Kingdom is Elasticsearch Ltd, in Australia is Elasticsearch Pty Ltd., in Japan is Elasticsearch KK, in Sweden is Elasticsearch AB, in Norway is Elasticsearch AS and in all other countries is Elasticsearch BV. + +(2) Choice of Law. This Agreement shall be governed by and construed in accordance with the laws of the State of New York, without reference to or application of choice of law rules or principles. Notwithstanding any choice of law provision or otherwise, the Uniform Computer Information Transactions Act (UCITA) and the United Nations Convention on the International Sale of Goods shall not apply. + +(3) Arbitration. Any dispute, claim or controversy arising out of or relating to this Agreement or the existence, breach, termination, enforcement, interpretation or validity thereof, including the determination of the scope or applicability of this agreement to arbitrate, (each, a “Dispute”) shall be referred to and finally resolved by arbitration under the rules and at the location identified below. The arbitral panel shall consist of three (3) arbitrators, selected as follows: each party shall appoint one (1) arbitrator; and those two (2) arbitrators shall discuss and select a chairman. If the two party-appointed arbitrators are unable to agree on the chairman, the chairman shall be selected in accordance with the applicable rules of the arbitration body. Each arbitrator shall be independent of each of the parties. 
The arbitrators shall have the authority to grant specific performance and to allocate between the parties the costs of arbitration (including service fees, arbitrator fees and all other fees related to the arbitration) in such equitable manner as the arbitrators may determine. The prevailing party in any arbitration shall be entitled to receive reimbursement of its reasonable expenses incurred in connection therewith. Judgment upon the award so rendered may be entered in a court having jurisdiction or application may be made to such court for judicial acceptance of any award and an order of enforcement, as the case may be. Notwithstanding the foregoing, Elastic shall have the right to institute an action in a court of proper jurisdiction for preliminary injunctive relief pending a final decision by the arbitrator, provided that a permanent injunction and damages shall only be awarded by the arbitrator. The language to be used in the arbitral proceedings shall be English. + +In addition, the following terms only apply to Customers with principal offices within Europe, the Middle East or Africa (EMEA): + +Arbitration Rules and Location. Any Dispute shall be referred to and finally resolved by arbitration under the London Court of International Arbitration (“LCIA”) Rules (which Rules are deemed to be incorporated by reference into this clause) on the basis that the governing law is the law of the State of New York, USA. The seat, or legal place, of arbitration shall be London, England. + +(b) In addition, the following terms only apply to Customers with principal offices within Asia Pacific, Australia & New Zealand: + +Arbitration Rules and Location. 
Any Dispute shall be referred to and finally resolved by arbitration under the Rules of Conciliation and Arbitration of the International Chamber of Commerce (“ICC”) in force on the date when the notice of arbitration is submitted in accordance with such Rules (which Rules are deemed to be incorporated by reference into this clause) on the basis that the governing law is the law of the State of New York, USA. The seat, or legal place, of arbitration shall be Singapore. + +(c) In addition, the following terms only apply to Customers with principal offices within the Americas (excluding North America): + +Arbitration Rules and Location. Any Dispute shall be referred to and finally resolved by arbitration under International Dispute Resolution Procedures of the American Arbitration Association (“AAA”) in force on the date when the notice of arbitration is submitted in accordance with such Procedures (which Procedures are deemed to be incorporated by reference into this clause) on the basis that the governing law is the law of the State of New York, USA. The seat, or legal place, of arbitration shall be New York, New York, USA. + +(4) In addition, for Customers with principal offices within the UK, the following new sentence is added to the end of Section 4.1: + +Nothing in this Agreement shall have effect so as to limit or exclude a party’s liability for death or personal injury caused by negligence or for fraud including fraudulent misrepresentation and this Section 4.1 shall take effect subject to this provision. + +(5) In addition, for Customers with principal offices within France, Sections 1.2, 3 and 4.1 of the Agreement are deleted and replaced with the following new Sections 1.2, 3.3 and 4.1: +1.2 Reservation of Rights; Restrictions. 
Elastic owns all right title and interest in and to the Software and any derivative works thereof, and except as expressly set forth in Section 1.1 above, no other license to the Software is granted to You by implication, or otherwise. You agree not to prepare derivative works from, modify, copy or use the Software in any manner except as expressly permitted in this Agreement; provided that You may copy the Software for archival purposes, only where such software is provided on a non-durable medium; and You may decompile the Software, where necessary for interoperability purposes and where necessary for the correction of errors making the software unfit for its intended purpose, if such right is not reserved by Elastic as editor of the Software. Pursuant to article L122-6-1 of the French intellectual property code, Elastic reserves the right to correct any bugs as necessary for the Software to serve its intended purpose. You agree not to: (i) transfer, sell, rent, lease, distribute, sublicense, loan or otherwise transfer the Software in whole or in part to any third party; (ii) use the Software for providing time-sharing services, any software-as-a-service offering (“SaaS”), service bureau services or as part of an application services provider or other service offering; (iii) alter or remove any proprietary notices in the Software; or (iv) make available to any third party any analysis of the results of operation of the Software, including benchmarking results, without the prior written consent of Elastic. +3.3 Warranty Disclaimer. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS” WITHOUT WARRANTY OF ANY KIND, AND ELASTIC AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR STATUTORY REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. 
TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, ELASTIC AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR PURPOSE WITH RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF THE FOREGOING. FURTHER, ELASTIC DOES NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT THE USE OF THE SOFTWARE WILL BE UNINTERRUPTED. +4.1 Disclaimer of Certain Damages. IN NO EVENT SHALL YOU OR ELASTIC OR ITS LICENSORS BE LIABLE FOR ANY LOSS OF PROFITS, LOSS OF USE, BUSINESS INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY INDIRECT OR UNFORESEEABLE DAMAGES OF ANY KIND IN CONNECTION WITH OR ARISING OUT OF THE USE OR INABILITY TO USE THE SOFTWARE, OR THE PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A BREACH OF CONTRACT OR TORTIOUS CONDUCT, INCLUDING NEGLIGENCE. THE LIMITATIONS OF LIABILITY SET FORTH IN THIS SECTION 4.1 SHALL NOT APPLY TO A BREACH, THROUGH GROSS NEGLIGENCE OR INTENTIONAL MISCONDUCT BY YOU, OF THE SCOPE OF THE LICENSE GRANTED IN SECTION 1.1, OR IN CASE OF DEATH OR PERSONAL INJURY. +(6) In addition, for Customers located within Australia: (a) Sections 3.1, 3.2 and 3.3 of the Agreement are deleted and replaced with the following new Sections 3.1, 3.2, 3.3, 3.4 and 3.5; and (b) Sections 4.1, 4.2 and 4.3 of the Agreement are deleted and replaced with the following new Sections 4.1, 4.2, and 4.3: +3.1 Despite anything in this Agreement, Elastic’s goods come with guarantees that cannot be excluded under the Australian Consumer Law (as set out in the Competition and Consumer Act 2010 (Cth)). You are entitled to a replacement or refund for a major failure and compensation for any other reasonably foreseeable loss or damage. You are also entitled to have the goods repaired or replaced if the goods fail to be of acceptable quality and the failure does not amount to a major failure. +3.2 Limited Performance Warranty. 
Subject to You purchasing a Subscription, Elastic warrants that during the applicable Subscription Term, the Software will perform in all material respects in accordance with the Documentation. In the event of a breach of the foregoing warranty during the Subscription Term and where You notify Elastic that the Software does not perform in all material respects in accordance with the Documentation, Elastic’s sole obligation, and Your exclusive remedy shall be for Elastic to (i) correct (at Elastic’s cost) any failure(s) of the Software to perform in all material respects in accordance with the Documentation or (ii) if Elastic is unable to provide such a correction within thirty (30) days of receipt of notice of the applicable non-conformity, You may elect to terminate this Agreement and the associated Subscription, and Elastic will promptly refund to You any pre-paid, unused fees paid by You to Elastic for the applicable Subscription. The warranty set forth in this Section 3.2 does not apply if the applicable Software or any portion thereof: (a) has been altered, except by or on behalf Elastic; (b) has not been used, installed, operated, repaired, or maintained in accordance with this Agreement and/or the Documentation; (c) has been subjected to abnormal physical or electrical stress, misuse, negligence, or accident; or (d) is used on equipment, products, or systems not meeting specifications identified by Elastic in the Documentation. Additionally, the warranties set forth herein only apply when notice of a warranty claim is provided to Elastic within the applicable warranty period specified herein and do not apply to any bug, defect or error caused by or attributable to software or hardware not supplied by Elastic. 
+3.3 For the purposes of Section 3.2, You must use the contact details set out below to notify Elastic that the Software does not perform in all material respects in accordance with the Documentation: +Elasticsearch Pty Ltd +4th Floor, 17-19 Alberta Street +Sydney, New South Wales, 2000, Australia + +3.4 Malicious Code. Elastic represents and warrants that prior to making it available for delivery to You, Elastic will use standard industry practices including, without limitation, the use of an updated commercial anti-virus program, to test the Software for Malicious Code and remove any Malicious Code it discovers. In the event of a breach of the foregoing warranty, Elastic’s sole obligation, and Your exclusive remedy shall be, at Elastic’s option, for Elastic to replace the Software with Software that does not contain any Malicious Code or to pay for the cost of the Software to be replaced with Software that does not contain any Malicious Code. +3.5 Warranty Disclaimer. NOTHING IN THIS AGREEMENT IS INTENDED TO LIMIT CUSTOMER’S NON-EXCLUDABLE RIGHTS UNDER THE COMPETITION AND CONSUMER ACT 2010 (CTH). EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT AND TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS” WITHOUT WARRANTY OF ANY KIND, AND ELASTIC AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR STATUTORY REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW AND EXCEPT AS SET OUT IN THIS AGREEMENT, ELASTIC AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT WITH RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF THE FOREGOING. FURTHER, ELASTIC DOES NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT THE USE OF THE SOFTWARE WILL BE UNINTERRUPTED. +4.1 Disclaimer of Certain Damages. 
Subject to clause 4.3, a party is not liable for Consequential Loss however caused (including by the negligence of that party) suffered or incurred by the other party in connection with this agreement. “Consequential Loss” means loss of revenues, loss of reputation, indirect loss, loss of profits, consequential loss, loss of actual or anticipated savings, indirect loss, lost opportunities, including opportunities to enter into arrangements with third parties, loss or damage in connection with claims against by third parties, or loss or corruption or data. +4.2 Damages Cap. SUBJECT TO CLAUSES 4.1 AND 4.3, ANY LIABILITY OF ELASTIC FOR ANY LOSS OR DAMAGE, HOWEVER CAUSED (INCLUDING BY THE NEGLIGENCE OF ELASTIC), SUFFERED BY YOU IN CONNECTION WITH THIS AGREEMENT IS LIMITED TO ONE THOUSAND DOLLARS ($1,000). THE LIMITATION SET OUT IN THIS SECTION 4.2 IS AN AGGREGATE LIMIT FOR ALL CLAIMS, WHENEVER MADE. +4.3 Australian Consumer Law. IF THE COMPETITION AND CONSUMER ACT 2010 (CTH) OR ANY OTHER LEGISLATION STATES THAT THERE IS A GUARANTEE IN RELATION TO ANY GOOD OR SERVICE SUPPLIED BY ELASTIC IN CONNECTION WITH THIS AGREEMENT, AND ELASTIC’S LIABILITY FOR FAILING TO COMPLY WITH THAT GUARANTEE CANNOT BE EXCLUDED BUT MAY BE LIMITED, SECTIONS 4.1, 4.2 AND 4.3 DO NOT APPLY TO THAT LIABILITY. INSTEAD, ELASTIC’S LIABILITY FOR THAT FAILURE IS LIMITED TO (AT THE ELECTION OF ELASTIC), IN THE CASE OF A SUPPLY OF GOODS, ELASTIC REPLACING THE GOODS OR SUPPLYING EQUIVALENT GOODS OR REPAIRING THE GOODS, OR IN THE CASE OF A SUPPLY OF SERVICES, ELASTIC SUPPLYING THE SERVICES AGAIN OR PAYING THE COST OF HAVING THE SERVICES SUPPLIED AGAIN. +(7) In addition, for Customers with principal offices within Japan, Sections 1.2, 3 and 4.1 of the Agreement are deleted and replaced with the following new Sections 1.2, 3.3 and 4.1: +1.2 Reservation of Rights; Restrictions. 
As between Elastic and You, Elastic owns all right title and interest in and to the Software and any derivative works thereof, and except as expressly set forth in Section 1.1 above, no other license to the Software is granted to You by implication or otherwise. You agree not to: (i) prepare derivative works from, modify, copy or use the Software in any manner except as expressly permitted in this Agreement or applicable law; (ii) transfer, sell, rent, lease, distribute, sublicense, loan or otherwise transfer the Software in whole or in part to any third party; (iii) use the Software for providing time-sharing services, any software-as-a-service offering (“SaaS”), service bureau services or as part of an application services provider or other service offering; (iv) alter or remove any proprietary notices in the Software; or (v) make available to any third party any analysis of the results of operation of the Software, including benchmarking results, without the prior written consent of Elastic. +3.3 Warranty Disclaimer. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS” WITHOUT WARRANTY OF ANY KIND, AND ELASTIC AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR STATUTORY REGARDING OR RELATING TO THE SOFTWARE OR DOCUMENTATION. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE LAW, ELASTIC AND ITS LICENSORS SPECIFICALLY DISCLAIM ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT WITH RESPECT TO THE SOFTWARE AND DOCUMENTATION, AND WITH RESPECT TO THE USE OF THE FOREGOING. FURTHER, ELASTIC DOES NOT WARRANT RESULTS OF USE OR THAT THE SOFTWARE WILL BE ERROR FREE OR THAT THE USE OF THE SOFTWARE WILL BE UNINTERRUPTED. +4.1 Disclaimer of Certain Damages. 
IN NO EVENT SHALL YOU OR ELASTIC OR ITS LICENSORS BE LIABLE FOR ANY LOSS OF PROFITS, LOSS OF USE, BUSINESS INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY INDIRECT, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND IN CONNECTION WITH OR ARISING OUT OF THE USE OR INABILITY TO USE THE SOFTWARE, OR THE PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A BREACH OF CONTRACT OR TORTIOUS CONDUCT, INCLUDING NEGLIGENCE, EVEN IF THE RESPONSIBLE PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. THE LIMITATIONS OF LIABILITY SET FORTH IN THIS SECTION 4.1 SHALL NOT APPLY TO A BREACH THROUGH GROSS NEGLIGENCE OR INTENTIONAL MISCONDUCT BY YOU OF THE SCOPE OF THE LICENSE GRANTED IN SECTION 1.1 OR TO ANY OTHER LIABILITY THAT CANNOT BE EXCLUDED OR LIMITED UNDER APPLICABLE LAW. diff --git a/plugin/licenses/x-pack-core-NOTICE.txt b/plugin/licenses/x-pack-core-NOTICE.txt new file mode 100644 index 00000000000..3aa4dffcd74 --- /dev/null +++ b/plugin/licenses/x-pack-core-NOTICE.txt @@ -0,0 +1,2 @@ +Elasticsearch X-Pack +Copyright 2009-2017 Elasticsearch diff --git a/plugin/src/main/java/org/elasticsearch/license/Licensing.java b/plugin/src/main/java/org/elasticsearch/license/Licensing.java index cb82bbbe081..08c864d6cf9 100644 --- a/plugin/src/main/java/org/elasticsearch/license/Licensing.java +++ b/plugin/src/main/java/org/elasticsearch/license/Licensing.java @@ -29,7 +29,7 @@ import java.util.Collections; import java.util.List; import java.util.function.Supplier; -import static org.elasticsearch.xpack.XPackPlugin.isTribeNode; +import static org.elasticsearch.xpack.XPackClientActionPlugin.isTribeNode; import static org.elasticsearch.xpack.XPackPlugin.transportClientMode; public class Licensing implements ActionPlugin { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/XPackPlugin.java b/plugin/src/main/java/org/elasticsearch/xpack/XPackPlugin.java index c8a1ad03f06..e9b08c2ed59 100644 ---
a/plugin/src/main/java/org/elasticsearch/xpack/XPackPlugin.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/XPackPlugin.java @@ -84,7 +84,7 @@ import org.elasticsearch.xpack.persistent.UpdatePersistentTaskStatusAction; import org.elasticsearch.xpack.rest.action.RestXPackInfoAction; import org.elasticsearch.xpack.rest.action.RestXPackUsageAction; import org.elasticsearch.xpack.security.Security; -import org.elasticsearch.xpack.security.authc.AuthenticationService; +import org.elasticsearch.xpack.security.authc.AuthenticationServiceField; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.sql.plugin.SqlLicenseChecker; import org.elasticsearch.xpack.sql.plugin.SqlPlugin; @@ -118,38 +118,6 @@ import java.util.stream.Stream; public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, IngestPlugin, NetworkPlugin, ClusterPlugin, DiscoveryPlugin, MapperPlugin { - public static final String NAME = "x-pack"; - - /** Name constant for the security feature. */ - public static final String SECURITY = "security"; - - /** Name constant for the monitoring feature. */ - public static final String MONITORING = "monitoring"; - - /** Name constant for the watcher feature. */ - public static final String WATCHER = "watcher"; - - /** Name constant for the graph feature. */ - public static final String GRAPH = "graph"; - - /** Name constant for the machine learning feature. */ - public static final String MACHINE_LEARNING = "ml"; - - /** Name constant for the Logstash feature. */ - public static final String LOGSTASH = "logstash"; - - /** Name constant for the Deprecation API feature. */ - public static final String DEPRECATION = "deprecation"; - - /** Name constant for the upgrade feature. */ - public static final String UPGRADE = "upgrade"; - - /** Name constant for the sql feature. 
*/ - public static final String SQL = "sql"; - - // inside of YAML settings we still use xpack do not having handle issues with dashes - private static final String SETTINGS_NAME = "xpack"; - // TODO: clean up this library to not ask for write access to all system properties! static { // invoke this clinit in unbound with permissions to access all system properties @@ -221,7 +189,7 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I this.sql = new SqlPlugin(XPackSettings.SQL_ENABLED.get(settings), new SqlLicenseChecker( () -> { if (!licenseState.isSqlAllowed()) { - throw LicenseUtils.newComplianceException(XPackPlugin.SQL); + throw LicenseUtils.newComplianceException(XpackField.SQL); } }, () -> { @@ -331,8 +299,8 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I } Set headers = new HashSet<>(); headers.add(UsernamePasswordToken.BASIC_AUTH_HEADER); - if (AuthenticationService.RUN_AS_ENABLED.get(settings)) { - headers.add(AuthenticationService.RUN_AS_USER_HEADER); + if (AuthenticationServiceField.RUN_AS_ENABLED.get(settings)) { + headers.add(AuthenticationServiceField.RUN_AS_USER_HEADER); } headers.addAll(extensionsService.getExtensions().stream() .flatMap(e -> e.getRestHeaders().stream()).collect(Collectors.toList())); @@ -454,7 +422,7 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I @Override public List getNamedWriteables() { List entries = new ArrayList<>(); - entries.add(new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, LOGSTASH, LogstashFeatureSet.Usage::new)); + entries.add(new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XpackField.LOGSTASH, LogstashFeatureSet.Usage::new)); entries.addAll(watcher.getNamedWriteables()); entries.addAll(machineLearning.getNamedWriteables()); entries.addAll(licensing.getNamedWriteables()); @@ -500,24 +468,12 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I return 
TransportClient.CLIENT_TYPE.equals(settings.get(Client.CLIENT_TYPE_SETTING_S.getKey())); } - public static boolean isTribeNode(Settings settings) { - return settings.getGroups("tribe", true).isEmpty() == false; - } - - public static boolean isTribeClientNode(Settings settings) { - return settings.get("tribe.name") != null; - } - public static Path resolveConfigFile(Environment env, String name) { - return env.configFile().resolve(NAME).resolve(name); - } - - public static String featureSettingPrefix(String featureName) { - return SETTINGS_NAME + "." + featureName; + return env.configFile().resolve(XpackField.NAME).resolve(name); } public static Path resolveXPackExtensionsFile(Environment env) { - return env.pluginsFile().resolve(XPackPlugin.NAME).resolve("extensions"); + return env.pluginsFile().resolve(XpackField.NAME).resolve("extensions"); } @Override diff --git a/plugin/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java b/plugin/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java index 588b0ef8a55..42d2739a46c 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java @@ -28,7 +28,7 @@ import java.util.function.Supplier; public class Deprecation implements ActionPlugin { @Override public List> getActions() { - return Collections.singletonList(new ActionHandler<>(DeprecationInfoAction.INSTANCE, DeprecationInfoAction.TransportAction.class)); + return Collections.singletonList(new ActionHandler<>(DeprecationInfoAction.INSTANCE, TransportDeprecationInfoAction.class)); } @Override diff --git a/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java b/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java index b16c06fccc2..ef39595dbe1 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java +++ 
b/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java @@ -53,15 +53,4 @@ public class DeprecationChecks { IndexDeprecationChecks::indexStoreTypeCheck, IndexDeprecationChecks::storeThrottleSettingsCheck)); - /** - * helper utility function to reduce repeat of running a specific {@link Set} of checks. - * - * @param checks The functional checks to execute using the mapper function - * @param mapper The function that executes the lambda check with the appropriate arguments - * @param The signature of the check (BiFunction, Function, including the appropriate arguments) - * @return The list of {@link DeprecationIssue} that were found in the cluster - */ - static List filterChecks(List checks, Function mapper) { - return checks.stream().map(mapper).filter(Objects::nonNull).collect(Collectors.toList()); - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java b/plugin/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java new file mode 100644 index 00000000000..327e11ce8a8 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.deprecation; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ClientHelper; +import org.elasticsearch.xpack.XpackField; + +public class TransportDeprecationInfoAction extends TransportMasterNodeReadAction { + + private final XPackLicenseState licenseState; + private final NodeClient client; + private final IndexNameExpressionResolver indexNameExpressionResolver; + + @Inject + public TransportDeprecationInfoAction(Settings settings, TransportService transportService, ClusterService clusterService, + ThreadPool threadPool, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + XPackLicenseState licenseState, NodeClient client) { + super(settings, DeprecationInfoAction.NAME, transportService, clusterService, threadPool, 
actionFilters, + DeprecationInfoAction.Request::new, indexNameExpressionResolver); + this.licenseState = licenseState; + this.client = client; + this.indexNameExpressionResolver = indexNameExpressionResolver; + } + + @Override + protected String executor() { + return ThreadPool.Names.GENERIC; + } + + @Override + protected DeprecationInfoAction.Response newResponse() { + return new DeprecationInfoAction.Response(); + } + + @Override + protected ClusterBlockException checkBlock(DeprecationInfoAction.Request request, ClusterState state) { + // Cluster is not affected but we look up repositories in metadata + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); + } + + @Override + protected final void masterOperation(final DeprecationInfoAction.Request request, ClusterState state, + final ActionListener listener) { + if (licenseState.isDeprecationAllowed()) { + NodesInfoRequest nodesInfoRequest = new NodesInfoRequest("_local").settings(true).plugins(true); + NodesStatsRequest nodesStatsRequest = new NodesStatsRequest("_local").fs(true); + + final ThreadContext threadContext = client.threadPool().getThreadContext(); + ClientHelper.executeAsyncWithOrigin(threadContext, ClientHelper.DEPRECATION_ORIGIN, nodesInfoRequest, + ActionListener.wrap( + nodesInfoResponse -> { + if (nodesInfoResponse.hasFailures()) { + throw nodesInfoResponse.failures().get(0); + } + ClientHelper.executeAsyncWithOrigin(threadContext, ClientHelper.DEPRECATION_ORIGIN, nodesStatsRequest, + ActionListener.wrap( + nodesStatsResponse -> { + if (nodesStatsResponse.hasFailures()) { + throw nodesStatsResponse.failures().get(0); + } + listener.onResponse(DeprecationInfoAction.Response.from(nodesInfoResponse.getNodes(), + nodesStatsResponse.getNodes(), state, indexNameExpressionResolver, + request.indices(), request.indicesOptions(), + DeprecationChecks.CLUSTER_SETTINGS_CHECKS, DeprecationChecks.NODE_SETTINGS_CHECKS, + DeprecationChecks.INDEX_SETTINGS_CHECKS)); + }, 
listener::onFailure), + client.admin().cluster()::nodesStats); + }, listener::onFailure), client.admin().cluster()::nodesInfo); + } else { + listener.onFailure(LicenseUtils.newComplianceException(XpackField.DEPRECATION)); + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/graph/Graph.java b/plugin/src/main/java/org/elasticsearch/xpack/graph/Graph.java index 6cfd07b1377..8d57057f254 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/graph/Graph.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/graph/Graph.java @@ -33,7 +33,7 @@ import java.util.function.Supplier; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; -import static org.elasticsearch.xpack.XPackPlugin.GRAPH; +import static org.elasticsearch.xpack.XpackField.GRAPH; public class Graph implements ActionPlugin { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/graph/GraphFeatureSet.java b/plugin/src/main/java/org/elasticsearch/xpack/graph/GraphFeatureSet.java index f19c0081e1e..1d0b896e185 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/graph/GraphFeatureSet.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/graph/GraphFeatureSet.java @@ -15,8 +15,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.XPackFeatureSet; -import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; public class GraphFeatureSet implements XPackFeatureSet { @@ -31,7 +31,7 @@ public class GraphFeatureSet implements XPackFeatureSet { @Override public String name() { - return XPackPlugin.GRAPH; + return XpackField.GRAPH; } @Override @@ -66,7 +66,7 @@ public class GraphFeatureSet implements XPackFeatureSet { } public Usage(boolean available, boolean enabled) { - super(XPackPlugin.GRAPH, available, enabled); + 
super(XpackField.GRAPH, available, enabled); } } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/plugin/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index e38ea9ad343..0fd1b582dff 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -38,7 +38,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilde import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.graph.action.Connection.ConnectionId; import org.elasticsearch.xpack.graph.action.GraphExploreRequest.TermBoost; import org.elasticsearch.xpack.graph.action.Vertex.VertexId; @@ -89,7 +89,7 @@ public class TransportGraphExploreAction extends HandledTransportAction CONCURRENT_JOB_ALLOCATIONS = Setting.intSetting("xpack.ml.node_concurrent_job_allocations", 2, 0, Property.Dynamic, Property.NodeScope); - public static final Setting MAX_MODEL_MEMORY_LIMIT = - Setting.memorySizeSetting("xpack.ml.max_model_memory_limit", new ByteSizeValue(0), Property.Dynamic, Property.NodeScope); public static final Setting MAX_MACHINE_MEMORY_PERCENT = Setting.intSetting("xpack.ml.max_machine_memory_percent", 30, 5, 90, Property.Dynamic, Property.NodeScope); - public static final TimeValue STATE_PERSIST_RESTORE_TIMEOUT = TimeValue.timeValueMinutes(30); - private static final Logger logger = Loggers.getLogger(XPackPlugin.class); private final Settings settings; @@ -221,8 +260,8 @@ public class MachineLearning implements ActionPlugin { this.licenseState = licenseState; this.enabled = XPackSettings.MACHINE_LEARNING_ENABLED.get(settings); 
this.transportClientMode = XPackPlugin.transportClientMode(settings); - this.tribeNode = XPackPlugin.isTribeNode(settings); - this.tribeNodeClient = XPackPlugin.isTribeClientNode(settings); + this.tribeNode = XPackClientActionPlugin.isTribeNode(settings); + this.tribeNodeClient = XPackClientActionPlugin.isTribeClientNode(settings); } public List> getSettings() { @@ -410,8 +449,8 @@ public class MachineLearning implements ActionPlugin { } return Arrays.asList( - new OpenJobAction.OpenJobPersistentTasksExecutor(settings, clusterService, autodetectProcessManager.get()), - new StartDatafeedAction.StartDatafeedPersistentTasksExecutor(settings, datafeedManager.get()) + new TransportOpenJobAction.OpenJobPersistentTasksExecutor(settings, clusterService, autodetectProcessManager.get()), + new TransportStartDatafeedAction.StartDatafeedPersistentTasksExecutor(settings, datafeedManager.get()) ); } @@ -487,49 +526,49 @@ public class MachineLearning implements ActionPlugin { return emptyList(); } return Arrays.asList( - new ActionHandler<>(GetJobsAction.INSTANCE, GetJobsAction.TransportAction.class), - new ActionHandler<>(GetJobsStatsAction.INSTANCE, GetJobsStatsAction.TransportAction.class), - new ActionHandler<>(PutJobAction.INSTANCE, PutJobAction.TransportAction.class), - new ActionHandler<>(UpdateJobAction.INSTANCE, UpdateJobAction.TransportAction.class), - new ActionHandler<>(DeleteJobAction.INSTANCE, DeleteJobAction.TransportAction.class), - new ActionHandler<>(OpenJobAction.INSTANCE, OpenJobAction.TransportAction.class), - new ActionHandler<>(GetFiltersAction.INSTANCE, GetFiltersAction.TransportAction.class), - new ActionHandler<>(PutFilterAction.INSTANCE, PutFilterAction.TransportAction.class), - new ActionHandler<>(DeleteFilterAction.INSTANCE, DeleteFilterAction.TransportAction.class), - new ActionHandler<>(KillProcessAction.INSTANCE, KillProcessAction.TransportAction.class), - new ActionHandler<>(GetBucketsAction.INSTANCE, GetBucketsAction.TransportAction.class), - new 
ActionHandler<>(GetInfluencersAction.INSTANCE, GetInfluencersAction.TransportAction.class), - new ActionHandler<>(GetOverallBucketsAction.INSTANCE, GetOverallBucketsAction.TransportAction.class), - new ActionHandler<>(GetRecordsAction.INSTANCE, GetRecordsAction.TransportAction.class), - new ActionHandler<>(PostDataAction.INSTANCE, PostDataAction.TransportAction.class), - new ActionHandler<>(CloseJobAction.INSTANCE, CloseJobAction.TransportAction.class), - new ActionHandler<>(FinalizeJobExecutionAction.INSTANCE, FinalizeJobExecutionAction.TransportAction.class), - new ActionHandler<>(FlushJobAction.INSTANCE, FlushJobAction.TransportAction.class), - new ActionHandler<>(ValidateDetectorAction.INSTANCE, ValidateDetectorAction.TransportAction.class), - new ActionHandler<>(ValidateJobConfigAction.INSTANCE, ValidateJobConfigAction.TransportAction.class), - new ActionHandler<>(GetCategoriesAction.INSTANCE, GetCategoriesAction.TransportAction.class), - new ActionHandler<>(GetModelSnapshotsAction.INSTANCE, GetModelSnapshotsAction.TransportAction.class), - new ActionHandler<>(RevertModelSnapshotAction.INSTANCE, RevertModelSnapshotAction.TransportAction.class), - new ActionHandler<>(UpdateModelSnapshotAction.INSTANCE, UpdateModelSnapshotAction.TransportAction.class), - new ActionHandler<>(GetDatafeedsAction.INSTANCE, GetDatafeedsAction.TransportAction.class), - new ActionHandler<>(GetDatafeedsStatsAction.INSTANCE, GetDatafeedsStatsAction.TransportAction.class), - new ActionHandler<>(PutDatafeedAction.INSTANCE, PutDatafeedAction.TransportAction.class), - new ActionHandler<>(UpdateDatafeedAction.INSTANCE, UpdateDatafeedAction.TransportAction.class), - new ActionHandler<>(DeleteDatafeedAction.INSTANCE, DeleteDatafeedAction.TransportAction.class), - new ActionHandler<>(PreviewDatafeedAction.INSTANCE, PreviewDatafeedAction.TransportAction.class), - new ActionHandler<>(StartDatafeedAction.INSTANCE, StartDatafeedAction.TransportAction.class), - new 
ActionHandler<>(StopDatafeedAction.INSTANCE, StopDatafeedAction.TransportAction.class), - new ActionHandler<>(IsolateDatafeedAction.INSTANCE, IsolateDatafeedAction.TransportAction.class), - new ActionHandler<>(DeleteModelSnapshotAction.INSTANCE, DeleteModelSnapshotAction.TransportAction.class), - new ActionHandler<>(UpdateProcessAction.INSTANCE, UpdateProcessAction.TransportAction.class), - new ActionHandler<>(DeleteExpiredDataAction.INSTANCE, DeleteExpiredDataAction.TransportAction.class), - new ActionHandler<>(ForecastJobAction.INSTANCE, ForecastJobAction.TransportAction.class), - new ActionHandler<>(GetCalendarsAction.INSTANCE, GetCalendarsAction.TransportAction.class), - new ActionHandler<>(PutCalendarAction.INSTANCE, PutCalendarAction.TransportAction.class), - new ActionHandler<>(DeleteCalendarAction.INSTANCE, DeleteCalendarAction.TransportAction.class), - new ActionHandler<>(UpdateCalendarJobAction.INSTANCE, UpdateCalendarJobAction.TransportAction.class), - new ActionHandler<>(GetCalendarEventsAction.INSTANCE, GetCalendarEventsAction.TransportAction.class), - new ActionHandler<>(PostCalendarEventsAction.INSTANCE, PostCalendarEventsAction.TransportAction.class) + new ActionHandler<>(GetJobsAction.INSTANCE, TransportGetJobsAction.class), + new ActionHandler<>(GetJobsStatsAction.INSTANCE, TransportGetJobsStatsAction.class), + new ActionHandler<>(PutJobAction.INSTANCE, TransportPutJobAction.class), + new ActionHandler<>(UpdateJobAction.INSTANCE, TransportUpdateJobAction.class), + new ActionHandler<>(DeleteJobAction.INSTANCE, TransportDeleteJobAction.class), + new ActionHandler<>(OpenJobAction.INSTANCE, TransportOpenJobAction.class), + new ActionHandler<>(GetFiltersAction.INSTANCE, TransportGetFiltersAction.class), + new ActionHandler<>(PutFilterAction.INSTANCE, TransportPutFilterAction.class), + new ActionHandler<>(DeleteFilterAction.INSTANCE, TransportDeleteFilterAction.class), + new ActionHandler<>(KillProcessAction.INSTANCE, TransportKillProcessAction.class), 
+ new ActionHandler<>(GetBucketsAction.INSTANCE, TransportGetBucketsAction.class), + new ActionHandler<>(GetInfluencersAction.INSTANCE, TransportGetInfluencersAction.class), + new ActionHandler<>(GetOverallBucketsAction.INSTANCE, TransportGetOverallBucketsAction.class), + new ActionHandler<>(GetRecordsAction.INSTANCE, TransportGetRecordsAction.class), + new ActionHandler<>(PostDataAction.INSTANCE, TransportPostDataAction.class), + new ActionHandler<>(CloseJobAction.INSTANCE, TransportCloseJobAction.class), + new ActionHandler<>(FinalizeJobExecutionAction.INSTANCE, TransportFinalizeJobExecutionAction.class), + new ActionHandler<>(FlushJobAction.INSTANCE, TransportFlushJobAction.class), + new ActionHandler<>(ValidateDetectorAction.INSTANCE, TransportValidateDetectorAction.class), + new ActionHandler<>(ValidateJobConfigAction.INSTANCE, TransportValidateJobConfigAction.class), + new ActionHandler<>(GetCategoriesAction.INSTANCE, TransportGetCategoriesAction.class), + new ActionHandler<>(GetModelSnapshotsAction.INSTANCE, TransportGetModelSnapshotsAction.class), + new ActionHandler<>(RevertModelSnapshotAction.INSTANCE, TransportRevertModelSnapshotAction.class), + new ActionHandler<>(UpdateModelSnapshotAction.INSTANCE, TransportUpdateModelSnapshotAction.class), + new ActionHandler<>(GetDatafeedsAction.INSTANCE, TransportGetDatafeedsAction.class), + new ActionHandler<>(GetDatafeedsStatsAction.INSTANCE, TransportGetDatafeedsStatsAction.class), + new ActionHandler<>(PutDatafeedAction.INSTANCE, TransportPutDatafeedAction.class), + new ActionHandler<>(UpdateDatafeedAction.INSTANCE, TransportUpdateDatafeedAction.class), + new ActionHandler<>(DeleteDatafeedAction.INSTANCE, TransportDeleteDatafeedAction.class), + new ActionHandler<>(PreviewDatafeedAction.INSTANCE, TransportPreviewDatafeedAction.class), + new ActionHandler<>(StartDatafeedAction.INSTANCE, TransportStartDatafeedAction.class), + new ActionHandler<>(StopDatafeedAction.INSTANCE, TransportStopDatafeedAction.class), + new 
ActionHandler<>(IsolateDatafeedAction.INSTANCE, TransportIsolateDatafeedAction.class), + new ActionHandler<>(DeleteModelSnapshotAction.INSTANCE, TransportDeleteModelSnapshotAction.class), + new ActionHandler<>(UpdateProcessAction.INSTANCE, TransportUpdateProcessAction.class), + new ActionHandler<>(DeleteExpiredDataAction.INSTANCE, TransportDeleteExpiredDataAction.class), + new ActionHandler<>(ForecastJobAction.INSTANCE, TransportForecastJobAction.class), + new ActionHandler<>(GetCalendarsAction.INSTANCE, TransportGetCalendarsAction.class), + new ActionHandler<>(PutCalendarAction.INSTANCE, TransportPutCalendarAction.class), + new ActionHandler<>(DeleteCalendarAction.INSTANCE, TransportDeleteCalendarAction.class), + new ActionHandler<>(UpdateCalendarJobAction.INSTANCE, TransportUpdateCalendarJobAction.class), + new ActionHandler<>(GetCalendarEventsAction.INSTANCE, TransportGetCalendarEventsAction.class), + new ActionHandler<>(PostCalendarEventsAction.INSTANCE, TransportPostCalendarEventsAction.class) ); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSet.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSet.java index b5379951704..8074405129b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSet.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSet.java @@ -18,14 +18,15 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.env.Environment; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.Platforms; +import org.elasticsearch.xpack.XPackClientActionPlugin; import org.elasticsearch.xpack.XPackFeatureSet; import 
org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.ml.action.GetDatafeedsStatsAction; import org.elasticsearch.xpack.ml.action.GetJobsStatsAction; import org.elasticsearch.xpack.ml.datafeed.DatafeedState; @@ -76,7 +77,7 @@ public class MachineLearningFeatureSet implements XPackFeatureSet { // if ML has been disabled because of some OS incompatibility. Also don't try to get the native // code version in the transport or tribe client - the controller process won't be running. if (enabled && XPackPlugin.transportClientMode(environment.settings()) == false - && XPackPlugin.isTribeClientNode(environment.settings()) == false) { + && XPackClientActionPlugin.isTribeClientNode(environment.settings()) == false) { try { if (isRunningOnMlPlatform(true)) { NativeController nativeController = NativeControllerHolder.getNativeController(environment); @@ -110,7 +111,7 @@ public class MachineLearningFeatureSet implements XPackFeatureSet { @Override public String name() { - return XPackPlugin.MACHINE_LEARNING; + return XpackField.MACHINE_LEARNING; } @Override @@ -136,7 +137,7 @@ public class MachineLearningFeatureSet implements XPackFeatureSet { @Override public void usage(ActionListener listener) { ClusterState state = clusterService.state(); - MlMetadata mlMetadata = state.getMetaData().custom(MlMetadata.TYPE); + MlMetadata mlMetadata = state.getMetaData().custom(MLMetadataField.TYPE); // Handle case when usage is called but MlMetadata has not been installed yet if (mlMetadata == null) { @@ -161,7 +162,7 @@ public class MachineLearningFeatureSet implements XPackFeatureSet { public Usage(boolean available, boolean enabled, Map jobsUsage, Map datafeedsUsage) { - super(XPackPlugin.MACHINE_LEARNING, available, enabled); + super(XpackField.MACHINE_LEARNING, available, enabled); this.jobsUsage = Objects.requireNonNull(jobsUsage); this.datafeedsUsage = 
Objects.requireNonNull(datafeedsUsage); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java index 0ca2762a984..5d715ade02e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java @@ -88,7 +88,7 @@ public class MlAssignmentNotifier extends AbstractComponent implements ClusterSt } } else if (StartDatafeedAction.TASK_NAME.equals(currentTask.getTaskName())) { String datafeedId = ((StartDatafeedAction.DatafeedParams) currentTask.getParams()).getDatafeedId(); - MlMetadata mlMetadata = event.state().getMetaData().custom(MlMetadata.TYPE); + MlMetadata mlMetadata = event.state().getMetaData().custom(MLMetadataField.TYPE); DatafeedConfig datafeedConfig = mlMetadata.getDatafeed(datafeedId); if (currentAssignment.getExecutorNode() == null) { String msg = "No node found to start datafeed [" + datafeedId +"]. 
Reasons [" + diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java index a63a6e77381..d7b9011365a 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java @@ -61,19 +61,19 @@ class MlInitializationService extends AbstractComponent implements ClusterStateL } private void installMlMetadata(MetaData metaData) { - if (metaData.custom(MlMetadata.TYPE) == null) { + if (metaData.custom(MLMetadataField.TYPE) == null) { if (installMlMetadataCheck.compareAndSet(false, true)) { threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> clusterService.submitStateUpdateTask("install-ml-metadata", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) throws Exception { // If the metadata has been added already don't try to update - if (currentState.metaData().custom(MlMetadata.TYPE) != null) { + if (currentState.metaData().custom(MLMetadataField.TYPE) != null) { return currentState; } ClusterState.Builder builder = new ClusterState.Builder(currentState); MetaData.Builder metadataBuilder = MetaData.builder(currentState.metaData()); - metadataBuilder.putCustom(MlMetadata.TYPE, MlMetadata.EMPTY_METADATA); + metadataBuilder.putCustom(MLMetadataField.TYPE, MlMetadata.EMPTY_METADATA); builder.metaData(metadataBuilder.build()); return builder.build(); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/CloseJobAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/CloseJobAction.java deleted file mode 100644 index a19ded838d1..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/CloseJobAction.java +++ /dev/null @@ -1,677 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml.action; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.Version; -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionListenerResponseHandler; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.tasks.BaseTasksRequest; -import org.elasticsearch.action.support.tasks.BaseTasksResponse; -import org.elasticsearch.action.support.tasks.TransportTasksAction; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; -import org.elasticsearch.common.util.concurrent.AtomicArray; -import org.elasticsearch.common.xcontent.ObjectParser; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.discovery.MasterNotDiscoveredException; 
-import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.MlMetadata; -import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.ml.datafeed.DatafeedState; -import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.config.JobState; -import org.elasticsearch.xpack.ml.job.config.JobTaskStatus; -import org.elasticsearch.xpack.ml.job.messages.Messages; -import org.elasticsearch.xpack.ml.notifications.Auditor; -import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.PersistentTask; -import org.elasticsearch.xpack.persistent.PersistentTasksService; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Consumer; -import java.util.stream.Collectors; - -import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; - -public class CloseJobAction extends Action { - - public static final CloseJobAction INSTANCE = new CloseJobAction(); - public static final String NAME = "cluster:admin/xpack/ml/job/close"; - - private CloseJobAction() { - super(NAME); - } - - @Override - public RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client, this); - } - - @Override - public Response newResponse() { - return new Response(); - } - - public static class Request extends BaseTasksRequest implements ToXContentObject { - - public static final ParseField TIMEOUT = new ParseField("timeout"); - 
public static final ParseField FORCE = new ParseField("force"); - public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs"); - public static ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); - - static { - PARSER.declareString(Request::setJobId, Job.ID); - PARSER.declareString((request, val) -> - request.setCloseTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); - PARSER.declareBoolean(Request::setForce, FORCE); - PARSER.declareBoolean(Request::setAllowNoJobs, ALLOW_NO_JOBS); - } - - public static Request parseRequest(String jobId, XContentParser parser) { - Request request = PARSER.apply(parser, null); - if (jobId != null) { - request.setJobId(jobId); - } - return request; - } - - private String jobId; - private boolean force = false; - private boolean allowNoJobs = true; - // A big state can take a while to persist. For symmetry with the _open endpoint any - // changes here should be reflected there too. - private TimeValue timeout = MachineLearning.STATE_PERSIST_RESTORE_TIMEOUT; - - private String[] openJobIds; - - private boolean local; - - Request() { - openJobIds = new String[] {}; - } - - public Request(String jobId) { - this(); - this.jobId = jobId; - } - - public String getJobId() { - return jobId; - } - - public void setJobId(String jobId) { - this.jobId = jobId; - } - - public TimeValue getCloseTimeout() { - return timeout; - } - - public void setCloseTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - public boolean isForce() { - return force; - } - - public void setForce(boolean force) { - this.force = force; - } - - public boolean allowNoJobs() { - return allowNoJobs; - } - - public void setAllowNoJobs(boolean allowNoJobs) { - this.allowNoJobs = allowNoJobs; - } - - public void setLocal(boolean local) { - this.local = local; - } - - public void setOpenJobIds(String [] openJobIds) { - this.openJobIds = openJobIds; - } - - @Override - public void readFrom(StreamInput in) throws 
IOException { - super.readFrom(in); - jobId = in.readString(); - timeout = new TimeValue(in); - force = in.readBoolean(); - openJobIds = in.readStringArray(); - local = in.readBoolean(); - if (in.getVersion().onOrAfter(Version.V_6_1_0)) { - allowNoJobs = in.readBoolean(); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(jobId); - timeout.writeTo(out); - out.writeBoolean(force); - out.writeStringArray(openJobIds); - out.writeBoolean(local); - if (out.getVersion().onOrAfter(Version.V_6_1_0)) { - out.writeBoolean(allowNoJobs); - } - } - - @Override - public boolean match(Task task) { - for (String id : openJobIds) { - if (OpenJobAction.JobTask.match(task, id)) { - return true; - } - } - return false; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - // openJobIds are excluded - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - builder.field(FORCE.getPreferredName(), force); - builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - // openJobIds are excluded - return Objects.hash(jobId, timeout, force, allowNoJobs); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || obj.getClass() != getClass()) { - return false; - } - Request other = (Request) obj; - // openJobIds are excluded - return Objects.equals(jobId, other.jobId) && - Objects.equals(timeout, other.timeout) && - Objects.equals(force, other.force) && - Objects.equals(allowNoJobs, other.allowNoJobs); - } - } - - static class RequestBuilder extends ActionRequestBuilder { - - RequestBuilder(ElasticsearchClient client, CloseJobAction action) { - super(client, action, new Request()); - } - } - - public static class Response 
extends BaseTasksResponse implements Writeable, ToXContentObject { - - private boolean closed; - - Response() { - super(null, null); - - } - - Response(StreamInput in) throws IOException { - super(null, null); - readFrom(in); - } - - Response(boolean closed) { - super(null, null); - this.closed = closed; - } - - public boolean isClosed() { - return closed; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - closed = in.readBoolean(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeBoolean(closed); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("closed", closed); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Response response = (Response) o; - return closed == response.closed; - } - - @Override - public int hashCode() { - return Objects.hash(closed); - } - } - - public static class TransportAction extends TransportTasksAction { - - private final Client client; - private final ClusterService clusterService; - private final Auditor auditor; - private final PersistentTasksService persistentTasksService; - - @Inject - public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - ClusterService clusterService, Client client, - Auditor auditor, PersistentTasksService persistentTasksService) { - // We fork in innerTaskOperation(...), so we can use ThreadPool.Names.SAME here: - super(settings, CloseJobAction.NAME, threadPool, clusterService, transportService, actionFilters, - indexNameExpressionResolver, Request::new, Response::new, ThreadPool.Names.SAME); - this.client = client; - 
this.clusterService = clusterService; - this.auditor = auditor; - this.persistentTasksService = persistentTasksService; - } - - @Override - protected void doExecute(Task task, Request request, ActionListener listener) { - final ClusterState state = clusterService.state(); - final DiscoveryNodes nodes = state.nodes(); - if (request.local == false && nodes.isLocalNodeElectedMaster() == false) { - // Delegates close job to elected master node, so it becomes the coordinating node. - // See comment in OpenJobAction.Transport class for more information. - if (nodes.getMasterNode() == null) { - listener.onFailure(new MasterNotDiscoveredException("no known master node")); - } else { - transportService.sendRequest(nodes.getMasterNode(), actionName, request, - new ActionListenerResponseHandler<>(listener, Response::new)); - } - } else { - /* - * Closing of multiple jobs: - * - * 1. Resolve and validate jobs first: if any job does not meet the - * criteria (e.g. open datafeed), fail immediately, do not close any - * job - * - * 2. Internally a task request is created for every open job, so there - * are n inner tasks for 1 user request - * - * 3. No task is created for closing jobs but those will be waited on - * - * 4. 
Collect n inner task results or failures and send 1 outer - * result/failure - */ - - List openJobIds = new ArrayList<>(); - List closingJobIds = new ArrayList<>(); - resolveAndValidateJobId(request, state, openJobIds, closingJobIds); - request.setOpenJobIds(openJobIds.toArray(new String[0])); - if (openJobIds.isEmpty() && closingJobIds.isEmpty()) { - listener.onResponse(new Response(true)); - return; - } - - if (request.isForce() == false) { - Set executorNodes = new HashSet<>(); - PersistentTasksCustomMetaData tasks = state.metaData().custom(PersistentTasksCustomMetaData.TYPE); - for (String resolvedJobId : request.openJobIds) { - PersistentTasksCustomMetaData.PersistentTask jobTask = MlMetadata.getJobTask(resolvedJobId, tasks); - if (jobTask == null || jobTask.isAssigned() == false) { - String message = "Cannot close job [" + resolvedJobId + "] because the job does not have an assigned node." + - " Use force close to close the job"; - listener.onFailure(ExceptionsHelper.conflictStatusException(message)); - return; - } else { - executorNodes.add(jobTask.getExecutorNode()); - } - } - request.setNodes(executorNodes.toArray(new String[executorNodes.size()])); - } - - if (request.isForce()) { - List jobIdsToForceClose = new ArrayList<>(openJobIds); - jobIdsToForceClose.addAll(closingJobIds); - forceCloseJob(state, request, jobIdsToForceClose, listener); - } else { - normalCloseJob(state, task, request, openJobIds, closingJobIds, listener); - } - } - } - - @Override - protected void taskOperation(Request request, OpenJobAction.JobTask jobTask, ActionListener listener) { - JobTaskStatus taskStatus = new JobTaskStatus(JobState.CLOSING, jobTask.getAllocationId()); - jobTask.updatePersistentStatus(taskStatus, ActionListener.wrap(task -> { - // we need to fork because we are now on a network threadpool and closeJob method may take a while to complete: - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(new AbstractRunnable() { - @Override - public void 
onFailure(Exception e) { - listener.onFailure(e); - } - - @Override - protected void doRun() throws Exception { - jobTask.closeJob("close job (api)"); - listener.onResponse(new Response(true)); - } - }); - }, listener::onFailure)); - } - - @Override - protected Response newResponse(Request request, List tasks, - List taskOperationFailures, - List failedNodeExceptions) { - - // number of resolved jobs should be equal to the number of tasks, - // otherwise something went wrong - if (request.openJobIds.length != tasks.size()) { - if (taskOperationFailures.isEmpty() == false) { - throw org.elasticsearch.ExceptionsHelper - .convertToElastic(taskOperationFailures.get(0).getCause()); - } else if (failedNodeExceptions.isEmpty() == false) { - throw org.elasticsearch.ExceptionsHelper - .convertToElastic(failedNodeExceptions.get(0)); - } else { - // This can happen we the actual task in the node no longer exists, - // which means the job(s) have already been closed. - return new Response(true); - } - } - - return new Response(tasks.stream().allMatch(Response::isClosed)); - } - - @Override - protected Response readTaskResponse(StreamInput in) throws IOException { - return new Response(in); - } - - private void forceCloseJob(ClusterState currentState, Request request, List jobIdsToForceClose, - ActionListener listener) { - PersistentTasksCustomMetaData tasks = currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - - final int numberOfJobs = jobIdsToForceClose.size(); - final AtomicInteger counter = new AtomicInteger(); - final AtomicArray failures = new AtomicArray<>(numberOfJobs); - - for (String jobId : jobIdsToForceClose) { - PersistentTask jobTask = MlMetadata.getJobTask(jobId, tasks); - if (jobTask != null) { - auditor.info(jobId, Messages.JOB_AUDIT_FORCE_CLOSING); - persistentTasksService.cancelPersistentTask(jobTask.getId(), - new ActionListener>() { - @Override - public void onResponse(PersistentTask task) { - if (counter.incrementAndGet() == 
numberOfJobs) { - sendResponseOrFailure(request.getJobId(), listener, failures); - } - } - - @Override - public void onFailure(Exception e) { - final int slot = counter.incrementAndGet(); - failures.set(slot - 1, e); - if (slot == numberOfJobs) { - sendResponseOrFailure(request.getJobId(), listener, failures); - } - } - - private void sendResponseOrFailure(String jobId, - ActionListener listener, - AtomicArray failures) { - List catchedExceptions = failures.asList(); - if (catchedExceptions.size() == 0) { - listener.onResponse(new Response(true)); - return; - } - - String msg = "Failed to force close job [" + jobId + "] with [" - + catchedExceptions.size() - + "] failures, rethrowing last, all Exceptions: [" - + catchedExceptions.stream().map(Exception::getMessage) - .collect(Collectors.joining(", ")) - + "]"; - - ElasticsearchException e = new ElasticsearchException(msg, - catchedExceptions.get(0)); - listener.onFailure(e); - } - }); - } - } - } - - private void normalCloseJob(ClusterState currentState, Task task, Request request, - List openJobIds, List closingJobIds, - ActionListener listener) { - PersistentTasksCustomMetaData tasks = currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - - WaitForCloseRequest waitForCloseRequest = buildWaitForCloseRequest(openJobIds, closingJobIds, tasks, auditor); - - // If there are no open or closing jobs in the request return - if (waitForCloseRequest.hasJobsToWaitFor() == false) { - listener.onResponse(new Response(true)); - return; - } - - boolean noOpenJobsToClose = openJobIds.isEmpty(); - if (noOpenJobsToClose) { - // No jobs to close but we still want to wait on closing jobs in the request - waitForJobClosed(request, waitForCloseRequest, new Response(true), listener); - return; - } - - ActionListener finalListener = - ActionListener.wrap( - r -> waitForJobClosed(request, waitForCloseRequest, - r, listener), - listener::onFailure); - super.doExecute(task, request, finalListener); - } - - static class 
WaitForCloseRequest { - List persistentTaskIds = new ArrayList<>(); - List jobsToFinalize = new ArrayList<>(); - - public boolean hasJobsToWaitFor() { - return persistentTaskIds.isEmpty() == false; - } - } - - // Wait for job to be marked as closed in cluster state, which means the job persistent task has been removed - // This api returns when job has been closed, but that doesn't mean the persistent task has been removed from cluster state, - // so wait for that to happen here. - void waitForJobClosed(Request request, WaitForCloseRequest waitForCloseRequest, Response response, - ActionListener listener) { - persistentTasksService.waitForPersistentTasksStatus(persistentTasksCustomMetaData -> { - for (String persistentTaskId : waitForCloseRequest.persistentTaskIds) { - if (persistentTasksCustomMetaData.getTask(persistentTaskId) != null) { - return false; - } - } - return true; - }, request.getCloseTimeout(), new ActionListener() { - @Override - public void onResponse(Boolean result) { - FinalizeJobExecutionAction.Request finalizeRequest = new FinalizeJobExecutionAction.Request( - waitForCloseRequest.jobsToFinalize.toArray(new String[0])); - executeAsyncWithOrigin(client, ML_ORIGIN, FinalizeJobExecutionAction.INSTANCE, finalizeRequest, - ActionListener.wrap(r -> listener.onResponse(response), listener::onFailure)); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); - } - } - - /** - * Resolve the requested jobs and add their IDs to one of the list arguments - * depending on job state. - * - * Opened jobs are added to {@code openJobIds} and closing jobs added to {@code closingJobIds}. Failed jobs are added - * to {@code openJobIds} if allowFailed is set otherwise an exception is thrown. 
- * @param request The close job request - * @param state Cluster state - * @param openJobIds Opened or failed jobs are added to this list - * @param closingJobIds Closing jobs are added to this list - */ - static void resolveAndValidateJobId(Request request, ClusterState state, List openJobIds, List closingJobIds) { - PersistentTasksCustomMetaData tasksMetaData = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - MlMetadata maybeNull = state.metaData().custom(MlMetadata.TYPE); - final MlMetadata mlMetadata = (maybeNull == null) ? MlMetadata.EMPTY_METADATA : maybeNull; - - List failedJobs = new ArrayList<>(); - - Consumer jobIdProcessor = id -> { - validateJobAndTaskState(id, mlMetadata, tasksMetaData); - Job job = mlMetadata.getJobs().get(id); - if (job.isDeleted()) { - return; - } - addJobAccordingToState(id, tasksMetaData, openJobIds, closingJobIds, failedJobs); - }; - - Set expandedJobIds = mlMetadata.expandJobIds(request.getJobId(), request.allowNoJobs()); - expandedJobIds.stream().forEach(jobIdProcessor::accept); - if (request.isForce() == false && failedJobs.size() > 0) { - if (expandedJobIds.size() == 1) { - throw ExceptionsHelper.conflictStatusException("cannot close job [{}] because it failed, use force close", - expandedJobIds.iterator().next()); - } - throw ExceptionsHelper.conflictStatusException("one or more jobs have state failed, use force close"); - } - - // allowFailed == true - openJobIds.addAll(failedJobs); - } - - private static void addJobAccordingToState(String jobId, PersistentTasksCustomMetaData tasksMetaData, - List openJobs, List closingJobs, List failedJobs) { - - JobState jobState = MlMetadata.getJobState(jobId, tasksMetaData); - switch (jobState) { - case CLOSING: - closingJobs.add(jobId); - break; - case FAILED: - failedJobs.add(jobId); - break; - case OPENING: - case OPENED: - openJobs.add(jobId); - break; - default: - break; - } - } - - static TransportAction.WaitForCloseRequest buildWaitForCloseRequest(List 
openJobIds, List closingJobIds, - PersistentTasksCustomMetaData tasks, Auditor auditor) { - TransportAction.WaitForCloseRequest waitForCloseRequest = new TransportAction.WaitForCloseRequest(); - - for (String jobId : openJobIds) { - PersistentTask jobTask = MlMetadata.getJobTask(jobId, tasks); - if (jobTask != null) { - auditor.info(jobId, Messages.JOB_AUDIT_CLOSING); - waitForCloseRequest.persistentTaskIds.add(jobTask.getId()); - waitForCloseRequest.jobsToFinalize.add(jobId); - } - } - for (String jobId : closingJobIds) { - PersistentTask jobTask = MlMetadata.getJobTask(jobId, tasks); - if (jobTask != null) { - waitForCloseRequest.persistentTaskIds.add(jobTask.getId()); - } - } - - return waitForCloseRequest; - } - - /** - * Validate the close request. Throws an exception on any of these conditions: - *
      - *
    • If the job does not exist
    • - *
    • If the job has a data feed the feed must be closed first
    • - *
    • If the job is opening
    • - *
    - * - * If the job is already closed an empty Optional is returned. - * @param jobId Job Id - * @param mlMetadata ML MetaData - * @param tasks Persistent tasks - */ - static void validateJobAndTaskState(String jobId, MlMetadata mlMetadata, PersistentTasksCustomMetaData tasks) { - Job job = mlMetadata.getJobs().get(jobId); - if (job == null) { - throw new ResourceNotFoundException("cannot close job, because job [" + jobId + "] does not exist"); - } - - Optional datafeed = mlMetadata.getDatafeedByJobId(jobId); - if (datafeed.isPresent()) { - DatafeedState datafeedState = MlMetadata.getDatafeedState(datafeed.get().getId(), tasks); - if (datafeedState != DatafeedState.STOPPED) { - throw ExceptionsHelper.conflictStatusException("cannot close job [{}], datafeed hasn't been stopped", jobId); - } - } - } -} - diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteDatafeedAction.java deleted file mode 100644 index 50523d9eec9..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteDatafeedAction.java +++ /dev/null @@ -1,269 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.action; - -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.Version; -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; -import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.AckedClusterStateUpdateTask; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContentFragment; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MlMetadata; -import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.xpack.persistent.PersistentTasksService; - -import java.io.IOException; -import 
java.util.Objects; - -import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; - -public class DeleteDatafeedAction extends Action { - - public static final DeleteDatafeedAction INSTANCE = new DeleteDatafeedAction(); - public static final String NAME = "cluster:admin/xpack/ml/datafeeds/delete"; - - private DeleteDatafeedAction() { - super(NAME); - } - - @Override - public RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client, this); - } - - @Override - public Response newResponse() { - return new Response(); - } - - public static class Request extends AcknowledgedRequest implements ToXContentFragment { - - public static final ParseField FORCE = new ParseField("force"); - - private String datafeedId; - private boolean force; - - public Request(String datafeedId) { - this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); - } - - Request() { - } - - public String getDatafeedId() { - return datafeedId; - } - - public boolean isForce() { - return force; - } - - public void setForce(boolean force) { - this.force = force; - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - datafeedId = in.readString(); - if (in.getVersion().onOrAfter(Version.V_5_5_0)) { - force = in.readBoolean(); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(datafeedId); - if (out.getVersion().onOrAfter(Version.V_5_5_0)) { - out.writeBoolean(force); - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return 
true; - if (o == null || getClass() != o.getClass()) return false; - Request other = (Request) o; - return Objects.equals(datafeedId, other.datafeedId) && Objects.equals(force, other.force); - } - - @Override - public int hashCode() { - return Objects.hash(datafeedId, force); - } - } - - public static class RequestBuilder extends MasterNodeOperationRequestBuilder { - - public RequestBuilder(ElasticsearchClient client, DeleteDatafeedAction action) { - super(client, action, new Request()); - } - } - - public static class Response extends AcknowledgedResponse { - - private Response() { - } - - private Response(boolean acknowledged) { - super(acknowledged); - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - readAcknowledged(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - writeAcknowledged(out); - } - } - - public static class TransportAction extends TransportMasterNodeAction { - - private Client client; - private PersistentTasksService persistentTasksService; - - @Inject - public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - Client client, PersistentTasksService persistentTasksService) { - super(settings, DeleteDatafeedAction.NAME, transportService, clusterService, threadPool, actionFilters, - indexNameExpressionResolver, Request::new); - this.client = client; - this.persistentTasksService = persistentTasksService; - } - - @Override - protected String executor() { - return ThreadPool.Names.SAME; - } - - @Override - protected Response newResponse() { - return new Response(); - } - - @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { - if (request.isForce()) { - forceDeleteDatafeed(request, state, listener); - } else { - 
deleteDatafeedFromMetadata(request, listener); - } - } - - private void forceDeleteDatafeed(Request request, ClusterState state, ActionListener listener) { - ActionListener finalListener = ActionListener.wrap( - response -> deleteDatafeedFromMetadata(request, listener), - listener::onFailure - ); - - ActionListener isolateDatafeedHandler = ActionListener.wrap( - response -> removeDatafeedTask(request, state, finalListener), - listener::onFailure - ); - - IsolateDatafeedAction.Request isolateDatafeedRequest = new IsolateDatafeedAction.Request(request.getDatafeedId()); - executeAsyncWithOrigin(client, ML_ORIGIN, IsolateDatafeedAction.INSTANCE, isolateDatafeedRequest, isolateDatafeedHandler); - } - - private void removeDatafeedTask(Request request, ClusterState state, ActionListener listener) { - PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - PersistentTasksCustomMetaData.PersistentTask datafeedTask = MlMetadata.getDatafeedTask(request.getDatafeedId(), tasks); - if (datafeedTask == null) { - listener.onResponse(true); - } else { - persistentTasksService.cancelPersistentTask(datafeedTask.getId(), - new ActionListener>() { - @Override - public void onResponse(PersistentTasksCustomMetaData.PersistentTask persistentTask) { - listener.onResponse(Boolean.TRUE); - } - - @Override - public void onFailure(Exception e) { - if (e instanceof ResourceNotFoundException) { - // the task has been removed in between - listener.onResponse(true); - } else { - listener.onFailure(e); - } - } - }); - } - } - - private void deleteDatafeedFromMetadata(Request request, ActionListener listener) { - clusterService.submitStateUpdateTask("delete-datafeed-" + request.getDatafeedId(), - new AckedClusterStateUpdateTask(request, listener) { - - @Override - protected Response newResponse(boolean acknowledged) { - return new Response(acknowledged); - } - - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - 
MlMetadata currentMetadata = currentState.getMetaData().custom(MlMetadata.TYPE); - PersistentTasksCustomMetaData persistentTasks = - currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata) - .removeDatafeed(request.getDatafeedId(), persistentTasks).build(); - return ClusterState.builder(currentState).metaData( - MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, newMetadata).build()) - .build(); - } - }); - } - - @Override - protected ClusterBlockException checkBlock(Request request, ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); - } - } -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteFilterAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteFilterAction.java deleted file mode 100644 index ba1598b851d..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteFilterAction.java +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.action; - -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.bulk.BulkAction; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MlMetaIndex; -import org.elasticsearch.xpack.ml.MlMetadata; -import org.elasticsearch.xpack.ml.job.config.Detector; -import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.config.MlFilter; -import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import 
java.util.Objects; - -import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; - - -public class DeleteFilterAction extends Action { - - public static final DeleteFilterAction INSTANCE = new DeleteFilterAction(); - public static final String NAME = "cluster:admin/xpack/ml/filters/delete"; - - private DeleteFilterAction() { - super(NAME); - } - - @Override - public RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client, this); - } - - @Override - public Response newResponse() { - return new Response(); - } - - public static class Request extends AcknowledgedRequest { - - public static final ParseField FILTER_ID = new ParseField("filter_id"); - - private String filterId; - - Request() { - - } - - public Request(String filterId) { - this.filterId = ExceptionsHelper.requireNonNull(filterId, FILTER_ID.getPreferredName()); - } - - public String getFilterId() { - return filterId; - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - filterId = in.readString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(filterId); - } - - @Override - public int hashCode() { - return Objects.hash(filterId); - } - - @Override - public boolean equals(Object obj) { - if (obj == null || getClass() != obj.getClass()) { - return false; - } - Request other = (Request) obj; - return Objects.equals(filterId, other.filterId); - } - } - - public static class RequestBuilder extends MasterNodeOperationRequestBuilder { - - public RequestBuilder(ElasticsearchClient client, DeleteFilterAction action) { - super(client, action, new Request()); - } - } - - public static class Response extends AcknowledgedResponse { - - public Response(boolean acknowledged) { - 
super(acknowledged); - } - - private Response() {} - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - readAcknowledged(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - writeAcknowledged(out); - } - } - - public static class TransportAction extends HandledTransportAction { - - private final Client client; - private final ClusterService clusterService; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - ClusterService clusterService, Client client) { - super(settings, NAME, threadPool, transportService, actionFilters, - indexNameExpressionResolver, Request::new); - this.clusterService = clusterService; - this.client = client; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - - final String filterId = request.getFilterId(); - ClusterState state = clusterService.state(); - MlMetadata currentMlMetadata = state.metaData().custom(MlMetadata.TYPE); - Map jobs = currentMlMetadata.getJobs(); - List currentlyUsedBy = new ArrayList<>(); - for (Job job : jobs.values()) { - List detectors = job.getAnalysisConfig().getDetectors(); - for (Detector detector : detectors) { - if (detector.extractReferencedFilters().contains(filterId)) { - currentlyUsedBy.add(job.getId()); - break; - } - } - } - if (!currentlyUsedBy.isEmpty()) { - throw ExceptionsHelper.conflictStatusException("Cannot delete filter, currently used by jobs: " - + currentlyUsedBy); - } - - DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, MlFilter.documentId(filterId)); - BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); - bulkRequestBuilder.add(deleteRequest); - bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - executeAsyncWithOrigin(client, 
ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), - new ActionListener() { - @Override - public void onResponse(BulkResponse bulkResponse) { - if (bulkResponse.getItems()[0].status() == RestStatus.NOT_FOUND) { - listener.onFailure(new ResourceNotFoundException("Could not delete filter with ID [" + filterId - + "] because it does not exist")); - } else { - listener.onResponse(new Response(true)); - } - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(ExceptionsHelper.serverError("Could not delete filter with ID [" + filterId + "]", e)); - } - }); - } - } -} - diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteJobAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteJobAction.java deleted file mode 100644 index eba9421b21d..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteJobAction.java +++ /dev/null @@ -1,389 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.action; - -import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.Version; -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; -import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateObserver; -import org.elasticsearch.cluster.ClusterStateUpdateTask; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.node.NodeClosedException; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.MlMetadata; -import org.elasticsearch.xpack.ml.job.JobManager; -import 
org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.persistence.JobStorageDeletionTask; -import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.xpack.persistent.PersistentTasksService; - -import java.io.IOException; -import java.util.Objects; -import java.util.concurrent.TimeoutException; - -import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; - -public class DeleteJobAction extends Action { - - public static final DeleteJobAction INSTANCE = new DeleteJobAction(); - public static final String NAME = "cluster:admin/xpack/ml/job/delete"; - - private DeleteJobAction() { - super(NAME); - } - - @Override - public RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client, this); - } - - @Override - public Response newResponse() { - return new Response(); - } - - public static class Request extends AcknowledgedRequest { - - private String jobId; - private boolean force; - - public Request(String jobId) { - this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); - } - - Request() {} - - public String getJobId() { - return jobId; - } - - public void setJobId(String jobId) { - this.jobId = jobId; - } - - public boolean isForce() { - return force; - } - - public void setForce(boolean force) { - this.force = force; - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public Task createTask(long id, String type, String action, TaskId parentTaskId) { - return new JobStorageDeletionTask(id, type, action, "delete-job-" + jobId, parentTaskId); - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - jobId = in.readString(); - if (in.getVersion().onOrAfter(Version.V_5_5_0)) { - force = in.readBoolean(); - } 
- } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(jobId); - if (out.getVersion().onOrAfter(Version.V_5_5_0)) { - out.writeBoolean(force); - } - } - - @Override - public int hashCode() { - return Objects.hash(jobId, force); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || obj.getClass() != getClass()) { - return false; - } - DeleteJobAction.Request other = (DeleteJobAction.Request) obj; - return Objects.equals(jobId, other.jobId) && Objects.equals(force, other.force); - } - } - - static class RequestBuilder extends MasterNodeOperationRequestBuilder { - - RequestBuilder(ElasticsearchClient client, DeleteJobAction action) { - super(client, action, new Request()); - } - } - - public static class Response extends AcknowledgedResponse { - - public Response(boolean acknowledged) { - super(acknowledged); - } - - private Response() {} - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - readAcknowledged(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - writeAcknowledged(out); - } - } - - public static class TransportAction extends TransportMasterNodeAction { - - private final Client client; - private final JobManager jobManager; - private final PersistentTasksService persistentTasksService; - - @Inject - public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - JobManager jobManager, PersistentTasksService persistentTasksService, Client client) { - super(settings, DeleteJobAction.NAME, transportService, clusterService, threadPool, actionFilters, - indexNameExpressionResolver, Request::new); - this.client = client; - this.jobManager = jobManager; - this.persistentTasksService = 
persistentTasksService; - } - - @Override - protected String executor() { - return ThreadPool.Names.SAME; - } - - @Override - protected Response newResponse() { - return new Response(); - } - - @Override - protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) throws Exception { - - ActionListener markAsDeletingListener = ActionListener.wrap( - response -> { - if (request.isForce()) { - forceDeleteJob(request, (JobStorageDeletionTask) task, listener); - } else { - normalDeleteJob(request, (JobStorageDeletionTask) task, listener); - } - }, - e -> { - if (e instanceof MlMetadata.JobAlreadyMarkedAsDeletedException) { - // Don't kick off a parallel deletion task, but just wait for - // the in-progress request to finish. This is much safer in the - // case where the job with the same name might be immediately - // recreated after the delete returns. However, if a force - // delete times out then eventually kick off a parallel delete - // in case the original completely failed for some reason. 
- waitForDeletingJob(request.getJobId(), MachineLearning.STATE_PERSIST_RESTORE_TIMEOUT, ActionListener.wrap( - listener::onResponse, - e2 -> { - if (request.isForce() && e2 instanceof TimeoutException) { - forceDeleteJob(request, (JobStorageDeletionTask) task, listener); - } else { - listener.onFailure(e2); - } - } - )); - } else { - listener.onFailure(e); - } - }); - - markJobAsDeleting(request.getJobId(), markAsDeletingListener, request.isForce()); - } - - @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { - throw new UnsupportedOperationException("the Task parameter is required"); - } - - @Override - protected ClusterBlockException checkBlock(Request request, ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); - } - - private void normalDeleteJob(Request request, JobStorageDeletionTask task, ActionListener listener) { - jobManager.deleteJob(request, task, listener); - } - - private void forceDeleteJob(Request request, JobStorageDeletionTask task, ActionListener listener) { - - final ClusterState state = clusterService.state(); - final String jobId = request.getJobId(); - - // 3. Delete the job - ActionListener removeTaskListener = new ActionListener() { - @Override - public void onResponse(Boolean response) { - jobManager.deleteJob(request, task, listener); - } - - @Override - public void onFailure(Exception e) { - if (e instanceof ResourceNotFoundException) { - jobManager.deleteJob(request, task, listener); - } else { - listener.onFailure(e); - } - } - }; - - // 2. Cancel the persistent task. This closes the process gracefully so - // the process should be killed first. 
- ActionListener killJobListener = ActionListener.wrap( - response -> { - removePersistentTask(request.getJobId(), state, removeTaskListener); - }, - e -> { - if (e instanceof ElasticsearchStatusException) { - // Killing the process marks the task as completed so it - // may have disappeared when we get here - removePersistentTask(request.getJobId(), state, removeTaskListener); - } else { - listener.onFailure(e); - } - } - ); - - // 1. Kill the job's process - killProcess(jobId, killJobListener); - } - - private void killProcess(String jobId, ActionListener listener) { - KillProcessAction.Request killRequest = new KillProcessAction.Request(jobId); - executeAsyncWithOrigin(client, ML_ORIGIN, KillProcessAction.INSTANCE, killRequest, listener); - } - - private void removePersistentTask(String jobId, ClusterState currentState, - ActionListener listener) { - PersistentTasksCustomMetaData tasks = currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - - PersistentTasksCustomMetaData.PersistentTask jobTask = MlMetadata.getJobTask(jobId, tasks); - if (jobTask == null) { - listener.onResponse(null); - } else { - persistentTasksService.cancelPersistentTask(jobTask.getId(), - new ActionListener>() { - @Override - public void onResponse(PersistentTasksCustomMetaData.PersistentTask task) { - listener.onResponse(Boolean.TRUE); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); - } - } - - void markJobAsDeleting(String jobId, ActionListener listener, boolean force) { - clusterService.submitStateUpdateTask("mark-job-as-deleted", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - MlMetadata currentMlMetadata = currentState.metaData().custom(MlMetadata.TYPE); - PersistentTasksCustomMetaData tasks = currentState.metaData().custom(PersistentTasksCustomMetaData.TYPE); - MlMetadata.Builder builder = new MlMetadata.Builder(currentMlMetadata); - 
builder.markJobAsDeleted(jobId, tasks, force); - return buildNewClusterState(currentState, builder); - } - - @Override - public void onFailure(String source, Exception e) { - listener.onFailure(e); - } - - @Override - public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) { - logger.debug("Job [" + jobId + "] is successfully marked as deleted"); - listener.onResponse(true); - } - }); - } - - void waitForDeletingJob(String jobId, TimeValue timeout, ActionListener listener) { - ClusterStateObserver stateObserver = new ClusterStateObserver(clusterService, timeout, logger, threadPool.getThreadContext()); - - ClusterState clusterState = stateObserver.setAndGetObservedState(); - if (jobIsDeletedFromState(jobId, clusterState)) { - listener.onResponse(new Response(true)); - } else { - stateObserver.waitForNextChange(new ClusterStateObserver.Listener() { - @Override - public void onNewClusterState(ClusterState state) { - listener.onResponse(new Response(true)); - } - - @Override - public void onClusterServiceClose() { - listener.onFailure(new NodeClosedException(clusterService.localNode())); - } - - @Override - public void onTimeout(TimeValue timeout) { - listener.onFailure(new TimeoutException("timed out after " + timeout)); - } - }, newClusterState -> jobIsDeletedFromState(jobId, newClusterState), timeout); - } - } - - static boolean jobIsDeletedFromState(String jobId, ClusterState clusterState) { - MlMetadata metadata = clusterState.metaData().custom(MlMetadata.TYPE); - if (metadata == null) { - return true; - } - return !metadata.getJobs().containsKey(jobId); - } - - private static ClusterState buildNewClusterState(ClusterState currentState, MlMetadata.Builder builder) { - ClusterState.Builder newState = ClusterState.builder(currentState); - newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, builder.build()).build()); - return newState.build(); - } - } -} diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteModelSnapshotAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteModelSnapshotAction.java deleted file mode 100644 index 68172a3d1e3..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteModelSnapshotAction.java +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml.action; - -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.job.JobManager; -import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.messages.Messages; -import org.elasticsearch.xpack.ml.job.persistence.JobDataDeleter; -import 
org.elasticsearch.xpack.ml.job.persistence.JobProvider; -import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; -import org.elasticsearch.xpack.ml.notifications.Auditor; -import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; - -public class DeleteModelSnapshotAction extends Action { - - public static final DeleteModelSnapshotAction INSTANCE = new DeleteModelSnapshotAction(); - public static final String NAME = "cluster:admin/xpack/ml/job/model_snapshots/delete"; - - private DeleteModelSnapshotAction() { - super(NAME); - } - - @Override - public DeleteModelSnapshotAction.RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client, this); - } - - @Override - public DeleteModelSnapshotAction.Response newResponse() { - return new Response(); - } - - public static class Request extends ActionRequest { - - private String jobId; - private String snapshotId; - - private Request() { - } - - public Request(String jobId, String snapshotId) { - this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); - this.snapshotId = ExceptionsHelper.requireNonNull(snapshotId, ModelSnapshot.SNAPSHOT_ID.getPreferredName()); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - jobId = in.readString(); - snapshotId = in.readString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(jobId); - out.writeString(snapshotId); - } - } - - public static class Response extends AcknowledgedResponse { - - public Response(boolean acknowledged) { - super(acknowledged); - } - - private Response() {} - - @Override - public 
void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - readAcknowledged(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - writeAcknowledged(out); - } - - } - - public static class RequestBuilder extends ActionRequestBuilder { - - public RequestBuilder(ElasticsearchClient client, DeleteModelSnapshotAction action) { - super(client, action, new Request()); - } - } - - public static class TransportAction extends HandledTransportAction { - - private final Client client; - private final JobProvider jobProvider; - private final ClusterService clusterService; - private final Auditor auditor; - - @Inject - public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - JobProvider jobProvider, ClusterService clusterService, Client client, Auditor auditor) { - super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new); - this.client = client; - this.jobProvider = jobProvider; - this.clusterService = clusterService; - this.auditor = auditor; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - // Verify the snapshot exists - jobProvider.modelSnapshots( - request.getJobId(), 0, 1, null, null, null, true, request.getSnapshotId(), - page -> { - List deleteCandidates = page.results(); - if (deleteCandidates.size() > 1) { - logger.warn("More than one model found for [job_id: " + request.getJobId() - + ", snapshot_id: " + request.getSnapshotId() + "] tuple."); - } - - if (deleteCandidates.isEmpty()) { - listener.onFailure(new ResourceNotFoundException(Messages.getMessage(Messages.REST_NO_SUCH_MODEL_SNAPSHOT, - request.getSnapshotId(), request.getJobId()))); - return; - } - ModelSnapshot deleteCandidate = deleteCandidates.get(0); - - // Verify the snapshot is not being used - Job job = 
JobManager.getJobOrThrowIfUnknown(request.getJobId(), clusterService.state()); - String currentModelInUse = job.getModelSnapshotId(); - if (currentModelInUse != null && currentModelInUse.equals(request.getSnapshotId())) { - throw new IllegalArgumentException(Messages.getMessage(Messages.REST_CANNOT_DELETE_HIGHEST_PRIORITY, - request.getSnapshotId(), request.getJobId())); - } - - // Delete the snapshot and any associated state files - JobDataDeleter deleter = new JobDataDeleter(client, request.getJobId()); - deleter.deleteModelSnapshots(Collections.singletonList(deleteCandidate), new ActionListener() { - @Override - public void onResponse(BulkResponse bulkResponse) { - String msg = Messages.getMessage(Messages.JOB_AUDIT_SNAPSHOT_DELETED, deleteCandidate.getSnapshotId(), - deleteCandidate.getDescription()); - auditor.info(request.getJobId(), msg); - logger.debug("[{}] {}", request.getJobId(), msg); - // We don't care about the bulk response, just that it succeeded - listener.onResponse(new DeleteModelSnapshotAction.Response(true)); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); - - }, listener::onFailure); - } - } -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/FinalizeJobExecutionAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/FinalizeJobExecutionAction.java deleted file mode 100644 index cc7a574d0de..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/FinalizeJobExecutionAction.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.action; - -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; -import org.elasticsearch.action.support.master.MasterNodeRequest; -import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateUpdateTask; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MlMetadata; -import org.elasticsearch.xpack.ml.job.config.Job; - -import java.io.IOException; -import java.util.Date; - -public class FinalizeJobExecutionAction extends Action { - - public static final FinalizeJobExecutionAction INSTANCE = new FinalizeJobExecutionAction(); - public static final String NAME = "cluster:internal/xpack/ml/job/finalize_job_execution"; - - private FinalizeJobExecutionAction() { - super(NAME); - } - - @Override - public RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client, INSTANCE); - } - - @Override - public Response newResponse() { - return 
new Response(); - } - - public static class Request extends MasterNodeRequest { - - private String[] jobIds; - - public Request(String[] jobIds) { - this.jobIds = jobIds; - } - - Request() { - } - - public String[] getJobIds() { - return jobIds; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - jobIds = in.readStringArray(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeStringArray(jobIds); - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - } - - public static class RequestBuilder - extends MasterNodeOperationRequestBuilder { - - public RequestBuilder(ElasticsearchClient client, FinalizeJobExecutionAction action) { - super(client, action, new Request()); - } - } - - public static class Response extends AcknowledgedResponse { - - Response(boolean acknowledged) { - super(acknowledged); - } - - Response() { - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - readAcknowledged(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - writeAcknowledged(out); - } - } - - public static class TransportAction extends TransportMasterNodeAction { - - @Inject - public TransportAction(Settings settings, TransportService transportService, - ClusterService clusterService, ThreadPool threadPool, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, NAME, transportService, clusterService, threadPool, actionFilters, - indexNameExpressionResolver, Request::new); - } - - @Override - protected String executor() { - return ThreadPool.Names.SAME; - } - - @Override - protected Response newResponse() { - return new Response(); - } - - @Override - protected void masterOperation(Request request, ClusterState state, - ActionListener listener) throws Exception { - String jobIdString 
= String.join(",", request.getJobIds()); - String source = "finalize_job_execution [" + jobIdString + "]"; - logger.debug("finalizing jobs [{}]", jobIdString); - clusterService.submitStateUpdateTask(source, new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - MlMetadata mlMetadata = currentState.metaData().custom(MlMetadata.TYPE); - MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(mlMetadata); - Date finishedTime = new Date(); - - for (String jobId : request.getJobIds()) { - Job.Builder jobBuilder = new Job.Builder(mlMetadata.getJobs().get(jobId)); - jobBuilder.setFinishedTime(finishedTime); - mlMetadataBuilder.putJob(jobBuilder.build(), true); - } - ClusterState.Builder builder = ClusterState.builder(currentState); - return builder.metaData(new MetaData.Builder(currentState.metaData()) - .putCustom(MlMetadata.TYPE, mlMetadataBuilder.build())) - .build(); - } - - @Override - public void onFailure(String source, Exception e) { - listener.onFailure(e); - } - - @Override - public void clusterStateProcessed(String source, ClusterState oldState, - ClusterState newState) { - logger.debug("finalized job [{}]", jobIdString); - listener.onResponse(new Response(true)); - } - }); - } - - @Override - protected ClusterBlockException checkBlock(Request request, ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); - } - } - -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetFiltersAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetFiltersAction.java deleted file mode 100644 index ba4eff3a44f..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetFiltersAction.java +++ /dev/null @@ -1,311 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml.action; - -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.get.GetAction; -import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.StatusToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MlMetaIndex; -import 
org.elasticsearch.xpack.ml.action.util.PageParams; -import org.elasticsearch.xpack.ml.action.util.QueryPage; -import org.elasticsearch.xpack.ml.job.config.MlFilter; -import org.elasticsearch.xpack.ml.job.persistence.JobProvider; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.action.ValidateActions.addValidationError; -import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; - - -public class GetFiltersAction extends Action { - - public static final GetFiltersAction INSTANCE = new GetFiltersAction(); - public static final String NAME = "cluster:admin/xpack/ml/filters/get"; - - private GetFiltersAction() { - super(NAME); - } - - @Override - public RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client); - } - - @Override - public Response newResponse() { - return new Response(); - } - - public static class Request extends ActionRequest { - - private String filterId; - private PageParams pageParams; - - public Request() { - } - - public void setFilterId(String filterId) { - this.filterId = filterId; - } - - public String getFilterId() { - return filterId; - } - - public PageParams getPageParams() { - return pageParams; - } - - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException validationException = null; - if (pageParams != null && filterId != null) { - validationException = addValidationError("Params [" + PageParams.FROM.getPreferredName() + - ", " + PageParams.SIZE.getPreferredName() + "] are incompatible with [" - + MlFilter.ID.getPreferredName() + "]", validationException); - } - return validationException; - } - - @Override - public void readFrom(StreamInput in) throws 
IOException { - super.readFrom(in); - filterId = in.readOptionalString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeOptionalString(filterId); - } - - @Override - public int hashCode() { - return Objects.hash(filterId); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - Request other = (Request) obj; - return Objects.equals(filterId, other.filterId); - } - } - - public static class RequestBuilder extends ActionRequestBuilder { - - public RequestBuilder(ElasticsearchClient client) { - super(client, INSTANCE, new Request()); - } - } - - public static class Response extends ActionResponse implements StatusToXContentObject { - - private QueryPage filters; - - public Response(QueryPage filters) { - this.filters = filters; - } - - Response() { - } - - public QueryPage getFilters() { - return filters; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - filters = new QueryPage<>(in, MlFilter::new); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - filters.writeTo(out); - } - - @Override - public RestStatus status() { - return RestStatus.OK; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - filters.doXContentBody(builder, params); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(filters); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - Response other = (Response) obj; - return Objects.equals(filters, other.filters); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - } - - public static class TransportAction 
extends HandledTransportAction { - - private final Client client; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - Client client) { - super(settings, NAME, threadPool, transportService, actionFilters, - indexNameExpressionResolver, Request::new); - this.client = client; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - final String filterId = request.getFilterId(); - if (!Strings.isNullOrEmpty(filterId)) { - getFilter(filterId, listener); - } else { - PageParams pageParams = request.getPageParams(); - if (pageParams == null) { - pageParams = PageParams.defaultParams(); - } - getFilters(pageParams, listener); - } - } - - private void getFilter(String filterId, ActionListener listener) { - GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, MlFilter.documentId(filterId)); - executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener() { - @Override - public void onResponse(GetResponse getDocResponse) { - - try { - QueryPage responseBody; - if (getDocResponse.isExists()) { - BytesReference docSource = getDocResponse.getSourceAsBytesRef(); - XContentParser parser = - XContentFactory.xContent(docSource).createParser(NamedXContentRegistry.EMPTY, docSource); - MlFilter filter = MlFilter.PARSER.apply(parser, null).build(); - responseBody = new QueryPage<>(Collections.singletonList(filter), 1, MlFilter.RESULTS_FIELD); - - Response filterResponse = new Response(responseBody); - listener.onResponse(filterResponse); - } else { - this.onFailure(QueryPage.emptyQueryPage(MlFilter.RESULTS_FIELD)); - } - - } catch (Exception e) { - this.onFailure(e); - } - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); - } - - private void getFilters(PageParams pageParams, ActionListener listener) { - 
SearchSourceBuilder sourceBuilder = new SearchSourceBuilder() - .from(pageParams.getFrom()) - .size(pageParams.getSize()) - .query(QueryBuilders.termQuery(MlFilter.TYPE.getPreferredName(), MlFilter.FILTER_TYPE)); - - SearchRequest searchRequest = new SearchRequest(MlMetaIndex.INDEX_NAME) - .indicesOptions(JobProvider.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)) - .source(sourceBuilder); - - executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest, new ActionListener() { - @Override - public void onResponse(SearchResponse response) { - List docs = new ArrayList<>(); - for (SearchHit hit : response.getHits().getHits()) { - BytesReference docSource = hit.getSourceRef(); - try (XContentParser parser = XContentFactory.xContent(docSource).createParser( - NamedXContentRegistry.EMPTY, docSource)) { - docs.add(MlFilter.PARSER.apply(parser, null).build()); - } catch (IOException e) { - this.onFailure(e); - } - } - - Response filterResponse = new Response(new QueryPage<>(docs, docs.size(), MlFilter.RESULTS_FIELD)); - listener.onResponse(filterResponse); - } - - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }, - client::search); - } - } -} - diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetOverallBucketsAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetOverallBucketsAction.java deleted file mode 100644 index 8cbf9605406..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetOverallBucketsAction.java +++ /dev/null @@ -1,606 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.action; - -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.joda.DateMathParser; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.ObjectParser; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.metrics.max.Max; -import org.elasticsearch.search.aggregations.metrics.min.Min; -import 
org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.action.util.QueryPage; -import org.elasticsearch.xpack.ml.job.JobManager; -import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.messages.Messages; -import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; -import org.elasticsearch.xpack.ml.job.persistence.BucketsQueryBuilder; -import org.elasticsearch.xpack.ml.job.persistence.JobProvider; -import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsAggregator; -import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsCollector; -import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsProcessor; -import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsProvider; -import org.elasticsearch.xpack.ml.job.results.Bucket; -import org.elasticsearch.xpack.ml.job.results.OverallBucket; -import org.elasticsearch.xpack.ml.job.results.Result; -import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.utils.Intervals; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Objects; -import java.util.Set; -import java.util.function.LongSupplier; - -import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; - -/** - *

    - * This action returns summarized bucket results over multiple jobs. - * Overall buckets have the span of the largest job's bucket_span. - * Their score is calculated by finding the max anomaly score per job - * and then averaging the top N. - *

    - *

    - * Overall buckets can be optionally aggregated into larger intervals - * by setting the bucket_span parameter. When that is the case, the - * overall_score is the max of the overall buckets that are within - * the interval. - *

    - */ -public class GetOverallBucketsAction - extends Action { - - public static final GetOverallBucketsAction INSTANCE = new GetOverallBucketsAction(); - public static final String NAME = "cluster:monitor/xpack/ml/job/results/overall_buckets/get"; - - private GetOverallBucketsAction() { - super(NAME); - } - - @Override - public RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client); - } - - @Override - public Response newResponse() { - return new Response(); - } - - public static class Request extends ActionRequest implements ToXContentObject { - - public static final ParseField TOP_N = new ParseField("top_n"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField OVERALL_SCORE = new ParseField("overall_score"); - public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs"); - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); - - static { - PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID); - PARSER.declareInt(Request::setTopN, TOP_N); - PARSER.declareString(Request::setBucketSpan, BUCKET_SPAN); - PARSER.declareDouble(Request::setOverallScore, OVERALL_SCORE); - PARSER.declareBoolean(Request::setExcludeInterim, EXCLUDE_INTERIM); - PARSER.declareString((request, startTime) -> request.setStart(parseDateOrThrow( - startTime, START, System::currentTimeMillis)), START); - PARSER.declareString((request, endTime) -> request.setEnd(parseDateOrThrow( - endTime, END, System::currentTimeMillis)), END); - PARSER.declareBoolean(Request::setAllowNoJobs, ALLOW_NO_JOBS); - } - - static long parseDateOrThrow(String date, ParseField paramName, LongSupplier now) { - DateMathParser dateMathParser = 
new DateMathParser(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER); - - try { - return dateMathParser.parse(date, now); - } catch (Exception e) { - String msg = Messages.getMessage(Messages.REST_INVALID_DATETIME_PARAMS, paramName.getPreferredName(), date); - throw new ElasticsearchParseException(msg, e); - } - } - - public static Request parseRequest(String jobId, XContentParser parser) { - Request request = PARSER.apply(parser, null); - if (jobId != null) { - request.jobId = jobId; - } - return request; - } - - private String jobId; - private int topN = 1; - private TimeValue bucketSpan; - private double overallScore = 0.0; - private boolean excludeInterim = false; - private Long start; - private Long end; - private boolean allowNoJobs = true; - - Request() { - } - - public Request(String jobId) { - this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); - } - - public String getJobId() { - return jobId; - } - - public int getTopN() { - return topN; - } - - public void setTopN(int topN) { - if (topN <= 0) { - throw new IllegalArgumentException("[topN] parameter must be positive, found [" + topN + "]"); - } - this.topN = topN; - } - - public TimeValue getBucketSpan() { - return bucketSpan; - } - - public void setBucketSpan(TimeValue bucketSpan) { - this.bucketSpan = bucketSpan; - } - - public void setBucketSpan(String bucketSpan) { - this.bucketSpan = TimeValue.parseTimeValue(bucketSpan, BUCKET_SPAN.getPreferredName()); - } - - public double getOverallScore() { - return overallScore; - } - - public void setOverallScore(double overallScore) { - this.overallScore = overallScore; - } - - public boolean isExcludeInterim() { - return excludeInterim; - } - - public void setExcludeInterim(boolean excludeInterim) { - this.excludeInterim = excludeInterim; - } - - public Long getStart() { - return start; - } - - public void setStart(Long start) { - this.start = start; - } - - public void setStart(String start) { - setStart(parseDateOrThrow(start, START, 
System::currentTimeMillis)); - } - - public Long getEnd() { - return end; - } - - public void setEnd(Long end) { - this.end = end; - } - - public void setEnd(String end) { - setEnd(parseDateOrThrow(end, END, System::currentTimeMillis)); - } - - public boolean allowNoJobs() { - return allowNoJobs; - } - - public void setAllowNoJobs(boolean allowNoJobs) { - this.allowNoJobs = allowNoJobs; - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - jobId = in.readString(); - topN = in.readVInt(); - bucketSpan = in.readOptionalWriteable(TimeValue::new); - overallScore = in.readDouble(); - excludeInterim = in.readBoolean(); - start = in.readOptionalLong(); - end = in.readOptionalLong(); - allowNoJobs = in.readBoolean(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(jobId); - out.writeVInt(topN); - out.writeOptionalWriteable(bucketSpan); - out.writeDouble(overallScore); - out.writeBoolean(excludeInterim); - out.writeOptionalLong(start); - out.writeOptionalLong(end); - out.writeBoolean(allowNoJobs); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(TOP_N.getPreferredName(), topN); - if (bucketSpan != null) { - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan.getStringRep()); - } - builder.field(OVERALL_SCORE.getPreferredName(), overallScore); - builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim); - if (start != null) { - builder.field(START.getPreferredName(), String.valueOf(start)); - } - if (end != null) { - builder.field(END.getPreferredName(), String.valueOf(end)); - } - builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs); - builder.endObject(); - return builder; - } - - @Override - 
public int hashCode() { - return Objects.hash(jobId, topN, bucketSpan, overallScore, excludeInterim, start, end, allowNoJobs); - } - - @Override - public boolean equals(Object other) { - if (other == null) { - return false; - } - if (getClass() != other.getClass()) { - return false; - } - Request that = (Request) other; - return Objects.equals(jobId, that.jobId) && - this.topN == that.topN && - Objects.equals(bucketSpan, that.bucketSpan) && - this.excludeInterim == that.excludeInterim && - this.overallScore == that.overallScore && - Objects.equals(start, that.start) && - Objects.equals(end, that.end) && - this.allowNoJobs == that.allowNoJobs; - } - } - - static class RequestBuilder extends ActionRequestBuilder { - - RequestBuilder(ElasticsearchClient client) { - super(client, INSTANCE, new Request()); - } - } - - public static class Response extends ActionResponse implements ToXContentObject { - - private QueryPage overallBuckets; - - Response() { - overallBuckets = new QueryPage<>(Collections.emptyList(), 0, OverallBucket.RESULTS_FIELD); - } - - Response(QueryPage overallBuckets) { - this.overallBuckets = overallBuckets; - } - - public QueryPage getOverallBuckets() { - return overallBuckets; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - overallBuckets = new QueryPage<>(in, OverallBucket::new); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - overallBuckets.writeTo(out); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - overallBuckets.doXContentBody(builder, params); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(overallBuckets); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - Response other = 
(Response) obj; - return Objects.equals(overallBuckets, other.overallBuckets); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - } - - public static class TransportAction extends HandledTransportAction { - - private static final String EARLIEST_TIME = "earliest_time"; - private static final String LATEST_TIME = "latest_time"; - - private final Client client; - private final ClusterService clusterService; - private final JobManager jobManager; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - ClusterService clusterService, JobManager jobManager, Client client) { - super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new); - this.clusterService = clusterService; - this.client = client; - this.jobManager = jobManager; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - QueryPage jobsPage = jobManager.expandJobs(request.getJobId(), request.allowNoJobs(), clusterService.state()); - if (jobsPage.count() == 0) { - listener.onResponse(new Response()); - return; - } - - // As computing and potentially aggregating overall buckets might take a while, - // we run in a different thread to avoid blocking the network thread. 
- threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> { - try { - getOverallBuckets(request, jobsPage.results(), listener); - } catch (Exception e) { - listener.onFailure(e); - } - }); - } - - private void getOverallBuckets(Request request, List jobs, ActionListener listener) { - JobsContext jobsContext = JobsContext.build(jobs, request); - - ActionListener> overallBucketsListener = ActionListener.wrap(overallBuckets -> { - listener.onResponse(new Response(new QueryPage<>(overallBuckets, overallBuckets.size(), OverallBucket.RESULTS_FIELD))); - }, listener::onFailure); - - ActionListener chunkedBucketSearcherListener = ActionListener.wrap(searcher -> { - if (searcher == null) { - listener.onResponse(new Response()); - return; - } - searcher.searchAndComputeOverallBuckets(overallBucketsListener); - }, listener::onFailure); - - OverallBucketsProvider overallBucketsProvider = new OverallBucketsProvider(jobsContext.maxBucketSpan, request.getTopN(), - request.getOverallScore()); - OverallBucketsProcessor overallBucketsProcessor = requiresAggregation(request, jobsContext.maxBucketSpan) ? 
- new OverallBucketsAggregator(request.getBucketSpan()): new OverallBucketsCollector(); - initChunkedBucketSearcher(request, jobsContext, overallBucketsProvider, overallBucketsProcessor, chunkedBucketSearcherListener); - } - - private static boolean requiresAggregation(Request request, TimeValue maxBucketSpan) { - return request.getBucketSpan() != null && !request.getBucketSpan().equals(maxBucketSpan); - } - - private static void checkValidBucketSpan(TimeValue bucketSpan, TimeValue maxBucketSpan) { - if (bucketSpan != null && bucketSpan.compareTo(maxBucketSpan) < 0) { - throw ExceptionsHelper.badRequestException("Param [{}] must be greater or equal to the max bucket_span [{}]", - Request.BUCKET_SPAN, maxBucketSpan.getStringRep()); - } - } - - private void initChunkedBucketSearcher(Request request, JobsContext jobsContext, OverallBucketsProvider overallBucketsProvider, - OverallBucketsProcessor overallBucketsProcessor, - ActionListener listener) { - long maxBucketSpanMillis = jobsContext.maxBucketSpan.millis(); - SearchRequest searchRequest = buildSearchRequest(request.getStart(), request.getEnd(), request.isExcludeInterim(), - maxBucketSpanMillis, jobsContext.indices); - searchRequest.source().aggregation(AggregationBuilders.min(EARLIEST_TIME).field(Result.TIMESTAMP.getPreferredName())); - searchRequest.source().aggregation(AggregationBuilders.max(LATEST_TIME).field(Result.TIMESTAMP.getPreferredName())); - executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest, - ActionListener.wrap(searchResponse -> { - long totalHits = searchResponse.getHits().getTotalHits(); - if (totalHits > 0) { - Aggregations aggregations = searchResponse.getAggregations(); - Min min = aggregations.get(EARLIEST_TIME); - long earliestTime = Intervals.alignToFloor((long) min.getValue(), maxBucketSpanMillis); - Max max = aggregations.get(LATEST_TIME); - long latestTime = Intervals.alignToCeil((long) max.getValue() + 1, maxBucketSpanMillis); - 
listener.onResponse(new ChunkedBucketSearcher(jobsContext, earliestTime, latestTime, request.isExcludeInterim(), - overallBucketsProvider, overallBucketsProcessor)); - } else { - listener.onResponse(null); - } - }, listener::onFailure), - client::search); - } - - private static class JobsContext { - private final int jobCount; - private final String[] indices; - private final TimeValue maxBucketSpan; - - private JobsContext(int jobCount, String[] indices, TimeValue maxBucketSpan) { - this.jobCount = jobCount; - this.indices = indices; - this.maxBucketSpan = maxBucketSpan; - } - - private static JobsContext build(List jobs, Request request) { - Set indices = new HashSet<>(); - TimeValue maxBucketSpan = TimeValue.ZERO; - for (Job job : jobs) { - indices.add(AnomalyDetectorsIndex.jobResultsAliasedName(job.getId())); - TimeValue bucketSpan = job.getAnalysisConfig().getBucketSpan(); - if (maxBucketSpan.compareTo(bucketSpan) < 0) { - maxBucketSpan = bucketSpan; - } - } - checkValidBucketSpan(request.getBucketSpan(), maxBucketSpan); - - // If top_n is 1, we can use the request bucket_span in order to optimize the aggregations - if (request.getBucketSpan() != null && (request.getTopN() == 1 || jobs.size() <= 1)) { - maxBucketSpan = request.getBucketSpan(); - } - - return new JobsContext(jobs.size(), indices.toArray(new String[indices.size()]), maxBucketSpan); - } - } - - private class ChunkedBucketSearcher { - - private static final int BUCKETS_PER_CHUNK = 1000; - private static final int MAX_RESULT_COUNT = 10000; - - private final String[] indices; - private final long maxBucketSpanMillis; - private final boolean excludeInterim; - private final long chunkMillis; - private final long endTime; - private volatile long curTime; - private final AggregationBuilder aggs; - private final OverallBucketsProvider overallBucketsProvider; - private final OverallBucketsProcessor overallBucketsProcessor; - - ChunkedBucketSearcher(JobsContext jobsContext, long startTime, long endTime, - 
boolean excludeInterim, OverallBucketsProvider overallBucketsProvider, - OverallBucketsProcessor overallBucketsProcessor) { - this.indices = jobsContext.indices; - this.maxBucketSpanMillis = jobsContext.maxBucketSpan.millis(); - this.chunkMillis = BUCKETS_PER_CHUNK * maxBucketSpanMillis; - this.endTime = endTime; - this.curTime = startTime; - this.excludeInterim = excludeInterim; - this.aggs = buildAggregations(maxBucketSpanMillis, jobsContext.jobCount); - this.overallBucketsProvider = overallBucketsProvider; - this.overallBucketsProcessor = overallBucketsProcessor; - } - - void searchAndComputeOverallBuckets(ActionListener> listener) { - if (curTime >= endTime) { - listener.onResponse(overallBucketsProcessor.finish()); - return; - } - executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, nextSearch(), - ActionListener.wrap(searchResponse -> { - Histogram histogram = searchResponse.getAggregations().get(Result.TIMESTAMP.getPreferredName()); - overallBucketsProcessor.process(overallBucketsProvider.computeOverallBuckets(histogram)); - if (overallBucketsProcessor.size() > MAX_RESULT_COUNT) { - listener.onFailure( - ExceptionsHelper.badRequestException("Unable to return more than [{}] results; please use " + - "parameters [{}] and [{}] to limit the time range", MAX_RESULT_COUNT, Request.START, Request.END)); - return; - } - searchAndComputeOverallBuckets(listener); - }, listener::onFailure), - client::search); - } - - SearchRequest nextSearch() { - long curEnd = Math.min(curTime + chunkMillis, endTime); - logger.debug("Search for buckets in: [{}, {})", curTime, curEnd); - SearchRequest searchRequest = buildSearchRequest(curTime, curEnd, excludeInterim, maxBucketSpanMillis, indices); - searchRequest.source().aggregation(aggs); - curTime += chunkMillis; - return searchRequest; - } - } - - private static SearchRequest buildSearchRequest(Long start, Long end, boolean excludeInterim, long bucketSpanMillis, - String[] indices) { - String startTime = start 
== null ? null : String.valueOf(Intervals.alignToCeil(start, bucketSpanMillis)); - String endTime = end == null ? null : String.valueOf(Intervals.alignToFloor(end, bucketSpanMillis)); - - SearchSourceBuilder searchSourceBuilder = new BucketsQueryBuilder() - .size(0) - .includeInterim(excludeInterim == false) - .start(startTime) - .end(endTime) - .build(); - - SearchRequest searchRequest = new SearchRequest(indices); - searchRequest.indicesOptions(JobProvider.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)); - searchRequest.source(searchSourceBuilder); - return searchRequest; - } - - private static AggregationBuilder buildAggregations(long maxBucketSpanMillis, int jobCount) { - AggregationBuilder overallScoreAgg = AggregationBuilders.max(OverallBucket.OVERALL_SCORE.getPreferredName()) - .field(Bucket.ANOMALY_SCORE.getPreferredName()); - AggregationBuilder jobsAgg = AggregationBuilders.terms(Job.ID.getPreferredName()) - .field(Job.ID.getPreferredName()).size(jobCount).subAggregation(overallScoreAgg); - AggregationBuilder interimAgg = AggregationBuilders.max(Result.IS_INTERIM.getPreferredName()) - .field(Result.IS_INTERIM.getPreferredName()); - return AggregationBuilders.dateHistogram(Result.TIMESTAMP.getPreferredName()) - .field(Result.TIMESTAMP.getPreferredName()) - .interval(maxBucketSpanMillis) - .subAggregation(jobsAgg) - .subAggregation(interimAgg); - } - } -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/KillProcessAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/KillProcessAction.java deleted file mode 100644 index 8211027a784..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/KillProcessAction.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.action; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.tasks.BaseTasksResponse; -import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.MlMetadata; -import org.elasticsearch.xpack.ml.job.messages.Messages; -import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; -import org.elasticsearch.xpack.ml.notifications.Auditor; -import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; - -import java.io.IOException; -import java.util.Objects; - -public class KillProcessAction extends Action { - - public static final KillProcessAction INSTANCE = new KillProcessAction(); - public static final String NAME = "cluster:internal/xpack/ml/job/kill/process"; - - private KillProcessAction() { - super(NAME); - } - - @Override - public RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client, this); - } - - 
@Override - public Response newResponse() { - return new Response(); - } - - static class RequestBuilder extends ActionRequestBuilder { - - RequestBuilder(ElasticsearchClient client, KillProcessAction action) { - super(client, action, new Request()); - } - } - - public static class Request extends TransportJobTaskAction.JobTaskRequest { - - public Request(String jobId) { - super(jobId); - } - - Request() { - super(); - } - } - - public static class Response extends BaseTasksResponse implements Writeable { - - private boolean killed; - - Response() { - super(null, null); - } - - Response(StreamInput in) throws IOException { - super(null, null); - readFrom(in); - } - - Response(boolean killed) { - super(null, null); - this.killed = killed; - } - - public boolean isKilled() { - return killed; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - killed = in.readBoolean(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeBoolean(killed); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Response response = (Response) o; - return killed == response.killed; - } - - @Override - public int hashCode() { - return Objects.hash(killed); - } - } - - public static class TransportAction extends TransportJobTaskAction { - - private final Auditor auditor; - - @Inject - public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ClusterService clusterService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - AutodetectProcessManager processManager, Auditor auditor) { - super(settings, NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, - Request::new, Response::new, MachineLearning.UTILITY_THREAD_POOL_NAME, processManager); - this.auditor = auditor; - } - - 
@Override - protected void taskOperation(Request request, OpenJobAction.JobTask jobTask, ActionListener listener) { - logger.info("[{}] Killing job", jobTask.getJobId()); - auditor.info(jobTask.getJobId(), Messages.JOB_AUDIT_KILLING); - - try { - processManager.killProcess(jobTask, true, null); - listener.onResponse(new Response(true)); - } catch (Exception e) { - listener.onFailure(e); - } - } - - @Override - protected void doExecute(Task task, Request request, ActionListener listener) { - DiscoveryNodes nodes = clusterService.state().nodes(); - PersistentTasksCustomMetaData tasks = clusterService.state().getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - PersistentTasksCustomMetaData.PersistentTask jobTask = MlMetadata.getJobTask(request.getJobId(), tasks); - if (jobTask == null || jobTask.getExecutorNode() == null) { - logger.debug("[{}] Cannot kill the process because job is not open", request.getJobId()); - listener.onResponse(new Response(false)); - return; - } - - DiscoveryNode executorNode = nodes.get(jobTask.getExecutorNode()); - if (executorNode == null) { - listener.onFailure(ExceptionsHelper.conflictStatusException("Cannot kill process for job {} as" + - "executor node {} cannot be found", request.getJobId(), jobTask.getExecutorNode())); - return; - } - - Version nodeVersion = executorNode.getVersion(); - if (nodeVersion.before(Version.V_5_5_0)) { - listener.onFailure(new ElasticsearchException("Cannot kill the process on node with version " + nodeVersion)); - return; - } - - super.doExecute(task, request, listener); - } - - - @Override - protected Response readTaskResponse(StreamInput in) throws IOException { - return new Response(in); - } - } -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PreviewDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PreviewDatafeedAction.java deleted file mode 100644 index b6c5c3b54fa..00000000000 --- 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PreviewDatafeedAction.java +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml.action; - -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MlClientHelper; -import org.elasticsearch.xpack.ml.MlMetadata; -import org.elasticsearch.xpack.ml.datafeed.ChunkingConfig; -import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; -import 
org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; -import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.stream.Collectors; - -public class PreviewDatafeedAction extends Action { - - public static final PreviewDatafeedAction INSTANCE = new PreviewDatafeedAction(); - public static final String NAME = "cluster:admin/xpack/ml/datafeeds/preview"; - - private PreviewDatafeedAction() { - super(NAME); - } - - @Override - public RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client); - } - - @Override - public Response newResponse() { - return new Response(); - } - - public static class Request extends ActionRequest implements ToXContentObject { - - private String datafeedId; - - Request() { - } - - public Request(String datafeedId) { - setDatafeedId(datafeedId); - } - - public String getDatafeedId() { - return datafeedId; - } - - public final void setDatafeedId(String datafeedId) { - this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - datafeedId = in.readString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(datafeedId); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); - builder.endObject(); - return builder; - } - - @Override - 
public int hashCode() { - return Objects.hash(datafeedId); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - Request other = (Request) obj; - return Objects.equals(datafeedId, other.datafeedId); - } - } - - static class RequestBuilder extends ActionRequestBuilder { - - RequestBuilder(ElasticsearchClient client) { - super(client, INSTANCE, new Request()); - } - } - - public static class Response extends ActionResponse implements ToXContentObject { - - private BytesReference preview; - - Response() { - } - - Response(BytesReference preview) { - this.preview = preview; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - preview = in.readBytesReference(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeBytesReference(preview); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.rawValue(preview, XContentType.JSON); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(preview); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - Response other = (Response) obj; - return Objects.equals(preview, other.preview); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - } - - public static class TransportAction extends HandledTransportAction { - - private final Client client; - private final ClusterService clusterService; - - @Inject - public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Client client, - ClusterService clusterService) { - super(settings, NAME, threadPool, 
transportService, actionFilters, indexNameExpressionResolver, Request::new); - this.client = client; - this.clusterService = clusterService; - } - - @Override - protected void doExecute(Request request, ActionListener listener) { - MlMetadata mlMetadata = clusterService.state().getMetaData().custom(MlMetadata.TYPE); - DatafeedConfig datafeed = mlMetadata.getDatafeed(request.getDatafeedId()); - if (datafeed == null) { - throw ExceptionsHelper.missingDatafeedException(request.getDatafeedId()); - } - Job job = mlMetadata.getJobs().get(datafeed.getJobId()); - if (job == null) { - throw ExceptionsHelper.missingJobException(datafeed.getJobId()); - } - DatafeedConfig.Builder datafeedWithAutoChunking = new DatafeedConfig.Builder(datafeed); - datafeedWithAutoChunking.setChunkingConfig(ChunkingConfig.newAuto()); - Map headers = threadPool.getThreadContext().getHeaders().entrySet().stream() - .filter(e -> MlClientHelper.SECURITY_HEADER_FILTERS.contains(e.getKey())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - datafeedWithAutoChunking.setHeaders(headers); - // NB: this is using the client from the transport layer, NOT the internal client. - // This is important because it means the datafeed search will fail if the user - // requesting the preview doesn't have permission to search the relevant indices. 
- DataExtractorFactory.create(client, datafeedWithAutoChunking.build(), job, new ActionListener() { - @Override - public void onResponse(DataExtractorFactory dataExtractorFactory) { - DataExtractor dataExtractor = dataExtractorFactory.newExtractor(0, Long.MAX_VALUE); - threadPool.generic().execute(() -> previewDatafeed(dataExtractor, listener)); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); - - } - - /** Visible for testing */ - static void previewDatafeed(DataExtractor dataExtractor, ActionListener listener) { - try { - Optional inputStream = dataExtractor.next(); - // DataExtractor returns single-line JSON but without newline characters between objects. - // Instead, it has a space between objects due to how JSON XContenetBuilder works. - // In order to return a proper JSON array from preview, we surround with square brackets and - // we stick in a comma between objects. - // Also, the stream is expected to be a single line but in case it is not, we join lines - // using space to ensure the comma insertion works correctly. 
- StringBuilder responseBuilder = new StringBuilder("["); - if (inputStream.isPresent()) { - try (BufferedReader buffer = new BufferedReader(new InputStreamReader(inputStream.get(), StandardCharsets.UTF_8))) { - responseBuilder.append(buffer.lines().collect(Collectors.joining(" ")).replace("} {", "},{")); - } - } - responseBuilder.append("]"); - listener.onResponse(new Response(new BytesArray(responseBuilder.toString().getBytes(StandardCharsets.UTF_8)))); - } catch (Exception e) { - listener.onFailure(e); - } finally { - dataExtractor.cancel(); - } - } - } -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java deleted file mode 100644 index 0175e71c50f..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java +++ /dev/null @@ -1,313 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.action; - -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.search.SearchAction; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; -import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.AckedClusterStateUpdateTask; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.license.LicenseUtils; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.XPackPlugin; -import org.elasticsearch.xpack.XPackSettings; -import org.elasticsearch.xpack.ml.MlMetadata; 
-import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.security.SecurityContext; -import org.elasticsearch.xpack.security.action.user.HasPrivilegesAction; -import org.elasticsearch.xpack.security.action.user.HasPrivilegesRequest; -import org.elasticsearch.xpack.security.action.user.HasPrivilegesResponse; -import org.elasticsearch.xpack.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.security.support.Exceptions; - -import java.io.IOException; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -public class PutDatafeedAction extends Action { - - public static final PutDatafeedAction INSTANCE = new PutDatafeedAction(); - public static final String NAME = "cluster:admin/xpack/ml/datafeeds/put"; - - private PutDatafeedAction() { - super(NAME); - } - - @Override - public RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client, this); - } - - @Override - public Response newResponse() { - return new Response(); - } - - public static class Request extends AcknowledgedRequest implements ToXContentObject { - - public static Request parseRequest(String datafeedId, XContentParser parser) { - DatafeedConfig.Builder datafeed = DatafeedConfig.CONFIG_PARSER.apply(parser, null); - datafeed.setId(datafeedId); - return new Request(datafeed.build()); - } - - private DatafeedConfig datafeed; - - public Request(DatafeedConfig datafeed) { - this.datafeed = datafeed; - } - - Request() { - } - - public DatafeedConfig getDatafeed() { - return datafeed; - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - datafeed = new DatafeedConfig(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - datafeed.writeTo(out); - } - - @Override - public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { - datafeed.toXContent(builder, params); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Request request = (Request) o; - return Objects.equals(datafeed, request.datafeed); - } - - @Override - public int hashCode() { - return Objects.hash(datafeed); - } - } - - public static class RequestBuilder extends MasterNodeOperationRequestBuilder { - - public RequestBuilder(ElasticsearchClient client, PutDatafeedAction action) { - super(client, action, new Request()); - } - } - - public static class Response extends AcknowledgedResponse implements ToXContentObject { - - private DatafeedConfig datafeed; - - public Response(boolean acked, DatafeedConfig datafeed) { - super(acked); - this.datafeed = datafeed; - } - - Response() { - } - - public DatafeedConfig getResponse() { - return datafeed; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - readAcknowledged(in); - datafeed = new DatafeedConfig(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - writeAcknowledged(out); - datafeed.writeTo(out); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - datafeed.doXContentBody(builder, params); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Response response = (Response) o; - return Objects.equals(datafeed, response.datafeed); - } - - @Override - public int hashCode() { - return Objects.hash(datafeed); - } - } - - public static class TransportAction extends TransportMasterNodeAction { - - private final XPackLicenseState licenseState; - private final Client client; - 
private final boolean securityEnabled; - private final SecurityContext securityContext; - - @Inject - public TransportAction(Settings settings, TransportService transportService, - ClusterService clusterService, ThreadPool threadPool, Client client, - XPackLicenseState licenseState, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, PutDatafeedAction.NAME, transportService, clusterService, threadPool, - actionFilters, indexNameExpressionResolver, Request::new); - this.licenseState = licenseState; - this.client = client; - this.securityEnabled = XPackSettings.SECURITY_ENABLED.get(settings); - this.securityContext = securityEnabled ? new SecurityContext(settings, threadPool.getThreadContext()) : null; - } - - @Override - protected String executor() { - return ThreadPool.Names.SAME; - } - - @Override - protected Response newResponse() { - return new Response(); - } - - @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) { - // If security is enabled only create the datafeed if the user requesting creation has - // permission to read the indices the datafeed is going to read from - if (securityEnabled) { - final String username = securityContext.getUser().principal(); - ActionListener privResponseListener = ActionListener.wrap( - r -> handlePrivsResponse(username, request, r, listener), - listener::onFailure); - - HasPrivilegesRequest privRequest = new HasPrivilegesRequest(); - privRequest.username(username); - privRequest.clusterPrivileges(Strings.EMPTY_ARRAY); - // We just check for permission to use the search action. In reality we'll also - // use the scroll action, but that's considered an implementation detail. 
- privRequest.indexPrivileges(RoleDescriptor.IndicesPrivileges.builder() - .indices(request.getDatafeed().getIndices().toArray(new String[0])) - .privileges(SearchAction.NAME) - .build()); - - client.execute(HasPrivilegesAction.INSTANCE, privRequest, privResponseListener); - } else { - putDatafeed(request, listener); - } - } - - private void handlePrivsResponse(String username, Request request, - HasPrivilegesResponse response, - ActionListener listener) throws IOException { - if (response.isCompleteMatch()) { - putDatafeed(request, listener); - } else { - XContentBuilder builder = JsonXContent.contentBuilder(); - builder.startObject(); - for (HasPrivilegesResponse.IndexPrivileges index : response.getIndexPrivileges()) { - builder.field(index.getIndex()); - builder.map(index.getPrivileges()); - } - builder.endObject(); - - listener.onFailure(Exceptions.authorizationError("Cannot create datafeed [{}]" + - " because user {} lacks permissions on the indices to be" + - " searched: {}", - request.getDatafeed().getId(), username, builder.string())); - } - } - - private void putDatafeed(Request request, ActionListener listener) { - - clusterService.submitStateUpdateTask( - "put-datafeed-" + request.getDatafeed().getId(), - new AckedClusterStateUpdateTask(request, listener) { - - @Override - protected Response newResponse(boolean acknowledged) { - if (acknowledged) { - logger.info("Created datafeed [{}]", request.getDatafeed().getId()); - } - return new Response(acknowledged, request.getDatafeed()); - } - - @Override - public ClusterState execute(ClusterState currentState) { - return putDatafeed(request, currentState); - } - }); - } - - private ClusterState putDatafeed(Request request, ClusterState clusterState) { - MlMetadata currentMetadata = clusterState.getMetaData().custom(MlMetadata.TYPE); - MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata) - .putDatafeed(request.getDatafeed(), threadPool.getThreadContext()).build(); - return 
ClusterState.builder(clusterState).metaData( - MetaData.builder(clusterState.getMetaData()).putCustom(MlMetadata.TYPE, newMetadata).build()) - .build(); - } - - @Override - protected ClusterBlockException checkBlock(Request request, ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); - } - - @Override - protected void doExecute(Task task, Request request, ActionListener listener) { - if (licenseState.isMachineLearningAllowed()) { - super.doExecute(task, request, listener); - } else { - listener.onFailure(LicenseUtils.newComplianceException(XPackPlugin.MACHINE_LEARNING)); - } - } - } -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/StartDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/StartDatafeedAction.java deleted file mode 100644 index 9924d650b9e..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/StartDatafeedAction.java +++ /dev/null @@ -1,585 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.action; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.ResourceAlreadyExistsException; -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.ValidateActions; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.MasterNodeRequest; -import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.joda.DateMathParser; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.ObjectParser; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.license.LicenseUtils; -import org.elasticsearch.license.XPackLicenseState; -import 
org.elasticsearch.rest.RestStatus; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.XPackPlugin; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.MlMetadata; -import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.ml.datafeed.DatafeedJobValidator; -import org.elasticsearch.xpack.ml.datafeed.DatafeedManager; -import org.elasticsearch.xpack.ml.datafeed.DatafeedNodeSelector; -import org.elasticsearch.xpack.ml.datafeed.DatafeedState; -import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; -import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.config.JobState; -import org.elasticsearch.xpack.ml.job.messages.Messages; -import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.persistent.AllocatedPersistentTask; -import org.elasticsearch.xpack.persistent.PersistentTaskParams; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.Assignment; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.PersistentTask; -import org.elasticsearch.xpack.persistent.PersistentTasksExecutor; -import org.elasticsearch.xpack.persistent.PersistentTasksService; -import org.elasticsearch.xpack.persistent.PersistentTasksService.WaitForPersistentTaskStatusListener; - -import java.io.IOException; -import java.util.Objects; -import java.util.function.LongSupplier; -import java.util.function.Predicate; - -public class StartDatafeedAction - extends Action { - - public static final ParseField START_TIME = new ParseField("start"); - public static final ParseField END_TIME = new ParseField("end"); - public static final ParseField TIMEOUT = new ParseField("timeout"); - - 
public static final StartDatafeedAction INSTANCE = new StartDatafeedAction(); - public static final String NAME = "cluster:admin/xpack/ml/datafeed/start"; - public static final String TASK_NAME = "xpack/ml/datafeed"; - - private StartDatafeedAction() { - super(NAME); - } - - @Override - public RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client, this); - } - - @Override - public Response newResponse() { - return new Response(); - } - - public static class Request extends MasterNodeRequest implements ToXContentObject { - - public static Request fromXContent(XContentParser parser) { - return parseRequest(null, parser); - } - - public static Request parseRequest(String datafeedId, XContentParser parser) { - DatafeedParams params = DatafeedParams.PARSER.apply(parser, null); - if (datafeedId != null) { - params.datafeedId = datafeedId; - } - return new Request(params); - } - - private DatafeedParams params; - - public Request(String datafeedId, long startTime) { - this.params = new DatafeedParams(datafeedId, startTime); - } - - public Request(String datafeedId, String startTime) { - this.params = new DatafeedParams(datafeedId, startTime); - } - - public Request(DatafeedParams params) { - this.params = params; - } - - public Request(StreamInput in) throws IOException { - readFrom(in); - } - - Request() { - } - - public DatafeedParams getParams() { - return params; - } - - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException e = null; - if (params.endTime != null && params.endTime <= params.startTime) { - e = ValidateActions.addValidationError(START_TIME.getPreferredName() + " [" - + params.startTime + "] must be earlier than " + END_TIME.getPreferredName() - + " [" + params.endTime + "]", e); - } - return e; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - params = new DatafeedParams(in); - } - - @Override - public void 
writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - params.writeTo(out); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - this.params.toXContent(builder, params); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(params); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - Request other = (Request) obj; - return Objects.equals(params, other.params); - } - } - - public static class DatafeedParams implements PersistentTaskParams { - - public static ObjectParser PARSER = new ObjectParser<>(TASK_NAME, DatafeedParams::new); - - static { - PARSER.declareString((params, datafeedId) -> params.datafeedId = datafeedId, DatafeedConfig.ID); - PARSER.declareString((params, startTime) -> params.startTime = parseDateOrThrow( - startTime, START_TIME, System::currentTimeMillis), START_TIME); - PARSER.declareString(DatafeedParams::setEndTime, END_TIME); - PARSER.declareString((params, val) -> - params.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); - } - - static long parseDateOrThrow(String date, ParseField paramName, LongSupplier now) { - DateMathParser dateMathParser = new DateMathParser(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER); - - try { - return dateMathParser.parse(date, now); - } catch (Exception e) { - String msg = Messages.getMessage(Messages.REST_INVALID_DATETIME_PARAMS, paramName.getPreferredName(), date); - throw new ElasticsearchParseException(msg, e); - } - } - - public static DatafeedParams fromXContent(XContentParser parser) { - return parseRequest(null, parser); - } - - public static DatafeedParams parseRequest(String datafeedId, XContentParser parser) { - DatafeedParams params = PARSER.apply(parser, null); - if (datafeedId != null) { - params.datafeedId = datafeedId; - } - return params; - } - - public 
DatafeedParams(String datafeedId, long startTime) { - this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); - this.startTime = startTime; - } - - public DatafeedParams(String datafeedId, String startTime) { - this(datafeedId, parseDateOrThrow(startTime, START_TIME, System::currentTimeMillis)); - } - - public DatafeedParams(StreamInput in) throws IOException { - datafeedId = in.readString(); - startTime = in.readVLong(); - endTime = in.readOptionalLong(); - timeout = TimeValue.timeValueMillis(in.readVLong()); - } - - DatafeedParams() { - } - - private String datafeedId; - private long startTime; - private Long endTime; - private TimeValue timeout = TimeValue.timeValueSeconds(20); - - public String getDatafeedId() { - return datafeedId; - } - - public long getStartTime() { - return startTime; - } - - public Long getEndTime() { - return endTime; - } - - public void setEndTime(String endTime) { - setEndTime(parseDateOrThrow(endTime, END_TIME, System::currentTimeMillis)); - } - - public void setEndTime(Long endTime) { - this.endTime = endTime; - } - - public TimeValue getTimeout() { - return timeout; - } - - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - @Override - public String getWriteableName() { - return TASK_NAME; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(datafeedId); - out.writeVLong(startTime); - out.writeOptionalLong(endTime); - out.writeVLong(timeout.millis()); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); - builder.field(START_TIME.getPreferredName(), String.valueOf(startTime)); - if (endTime != null) { - builder.field(END_TIME.getPreferredName(), String.valueOf(endTime)); - } - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - builder.endObject(); - 
return builder; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedId, startTime, endTime, timeout); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - DatafeedParams other = (DatafeedParams) obj; - return Objects.equals(datafeedId, other.datafeedId) && - Objects.equals(startTime, other.startTime) && - Objects.equals(endTime, other.endTime) && - Objects.equals(timeout, other.timeout); - } - } - - public static class Response extends AcknowledgedResponse { - public Response() { - super(); - } - - public Response(boolean acknowledged) { - super(acknowledged); - } - - @Override - public void readFrom(StreamInput in) throws IOException { - readAcknowledged(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - writeAcknowledged(out); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AcknowledgedResponse that = (AcknowledgedResponse) o; - return isAcknowledged() == that.isAcknowledged(); - } - - @Override - public int hashCode() { - return Objects.hash(isAcknowledged()); - } - - } - - static class RequestBuilder extends ActionRequestBuilder { - - RequestBuilder(ElasticsearchClient client, StartDatafeedAction action) { - super(client, action, new Request()); - } - } - - public static class DatafeedTask extends AllocatedPersistentTask { - - private final String datafeedId; - private final long startTime; - private final Long endTime; - /* only pck protected for testing */ - volatile DatafeedManager datafeedManager; - - DatafeedTask(long id, String type, String action, TaskId parentTaskId, DatafeedParams params) { - super(id, type, action, "datafeed-" + params.getDatafeedId(), parentTaskId); - this.datafeedId = params.getDatafeedId(); - this.startTime = params.getStartTime(); - this.endTime = params.getEndTime(); - } - - 
public String getDatafeedId() { - return datafeedId; - } - - public long getDatafeedStartTime() { - return startTime; - } - - @Nullable - public Long getEndTime() { - return endTime; - } - - public boolean isLookbackOnly() { - return endTime != null; - } - - @Override - protected void onCancelled() { - // If the persistent task framework wants us to stop then we should do so immediately and - // we should wait for an existing datafeed import to realize we want it to stop. - // Note that this only applied when task cancel is invoked and stop datafeed api doesn't use this. - // Also stop datafeed api will obey the timeout. - stop(getReasonCancelled(), TimeValue.ZERO); - } - - public void stop(String reason, TimeValue timeout) { - if (datafeedManager != null) { - datafeedManager.stopDatafeed(this, reason, timeout); - } - } - - public void isolate() { - if (datafeedManager != null) { - datafeedManager.isolateDatafeed(getAllocationId()); - } - } - } - - // This class extends from TransportMasterNodeAction for cluster state observing purposes. - // The stop datafeed api also redirect the elected master node. - // The master node will wait for the datafeed to be started by checking the persistent task's status and then return. - // To ensure that a subsequent stop datafeed call will see that same task status (and sanity validation doesn't fail) - // both start and stop datafeed apis redirect to the elected master node. - // In case of instability persistent tasks checks may fail and that is ok, in that case all bets are off. - // The start datafeed api is a low through put api, so the fact that we redirect to elected master node shouldn't be an issue. 
- public static class TransportAction extends TransportMasterNodeAction { - - private final Client client; - private final XPackLicenseState licenseState; - private final PersistentTasksService persistentTasksService; - - @Inject - public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ClusterService clusterService, - XPackLicenseState licenseState, PersistentTasksService persistentTasksService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - Client client) { - super(settings, NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, Request::new); - this.licenseState = licenseState; - this.persistentTasksService = persistentTasksService; - this.client = client; - } - - @Override - protected String executor() { - // This api doesn't do heavy or blocking operations (just delegates PersistentTasksService), - // so we can do this on the network thread - return ThreadPool.Names.SAME; - } - - @Override - protected Response newResponse() { - return new Response(); - } - - @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) { - DatafeedParams params = request.params; - if (licenseState.isMachineLearningAllowed()) { - ActionListener> finalListener = new ActionListener>() { - @Override - public void onResponse(PersistentTask persistentTask) { - waitForDatafeedStarted(persistentTask.getId(), params, listener); - } - - @Override - public void onFailure(Exception e) { - if (e instanceof ResourceAlreadyExistsException) { - logger.debug(e); - e = new ElasticsearchStatusException("cannot start datafeed [" + params.getDatafeedId() + - "] because it has already been started", RestStatus.CONFLICT); - } - listener.onFailure(e); - } - }; - - // Verify data extractor factory can be created, then start persistent task - MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE); - 
PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - StartDatafeedAction.validate(params.getDatafeedId(), mlMetadata, tasks); - DatafeedConfig datafeed = mlMetadata.getDatafeed(params.getDatafeedId()); - Job job = mlMetadata.getJobs().get(datafeed.getJobId()); - DataExtractorFactory.create(client, datafeed, job, ActionListener.wrap( - dataExtractorFactory -> persistentTasksService.startPersistentTask(MlMetadata.datafeedTaskId(params.datafeedId), - TASK_NAME, params, finalListener) - , listener::onFailure)); - } else { - listener.onFailure(LicenseUtils.newComplianceException(XPackPlugin.MACHINE_LEARNING)); - } - } - - @Override - protected ClusterBlockException checkBlock(Request request, ClusterState state) { - // We only delegate here to PersistentTasksService, but if there is a metadata writeblock, - // then delagating to PersistentTasksService doesn't make a whole lot of sense, - // because PersistentTasksService will then fail. - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); - } - - void waitForDatafeedStarted(String taskId, DatafeedParams params, ActionListener listener) { - Predicate> predicate = persistentTask -> { - if (persistentTask == null) { - return false; - } - DatafeedState datafeedState = (DatafeedState) persistentTask.getStatus(); - return datafeedState == DatafeedState.STARTED; - }; - persistentTasksService.waitForPersistentTaskStatus(taskId, predicate, params.timeout, - new WaitForPersistentTaskStatusListener() { - @Override - public void onResponse(PersistentTask task) { - listener.onResponse(new Response(true)); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - - @Override - public void onTimeout(TimeValue timeout) { - listener.onFailure(new ElasticsearchException("Starting datafeed [" - + params.getDatafeedId() + "] timed out after [" + timeout + "]")); - } - }); - } - } - - public static class 
StartDatafeedPersistentTasksExecutor extends PersistentTasksExecutor { - private final DatafeedManager datafeedManager; - private final IndexNameExpressionResolver resolver; - - public StartDatafeedPersistentTasksExecutor(Settings settings, DatafeedManager datafeedManager) { - super(settings, TASK_NAME, MachineLearning.UTILITY_THREAD_POOL_NAME); - this.datafeedManager = datafeedManager; - this.resolver = new IndexNameExpressionResolver(settings); - } - - @Override - public Assignment getAssignment(DatafeedParams params, ClusterState clusterState) { - return new DatafeedNodeSelector(clusterState, resolver, params.getDatafeedId()).selectNode(); - } - - @Override - public void validate(DatafeedParams params, ClusterState clusterState) { - MlMetadata mlMetadata = clusterState.metaData().custom(MlMetadata.TYPE); - PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - StartDatafeedAction.validate(params.getDatafeedId(), mlMetadata, tasks); - new DatafeedNodeSelector(clusterState, resolver, params.getDatafeedId()).checkDatafeedTaskCanBeCreated(); - } - - @Override - protected void nodeOperation(AllocatedPersistentTask allocatedPersistentTask, DatafeedParams params, Task.Status status) { - DatafeedTask datafeedTask = (DatafeedTask) allocatedPersistentTask; - datafeedTask.datafeedManager = datafeedManager; - datafeedManager.run(datafeedTask, - (error) -> { - if (error != null) { - datafeedTask.markAsFailed(error); - } else { - datafeedTask.markAsCompleted(); - } - }); - } - - @Override - protected AllocatedPersistentTask createTask(long id, String type, String action, TaskId parentTaskId, - PersistentTask persistentTask) { - return new DatafeedTask(id, type, action, parentTaskId, persistentTask.getParams()); - } - } - - static void validate(String datafeedId, MlMetadata mlMetadata, PersistentTasksCustomMetaData tasks) { - DatafeedConfig datafeed = (mlMetadata == null) ? 
null : mlMetadata.getDatafeed(datafeedId); - if (datafeed == null) { - throw ExceptionsHelper.missingDatafeedException(datafeedId); - } - Job job = mlMetadata.getJobs().get(datafeed.getJobId()); - if (job == null) { - throw ExceptionsHelper.missingJobException(datafeed.getJobId()); - } - DatafeedJobValidator.validate(datafeed, job); - JobState jobState = MlMetadata.getJobState(datafeed.getJobId(), tasks); - if (jobState.isAnyOf(JobState.OPENING, JobState.OPENED) == false) { - throw ExceptionsHelper.conflictStatusException("cannot start datafeed [" + datafeedId + "] because job [" + job.getId() + - "] is " + jobState); - } - } -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/StopDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/StopDatafeedAction.java deleted file mode 100644 index 472025a4447..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/StopDatafeedAction.java +++ /dev/null @@ -1,550 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.action; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.Version; -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionListenerResponseHandler; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.tasks.BaseTasksRequest; -import org.elasticsearch.action.support.tasks.BaseTasksResponse; -import org.elasticsearch.action.support.tasks.TransportTasksAction; -import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; -import org.elasticsearch.common.util.concurrent.AtomicArray; -import org.elasticsearch.common.xcontent.ObjectParser; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.discovery.MasterNotDiscoveredException; -import org.elasticsearch.tasks.Task; 
-import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.MlMetadata; -import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.ml.datafeed.DatafeedState; -import org.elasticsearch.xpack.ml.job.messages.Messages; -import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.PersistentTask; -import org.elasticsearch.xpack.persistent.PersistentTasksService; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Objects; -import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -public class StopDatafeedAction - extends Action { - - public static final StopDatafeedAction INSTANCE = new StopDatafeedAction(); - public static final String NAME = "cluster:admin/xpack/ml/datafeed/stop"; - public static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueMinutes(5); - - private StopDatafeedAction() { - super(NAME); - } - - @Override - public RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client, this); - } - - @Override - public Response newResponse() { - return new Response(); - } - - public static class Request extends BaseTasksRequest implements ToXContentObject { - - public static final ParseField TIMEOUT = new ParseField("timeout"); - public static final ParseField FORCE = new ParseField("force"); - public static final ParseField ALLOW_NO_DATAFEEDS = new ParseField("allow_no_datafeeds"); - - public static ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); - - static { - PARSER.declareString((request, datafeedId) -> request.datafeedId = datafeedId, 
DatafeedConfig.ID); - PARSER.declareString((request, val) -> - request.setStopTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); - PARSER.declareBoolean(Request::setForce, FORCE); - PARSER.declareBoolean(Request::setAllowNoDatafeeds, ALLOW_NO_DATAFEEDS); - } - - public static Request fromXContent(XContentParser parser) { - return parseRequest(null, parser); - } - - public static Request parseRequest(String datafeedId, XContentParser parser) { - Request request = PARSER.apply(parser, null); - if (datafeedId != null) { - request.datafeedId = datafeedId; - } - return request; - } - - private String datafeedId; - private String[] resolvedStartedDatafeedIds; - private TimeValue stopTimeout = DEFAULT_TIMEOUT; - private boolean force = false; - private boolean allowNoDatafeeds = true; - - public Request(String datafeedId) { - this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); - this.resolvedStartedDatafeedIds = new String[] { datafeedId }; - } - - Request() { - } - - private String getDatafeedId() { - return datafeedId; - } - - private String[] getResolvedStartedDatafeedIds() { - return resolvedStartedDatafeedIds; - } - - private void setResolvedStartedDatafeedIds(String[] resolvedStartedDatafeedIds) { - this.resolvedStartedDatafeedIds = resolvedStartedDatafeedIds; - } - - public TimeValue getStopTimeout() { - return stopTimeout; - } - - public void setStopTimeout(TimeValue stopTimeout) { - this.stopTimeout = ExceptionsHelper.requireNonNull(stopTimeout, TIMEOUT.getPreferredName()); - } - - public boolean isForce() { - return force; - } - - public void setForce(boolean force) { - this.force = force; - } - - public boolean allowNoDatafeeds() { - return allowNoDatafeeds; - } - - public void setAllowNoDatafeeds(boolean allowNoDatafeeds) { - this.allowNoDatafeeds = allowNoDatafeeds; - } - - @Override - public boolean match(Task task) { - for (String id : resolvedStartedDatafeedIds) { - String 
expectedDescription = MlMetadata.datafeedTaskId(id); - if (task instanceof StartDatafeedAction.DatafeedTask && expectedDescription.equals(task.getDescription())){ - return true; - } - } - return false; - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - datafeedId = in.readString(); - resolvedStartedDatafeedIds = in.readStringArray(); - stopTimeout = new TimeValue(in); - force = in.readBoolean(); - if (in.getVersion().onOrAfter(Version.V_6_1_0)) { - allowNoDatafeeds = in.readBoolean(); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(datafeedId); - out.writeStringArray(resolvedStartedDatafeedIds); - stopTimeout.writeTo(out); - out.writeBoolean(force); - if (out.getVersion().onOrAfter(Version.V_6_1_0)) { - out.writeBoolean(allowNoDatafeeds); - } - } - - @Override - public int hashCode() { - return Objects.hash(datafeedId, stopTimeout, force, allowNoDatafeeds); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); - builder.field(TIMEOUT.getPreferredName(), stopTimeout.getStringRep()); - builder.field(FORCE.getPreferredName(), force); - builder.field(ALLOW_NO_DATAFEEDS.getPreferredName(), allowNoDatafeeds); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - Request other = (Request) obj; - return Objects.equals(datafeedId, other.datafeedId) && - Objects.equals(stopTimeout, other.stopTimeout) && - Objects.equals(force, other.force) && - Objects.equals(allowNoDatafeeds, other.allowNoDatafeeds); - } - } - - public static class Response extends BaseTasksResponse 
implements Writeable { - - private boolean stopped; - - public Response(boolean stopped) { - super(null, null); - this.stopped = stopped; - } - - public Response(StreamInput in) throws IOException { - super(null, null); - readFrom(in); - } - - public Response() { - super(null, null); - } - - public boolean isStopped() { - return stopped; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - stopped = in.readBoolean(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeBoolean(stopped); - } - } - - static class RequestBuilder extends ActionRequestBuilder { - - RequestBuilder(ElasticsearchClient client, StopDatafeedAction action) { - super(client, action, new Request()); - } - } - - public static class TransportAction extends TransportTasksAction { - - private final PersistentTasksService persistentTasksService; - - @Inject - public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - ClusterService clusterService, PersistentTasksService persistentTasksService) { - super(settings, StopDatafeedAction.NAME, threadPool, clusterService, transportService, actionFilters, - indexNameExpressionResolver, Request::new, Response::new, MachineLearning.UTILITY_THREAD_POOL_NAME); - this.persistentTasksService = persistentTasksService; - } - - @Override - protected void doExecute(Task task, Request request, ActionListener listener) { - final ClusterState state = clusterService.state(); - final DiscoveryNodes nodes = state.nodes(); - if (nodes.isLocalNodeElectedMaster() == false) { - // Delegates stop datafeed to elected master node, so it becomes the coordinating node. - // See comment in StartDatafeedAction.Transport class for more information. 
- if (nodes.getMasterNode() == null) { - listener.onFailure(new MasterNotDiscoveredException("no known master node")); - } else { - transportService.sendRequest(nodes.getMasterNode(), actionName, request, - new ActionListenerResponseHandler<>(listener, Response::new)); - } - } else { - MlMetadata mlMetadata = state.getMetaData().custom(MlMetadata.TYPE); - PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - - List startedDatafeeds = new ArrayList<>(); - List stoppingDatafeeds = new ArrayList<>(); - resolveDataFeedIds(request, mlMetadata, tasks, startedDatafeeds, stoppingDatafeeds); - if (startedDatafeeds.isEmpty() && stoppingDatafeeds.isEmpty()) { - listener.onResponse(new Response(true)); - return; - } - request.setResolvedStartedDatafeedIds(startedDatafeeds.toArray(new String[startedDatafeeds.size()])); - - if (request.force) { - forceStopDatafeed(request, listener, tasks, startedDatafeeds); - } else { - normalStopDatafeed(task, request, listener, tasks, startedDatafeeds, stoppingDatafeeds); - } - } - } - - private void normalStopDatafeed(Task task, Request request, ActionListener listener, - PersistentTasksCustomMetaData tasks, - List startedDatafeeds, List stoppingDatafeeds) { - Set executorNodes = new HashSet<>(); - for (String datafeedId : startedDatafeeds) { - PersistentTask datafeedTask = MlMetadata.getDatafeedTask(datafeedId, tasks); - if (datafeedTask == null || datafeedTask.isAssigned() == false) { - String message = "Cannot stop datafeed [" + datafeedId + "] because the datafeed does not have an assigned node." + - " Use force stop to stop the datafeed"; - listener.onFailure(ExceptionsHelper.conflictStatusException(message)); - return; - } else { - executorNodes.add(datafeedTask.getExecutorNode()); - } - } - - request.setNodes(executorNodes.toArray(new String[executorNodes.size()])); - - // wait for started and stopping datafeeds - // Map datafeedId -> datafeed task Id. 
- List allDataFeedsToWaitFor = Stream.concat( - startedDatafeeds.stream().map(id -> MlMetadata.datafeedTaskId(id)), - stoppingDatafeeds.stream().map(id -> MlMetadata.datafeedTaskId(id))) - .collect(Collectors.toList()); - - ActionListener finalListener = ActionListener.wrap( - r -> waitForDatafeedStopped(allDataFeedsToWaitFor, request, r, listener), - listener::onFailure); - - super.doExecute(task, request, finalListener); - } - - private void forceStopDatafeed(final Request request, final ActionListener listener, - PersistentTasksCustomMetaData tasks, final List startedDatafeeds) { - final AtomicInteger counter = new AtomicInteger(); - final AtomicArray failures = new AtomicArray<>(startedDatafeeds.size()); - - for (String datafeedId : startedDatafeeds) { - PersistentTask datafeedTask = MlMetadata.getDatafeedTask(datafeedId, tasks); - if (datafeedTask != null) { - persistentTasksService.cancelPersistentTask(datafeedTask.getId(), new ActionListener>() { - @Override - public void onResponse(PersistentTask persistentTask) { - if (counter.incrementAndGet() == startedDatafeeds.size()) { - sendResponseOrFailure(request.getDatafeedId(), listener, failures); - } - } - - @Override - public void onFailure(Exception e) { - final int slot = counter.incrementAndGet(); - failures.set(slot - 1, e); - if (slot == startedDatafeeds.size()) { - sendResponseOrFailure(request.getDatafeedId(), listener, failures); - } - } - }); - } else { - String msg = "Requested datafeed [" + request.getDatafeedId() + "] be force-stopped, but " + - "datafeed's task could not be found."; - logger.warn(msg); - final int slot = counter.incrementAndGet(); - failures.set(slot - 1, new RuntimeException(msg)); - if (slot == startedDatafeeds.size()) { - sendResponseOrFailure(request.getDatafeedId(), listener, failures); - } - } - } - } - - @Override - protected void taskOperation(Request request, StartDatafeedAction.DatafeedTask datafeedTaskTask, - ActionListener listener) { - DatafeedState taskStatus = 
DatafeedState.STOPPING; - datafeedTaskTask.updatePersistentStatus(taskStatus, ActionListener.wrap(task -> { - // we need to fork because we are now on a network threadpool - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - - @Override - protected void doRun() throws Exception { - datafeedTaskTask.stop("stop_datafeed (api)", request.getStopTimeout()); - listener.onResponse(new Response(true)); - } - }); - }, - e -> { - if (e instanceof ResourceNotFoundException) { - // the task has disappeared so must have stopped - listener.onResponse(new Response(true)); - } else { - listener.onFailure(e); - } - } - )); - } - - private void sendResponseOrFailure(String datafeedId, ActionListener listener, - AtomicArray failures) { - List catchedExceptions = failures.asList(); - if (catchedExceptions.size() == 0) { - listener.onResponse(new Response(true)); - return; - } - - String msg = "Failed to stop datafeed [" + datafeedId + "] with [" + catchedExceptions.size() - + "] failures, rethrowing last, all Exceptions: [" - + catchedExceptions.stream().map(Exception::getMessage).collect(Collectors.joining(", ")) - + "]"; - - ElasticsearchException e = new ElasticsearchException(msg, - catchedExceptions.get(0)); - listener.onFailure(e); - } - - // Wait for datafeed to be marked as stopped in cluster state, which means the datafeed persistent task has been removed - // This api returns when task has been cancelled, but that doesn't mean the persistent task has been removed from cluster state, - // so wait for that to happen here. 
- void waitForDatafeedStopped(List datafeedPersistentTaskIds, Request request, Response response, - ActionListener listener) { - persistentTasksService.waitForPersistentTasksStatus(persistentTasksCustomMetaData -> { - for (String persistentTaskId: datafeedPersistentTaskIds) { - if (persistentTasksCustomMetaData.getTask(persistentTaskId) != null) { - return false; - } - } - return true; - }, request.getTimeout(), new ActionListener() { - @Override - public void onResponse(Boolean result) { - listener.onResponse(response); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); - } - - @Override - protected Response newResponse(Request request, List tasks, List taskOperationFailures, - List failedNodeExceptions) { - // number of resolved data feeds should be equal to the number of - // tasks, otherwise something went wrong - if (request.getResolvedStartedDatafeedIds().length != tasks.size()) { - if (taskOperationFailures.isEmpty() == false) { - throw org.elasticsearch.ExceptionsHelper - .convertToElastic(taskOperationFailures.get(0).getCause()); - } else if (failedNodeExceptions.isEmpty() == false) { - throw org.elasticsearch.ExceptionsHelper - .convertToElastic(failedNodeExceptions.get(0)); - } else { - // This can happen we the actual task in the node no longer exists, - // which means the datafeed(s) have already been closed. - return new Response(true); - } - } - - return new Response(tasks.stream().allMatch(Response::isStopped)); - } - - @Override - protected Response readTaskResponse(StreamInput in) throws IOException { - return new Response(in); - } - - } - - /** - * Resolve the requested datafeeds and add their IDs to one of the list - * arguments depending on datafeed state. 
- * - * @param request The stop datafeed request - * @param mlMetadata ML Metadata - * @param tasks Persistent task meta data - * @param startedDatafeedIds Started datafeed ids are added to this list - * @param stoppingDatafeedIds Stopping datafeed ids are added to this list - */ - static void resolveDataFeedIds(Request request, MlMetadata mlMetadata, - PersistentTasksCustomMetaData tasks, - List startedDatafeedIds, - List stoppingDatafeedIds) { - - Set expandedDatafeedIds = mlMetadata.expandDatafeedIds(request.getDatafeedId(), request.allowNoDatafeeds()); - for (String expandedDatafeedId : expandedDatafeedIds) { - validateDatafeedTask(expandedDatafeedId, mlMetadata); - addDatafeedTaskIdAccordingToState(expandedDatafeedId, MlMetadata.getDatafeedState(expandedDatafeedId, tasks), - startedDatafeedIds, stoppingDatafeedIds); - } - } - - private static void addDatafeedTaskIdAccordingToState(String datafeedId, - DatafeedState datafeedState, - List startedDatafeedIds, - List stoppingDatafeedIds) { - switch (datafeedState) { - case STARTED: - startedDatafeedIds.add(datafeedId); - break; - case STOPPED: - break; - case STOPPING: - stoppingDatafeedIds.add(datafeedId); - break; - default: - break; - } - } - /** - * Validate the stop request. 
- * Throws an {@code ResourceNotFoundException} if there is no datafeed - * with id {@code datafeedId} - * @param datafeedId The datafeed Id - * @param mlMetadata ML meta data - */ - static void validateDatafeedTask(String datafeedId, MlMetadata mlMetadata) { - DatafeedConfig datafeed = mlMetadata.getDatafeed(datafeedId); - if (datafeed == null) { - throw new ResourceNotFoundException(Messages.getMessage(Messages.DATAFEED_NOT_FOUND, datafeedId)); - } - } -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java new file mode 100644 index 00000000000..af732f8c0b1 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java @@ -0,0 +1,426 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.tasks.TransportTasksAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.discovery.MasterNotDiscoveredException; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MLMetadataField; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MlMetadata; +import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.ml.datafeed.DatafeedState; +import org.elasticsearch.xpack.ml.job.config.Job; +import org.elasticsearch.xpack.ml.job.config.JobState; +import org.elasticsearch.xpack.ml.job.config.JobTaskStatus; +import org.elasticsearch.xpack.ml.job.messages.Messages; +import org.elasticsearch.xpack.ml.notifications.Auditor; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; +import 
org.elasticsearch.xpack.persistent.PersistentTasksService; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + +public class TransportCloseJobAction extends TransportTasksAction { + + private final Client client; + private final ClusterService clusterService; + private final Auditor auditor; + private final PersistentTasksService persistentTasksService; + + @Inject + public TransportCloseJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + ClusterService clusterService, Client client, + Auditor auditor, PersistentTasksService persistentTasksService) { + // We fork in innerTaskOperation(...), so we can use ThreadPool.Names.SAME here: + super(settings, CloseJobAction.NAME, threadPool, clusterService, transportService, actionFilters, + indexNameExpressionResolver, CloseJobAction.Request::new, CloseJobAction.Response::new, ThreadPool.Names.SAME); + this.client = client; + this.clusterService = clusterService; + this.auditor = auditor; + this.persistentTasksService = persistentTasksService; + } + + /** + * Resolve the requested jobs and add their IDs to one of the list arguments + * depending on job state. + * + * Opened jobs are added to {@code openJobIds} and closing jobs added to {@code closingJobIds}. Failed jobs are added + * to {@code openJobIds} if allowFailed is set otherwise an exception is thrown. 
+ * @param request The close job request + * @param state Cluster state + * @param openJobIds Opened or failed jobs are added to this list + * @param closingJobIds Closing jobs are added to this list + */ + static void resolveAndValidateJobId(CloseJobAction.Request request, ClusterState state, List openJobIds, + List closingJobIds) { + PersistentTasksCustomMetaData tasksMetaData = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + MlMetadata maybeNull = state.metaData().custom(MLMetadataField.TYPE); + final MlMetadata mlMetadata = (maybeNull == null) ? MlMetadata.EMPTY_METADATA : maybeNull; + + List failedJobs = new ArrayList<>(); + + Consumer jobIdProcessor = id -> { + validateJobAndTaskState(id, mlMetadata, tasksMetaData); + Job job = mlMetadata.getJobs().get(id); + if (job.isDeleted()) { + return; + } + addJobAccordingToState(id, tasksMetaData, openJobIds, closingJobIds, failedJobs); + }; + + Set expandedJobIds = mlMetadata.expandJobIds(request.getJobId(), request.allowNoJobs()); + expandedJobIds.stream().forEach(jobIdProcessor::accept); + if (request.isForce() == false && failedJobs.size() > 0) { + if (expandedJobIds.size() == 1) { + throw ExceptionsHelper.conflictStatusException("cannot close job [{}] because it failed, use force close", + expandedJobIds.iterator().next()); + } + throw ExceptionsHelper.conflictStatusException("one or more jobs have state failed, use force close"); + } + + // allowFailed == true + openJobIds.addAll(failedJobs); + } + + private static void addJobAccordingToState(String jobId, PersistentTasksCustomMetaData tasksMetaData, + List openJobs, List closingJobs, List failedJobs) { + + JobState jobState = MlMetadata.getJobState(jobId, tasksMetaData); + switch (jobState) { + case CLOSING: + closingJobs.add(jobId); + break; + case FAILED: + failedJobs.add(jobId); + break; + case OPENING: + case OPENED: + openJobs.add(jobId); + break; + default: + break; + } + } + + static TransportCloseJobAction.WaitForCloseRequest 
buildWaitForCloseRequest(List openJobIds, List closingJobIds, + PersistentTasksCustomMetaData tasks, Auditor auditor) { + TransportCloseJobAction.WaitForCloseRequest waitForCloseRequest = new TransportCloseJobAction.WaitForCloseRequest(); + + for (String jobId : openJobIds) { + PersistentTasksCustomMetaData.PersistentTask jobTask = MlMetadata.getJobTask(jobId, tasks); + if (jobTask != null) { + auditor.info(jobId, Messages.JOB_AUDIT_CLOSING); + waitForCloseRequest.persistentTaskIds.add(jobTask.getId()); + waitForCloseRequest.jobsToFinalize.add(jobId); + } + } + for (String jobId : closingJobIds) { + PersistentTasksCustomMetaData.PersistentTask jobTask = MlMetadata.getJobTask(jobId, tasks); + if (jobTask != null) { + waitForCloseRequest.persistentTaskIds.add(jobTask.getId()); + } + } + + return waitForCloseRequest; + } + + /** + * Validate the close request. Throws an exception on any of these conditions: + *
      + *
    • If the job does not exist
    • + *
    • If the job has a data feed the feed must be closed first
    • + *
    • If the job is opening
    • + *
    + * + * If the job is already closed an empty Optional is returned. + * @param jobId Job Id + * @param mlMetadata ML MetaData + * @param tasks Persistent tasks + */ + static void validateJobAndTaskState(String jobId, MlMetadata mlMetadata, PersistentTasksCustomMetaData tasks) { + Job job = mlMetadata.getJobs().get(jobId); + if (job == null) { + throw new ResourceNotFoundException("cannot close job, because job [" + jobId + "] does not exist"); + } + + Optional datafeed = mlMetadata.getDatafeedByJobId(jobId); + if (datafeed.isPresent()) { + DatafeedState datafeedState = MlMetadata.getDatafeedState(datafeed.get().getId(), tasks); + if (datafeedState != DatafeedState.STOPPED) { + throw ExceptionsHelper.conflictStatusException("cannot close job [{}], datafeed hasn't been stopped", jobId); + } + } + } + + @Override + protected void doExecute(Task task, CloseJobAction.Request request, ActionListener listener) { + final ClusterState state = clusterService.state(); + final DiscoveryNodes nodes = state.nodes(); + if (request.isLocal() == false && nodes.isLocalNodeElectedMaster() == false) { + // Delegates close job to elected master node, so it becomes the coordinating node. + // See comment in OpenJobAction.Transport class for more information. + if (nodes.getMasterNode() == null) { + listener.onFailure(new MasterNotDiscoveredException("no known master node")); + } else { + transportService.sendRequest(nodes.getMasterNode(), actionName, request, + new ActionListenerResponseHandler<>(listener, CloseJobAction.Response::new)); + } + } else { + /* + * Closing of multiple jobs: + * + * 1. Resolve and validate jobs first: if any job does not meet the + * criteria (e.g. open datafeed), fail immediately, do not close any + * job + * + * 2. Internally a task request is created for every open job, so there + * are n inner tasks for 1 user request + * + * 3. No task is created for closing jobs but those will be waited on + * + * 4. 
Collect n inner task results or failures and send 1 outer + * result/failure + */ + + List openJobIds = new ArrayList<>(); + List closingJobIds = new ArrayList<>(); + resolveAndValidateJobId(request, state, openJobIds, closingJobIds); + request.setOpenJobIds(openJobIds.toArray(new String[0])); + if (openJobIds.isEmpty() && closingJobIds.isEmpty()) { + listener.onResponse(new CloseJobAction.Response(true)); + return; + } + + if (request.isForce() == false) { + Set executorNodes = new HashSet<>(); + PersistentTasksCustomMetaData tasks = state.metaData().custom(PersistentTasksCustomMetaData.TYPE); + for (String resolvedJobId : request.getOpenJobIds()) { + PersistentTasksCustomMetaData.PersistentTask jobTask = MlMetadata.getJobTask(resolvedJobId, tasks); + if (jobTask == null || jobTask.isAssigned() == false) { + String message = "Cannot close job [" + resolvedJobId + "] because the job does not have an assigned node." + + " Use force close to close the job"; + listener.onFailure(ExceptionsHelper.conflictStatusException(message)); + return; + } else { + executorNodes.add(jobTask.getExecutorNode()); + } + } + request.setNodes(executorNodes.toArray(new String[executorNodes.size()])); + } + + if (request.isForce()) { + List jobIdsToForceClose = new ArrayList<>(openJobIds); + jobIdsToForceClose.addAll(closingJobIds); + forceCloseJob(state, request, jobIdsToForceClose, listener); + } else { + normalCloseJob(state, task, request, openJobIds, closingJobIds, listener); + } + } + } + + @Override + protected void taskOperation(CloseJobAction.Request request, TransportOpenJobAction.JobTask jobTask, + ActionListener listener) { + JobTaskStatus taskStatus = new JobTaskStatus(JobState.CLOSING, jobTask.getAllocationId()); + jobTask.updatePersistentStatus(taskStatus, ActionListener.wrap(task -> { + // we need to fork because we are now on a network threadpool and closeJob method may take a while to complete: + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(new 
AbstractRunnable() { + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + + @Override + protected void doRun() throws Exception { + jobTask.closeJob("close job (api)"); + listener.onResponse(new CloseJobAction.Response(true)); + } + }); + }, listener::onFailure)); + } + + @Override + protected CloseJobAction.Response newResponse(CloseJobAction.Request request, List tasks, + List taskOperationFailures, + List failedNodeExceptions) { + + // number of resolved jobs should be equal to the number of tasks, + // otherwise something went wrong + if (request.getOpenJobIds().length != tasks.size()) { + if (taskOperationFailures.isEmpty() == false) { + throw org.elasticsearch.ExceptionsHelper + .convertToElastic(taskOperationFailures.get(0).getCause()); + } else if (failedNodeExceptions.isEmpty() == false) { + throw org.elasticsearch.ExceptionsHelper + .convertToElastic(failedNodeExceptions.get(0)); + } else { + // This can happen we the actual task in the node no longer exists, + // which means the job(s) have already been closed. 
+ return new CloseJobAction.Response(true); + } + } + + return new CloseJobAction.Response(tasks.stream().allMatch(CloseJobAction.Response::isClosed)); + } + + @Override + protected CloseJobAction.Response readTaskResponse(StreamInput in) throws IOException { + return new CloseJobAction.Response(in); + } + + private void forceCloseJob(ClusterState currentState, CloseJobAction.Request request, List jobIdsToForceClose, + ActionListener listener) { + PersistentTasksCustomMetaData tasks = currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + + final int numberOfJobs = jobIdsToForceClose.size(); + final AtomicInteger counter = new AtomicInteger(); + final AtomicArray failures = new AtomicArray<>(numberOfJobs); + + for (String jobId : jobIdsToForceClose) { + PersistentTasksCustomMetaData.PersistentTask jobTask = MlMetadata.getJobTask(jobId, tasks); + if (jobTask != null) { + auditor.info(jobId, Messages.JOB_AUDIT_FORCE_CLOSING); + persistentTasksService.cancelPersistentTask(jobTask.getId(), + new ActionListener>() { + @Override + public void onResponse(PersistentTasksCustomMetaData.PersistentTask task) { + if (counter.incrementAndGet() == numberOfJobs) { + sendResponseOrFailure(request.getJobId(), listener, failures); + } + } + + @Override + public void onFailure(Exception e) { + final int slot = counter.incrementAndGet(); + failures.set(slot - 1, e); + if (slot == numberOfJobs) { + sendResponseOrFailure(request.getJobId(), listener, failures); + } + } + + private void sendResponseOrFailure(String jobId, + ActionListener listener, + AtomicArray failures) { + List catchedExceptions = failures.asList(); + if (catchedExceptions.size() == 0) { + listener.onResponse(new CloseJobAction.Response(true)); + return; + } + + String msg = "Failed to force close job [" + jobId + "] with [" + + catchedExceptions.size() + + "] failures, rethrowing last, all Exceptions: [" + + catchedExceptions.stream().map(Exception::getMessage) + .collect(Collectors.joining(", ")) + 
+ "]"; + + ElasticsearchException e = new ElasticsearchException(msg, + catchedExceptions.get(0)); + listener.onFailure(e); + } + }); + } + } + } + + private void normalCloseJob(ClusterState currentState, Task task, CloseJobAction.Request request, + List openJobIds, List closingJobIds, + ActionListener listener) { + PersistentTasksCustomMetaData tasks = currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + + WaitForCloseRequest waitForCloseRequest = buildWaitForCloseRequest(openJobIds, closingJobIds, tasks, auditor); + + // If there are no open or closing jobs in the request return + if (waitForCloseRequest.hasJobsToWaitFor() == false) { + listener.onResponse(new CloseJobAction.Response(true)); + return; + } + + boolean noOpenJobsToClose = openJobIds.isEmpty(); + if (noOpenJobsToClose) { + // No jobs to close but we still want to wait on closing jobs in the request + waitForJobClosed(request, waitForCloseRequest, new CloseJobAction.Response(true), listener); + return; + } + + ActionListener finalListener = + ActionListener.wrap( + r -> waitForJobClosed(request, waitForCloseRequest, + r, listener), + listener::onFailure); + super.doExecute(task, request, finalListener); + } + + static class WaitForCloseRequest { + List persistentTaskIds = new ArrayList<>(); + List jobsToFinalize = new ArrayList<>(); + + public boolean hasJobsToWaitFor() { + return persistentTaskIds.isEmpty() == false; + } + } + + // Wait for job to be marked as closed in cluster state, which means the job persistent task has been removed + // This api returns when job has been closed, but that doesn't mean the persistent task has been removed from cluster state, + // so wait for that to happen here. 
+ void waitForJobClosed(CloseJobAction.Request request, WaitForCloseRequest waitForCloseRequest, CloseJobAction.Response response, + ActionListener listener) { + persistentTasksService.waitForPersistentTasksStatus(persistentTasksCustomMetaData -> { + for (String persistentTaskId : waitForCloseRequest.persistentTaskIds) { + if (persistentTasksCustomMetaData.getTask(persistentTaskId) != null) { + return false; + } + } + return true; + }, request.getCloseTimeout(), new ActionListener() { + @Override + public void onResponse(Boolean result) { + FinalizeJobExecutionAction.Request finalizeRequest = new FinalizeJobExecutionAction.Request( + waitForCloseRequest.jobsToFinalize.toArray(new String[0])); + executeAsyncWithOrigin(client, ML_ORIGIN, FinalizeJobExecutionAction.INSTANCE, finalizeRequest, + ActionListener.wrap(r -> listener.onResponse(response), listener::onFailure)); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java new file mode 100644 index 00000000000..2375dc6b716 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MlMetaIndex; +import org.elasticsearch.xpack.ml.calendars.Calendar; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + +public class TransportDeleteCalendarAction extends HandledTransportAction { + + private final Client client; + + @Inject + public TransportDeleteCalendarAction(Settings settings, ThreadPool threadPool, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + Client client) { + super(settings, DeleteCalendarAction.NAME, threadPool, transportService, actionFilters, + indexNameExpressionResolver, DeleteCalendarAction.Request::new); + this.client = client; + } + + @Override + protected void doExecute(DeleteCalendarAction.Request request, ActionListener listener) { + + final String calendarId = request.getCalendarId(); + + DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, 
Calendar.documentId(calendarId)); + + BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); + bulkRequestBuilder.add(deleteRequest); + bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), + new ActionListener() { + @Override + public void onResponse(BulkResponse bulkResponse) { + if (bulkResponse.getItems()[0].status() == RestStatus.NOT_FOUND) { + listener.onFailure(new ResourceNotFoundException("Could not delete calendar with ID [" + calendarId + + "] because it does not exist")); + } else { + listener.onResponse(new DeleteCalendarAction.Response(true)); + } + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(ExceptionsHelper.serverError("Could not delete calendar with ID [" + calendarId + "]", e)); + } + }); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java new file mode 100644 index 00000000000..ba27ee13441 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDatafeedAction.java @@ -0,0 +1,137 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.AckedClusterStateUpdateTask; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MLMetadataField; +import org.elasticsearch.xpack.ml.MlMetadata; +import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.xpack.persistent.PersistentTasksService; + +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + +public class TransportDeleteDatafeedAction extends TransportMasterNodeAction { + + private Client client; + private PersistentTasksService persistentTasksService; + + @Inject + public TransportDeleteDatafeedAction(Settings settings, TransportService transportService, ClusterService clusterService, + ThreadPool threadPool, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + Client client, PersistentTasksService persistentTasksService) { + super(settings, DeleteDatafeedAction.NAME, transportService, clusterService, threadPool, actionFilters, + indexNameExpressionResolver, DeleteDatafeedAction.Request::new); + this.client = 
client; + this.persistentTasksService = persistentTasksService; + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected DeleteDatafeedAction.Response newResponse() { + return new DeleteDatafeedAction.Response(); + } + + @Override + protected void masterOperation(DeleteDatafeedAction.Request request, ClusterState state, + ActionListener listener) throws Exception { + if (request.isForce()) { + forceDeleteDatafeed(request, state, listener); + } else { + deleteDatafeedFromMetadata(request, listener); + } + } + + private void forceDeleteDatafeed(DeleteDatafeedAction.Request request, ClusterState state, + ActionListener listener) { + ActionListener finalListener = ActionListener.wrap( + response -> deleteDatafeedFromMetadata(request, listener), + listener::onFailure + ); + + ActionListener isolateDatafeedHandler = ActionListener.wrap( + response -> removeDatafeedTask(request, state, finalListener), + listener::onFailure + ); + + IsolateDatafeedAction.Request isolateDatafeedRequest = new IsolateDatafeedAction.Request(request.getDatafeedId()); + executeAsyncWithOrigin(client, ML_ORIGIN, IsolateDatafeedAction.INSTANCE, isolateDatafeedRequest, isolateDatafeedHandler); + } + + private void removeDatafeedTask(DeleteDatafeedAction.Request request, ClusterState state, ActionListener listener) { + PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + PersistentTasksCustomMetaData.PersistentTask datafeedTask = MlMetadata.getDatafeedTask(request.getDatafeedId(), tasks); + if (datafeedTask == null) { + listener.onResponse(true); + } else { + persistentTasksService.cancelPersistentTask(datafeedTask.getId(), + new ActionListener>() { + @Override + public void onResponse(PersistentTasksCustomMetaData.PersistentTask persistentTask) { + listener.onResponse(Boolean.TRUE); + } + + @Override + public void onFailure(Exception e) { + if (e instanceof ResourceNotFoundException) { + // the 
task has been removed in between + listener.onResponse(true); + } else { + listener.onFailure(e); + } + } + }); + } + } + + private void deleteDatafeedFromMetadata(DeleteDatafeedAction.Request request, ActionListener listener) { + clusterService.submitStateUpdateTask("delete-datafeed-" + request.getDatafeedId(), + new AckedClusterStateUpdateTask(request, listener) { + + @Override + protected DeleteDatafeedAction.Response newResponse(boolean acknowledged) { + return new DeleteDatafeedAction.Response(acknowledged); + } + + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + MlMetadata currentMetadata = currentState.getMetaData().custom(MLMetadataField.TYPE); + PersistentTasksCustomMetaData persistentTasks = + currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata) + .removeDatafeed(request.getDatafeedId(), persistentTasks).build(); + return ClusterState.builder(currentState).metaData( + MetaData.builder(currentState.getMetaData()).putCustom(MLMetadataField.TYPE, newMetadata).build()) + .build(); + } + }); + } + + @Override + protected ClusterBlockException checkBlock(DeleteDatafeedAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java new file mode 100644 index 00000000000..e788c19e5bc --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.MachineLearning;
import org.elasticsearch.xpack.ml.job.retention.ExpiredForecastsRemover;
import org.elasticsearch.xpack.ml.job.retention.ExpiredModelSnapshotsRemover;
import org.elasticsearch.xpack.ml.job.retention.ExpiredResultsRemover;
import org.elasticsearch.xpack.ml.job.retention.MlDataRemover;
import org.elasticsearch.xpack.ml.notifications.Auditor;
import org.elasticsearch.xpack.ml.utils.VolatileCursorIterator;

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

/**
 * Transport action that runs all ML data-retention removers (expired results,
 * forecasts and model snapshots) sequentially on the utility thread pool.
 */
public class TransportDeleteExpiredDataAction extends
        HandledTransportAction<DeleteExpiredDataAction.Request, DeleteExpiredDataAction.Response> {

    private final Client client;
    private final ClusterService clusterService;

    @Inject
    public TransportDeleteExpiredDataAction(Settings settings, ThreadPool threadPool, TransportService transportService,
                                            ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                                            Client client, ClusterService clusterService) {
        super(settings, DeleteExpiredDataAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
                DeleteExpiredDataAction.Request::new);
        this.client = client;
        this.clusterService = clusterService;
    }

    @Override
    protected void doExecute(DeleteExpiredDataAction.Request request,
                             ActionListener<DeleteExpiredDataAction.Response> listener) {
        logger.info("Deleting expired data");
        // Removal can take a while, so run it off the transport thread.
        threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> deleteExpiredData(listener));
    }

    private void deleteExpiredData(ActionListener<DeleteExpiredDataAction.Response> listener) {
        Auditor auditor = new Auditor(client, clusterService);
        List<MlDataRemover> dataRemovers = Arrays.asList(
                new ExpiredResultsRemover(client, clusterService, auditor),
                new ExpiredForecastsRemover(client),
                new ExpiredModelSnapshotsRemover(client, clusterService)
        );
        Iterator<MlDataRemover> dataRemoversIterator = new VolatileCursorIterator<>(dataRemovers);
        deleteExpiredData(dataRemoversIterator, listener);
    }

    /**
     * Runs each remover in turn; the recursion advances only after the current
     * remover's async removal has completed, so removers never run concurrently.
     */
    private void deleteExpiredData(Iterator<MlDataRemover> mlDataRemoversIterator,
                                   ActionListener<DeleteExpiredDataAction.Response> listener) {
        if (mlDataRemoversIterator.hasNext()) {
            MlDataRemover remover = mlDataRemoversIterator.next();
            remover.remove(ActionListener.wrap(
                    booleanResponse -> deleteExpiredData(mlDataRemoversIterator, listener),
                    listener::onFailure));
        } else {
            logger.info("Completed deletion of expired data");
            listener.onResponse(new DeleteExpiredDataAction.Response(true));
        }
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BulkAction;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.MLMetadataField;
import org.elasticsearch.xpack.ml.MlMetaIndex;
import org.elasticsearch.xpack.ml.MlMetadata;
import org.elasticsearch.xpack.ml.job.config.Detector;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.MlFilter;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin;

/**
 * Transport action that deletes an ML filter document, refusing the deletion
 * while any job's detectors still reference the filter.
 */
public class TransportDeleteFilterAction extends
        HandledTransportAction<DeleteFilterAction.Request, DeleteFilterAction.Response> {

    private final Client client;
    private final ClusterService clusterService;

    @Inject
    public TransportDeleteFilterAction(Settings settings, ThreadPool threadPool,
                                       TransportService transportService, ActionFilters actionFilters,
                                       IndexNameExpressionResolver indexNameExpressionResolver,
                                       ClusterService clusterService, Client client) {
        super(settings, DeleteFilterAction.NAME, threadPool, transportService, actionFilters,
                indexNameExpressionResolver, DeleteFilterAction.Request::new);
        this.clusterService = clusterService;
        this.client = client;
    }

    @Override
    protected void doExecute(DeleteFilterAction.Request request,
                             ActionListener<DeleteFilterAction.Response> listener) {

        final String filterId = request.getFilterId();
        ClusterState state = clusterService.state();
        MlMetadata currentMlMetadata = state.metaData().custom(MLMetadataField.TYPE);
        Map<String, Job> jobs = currentMlMetadata.getJobs();
        // Collect every job whose detectors reference this filter; the
        // deletion is rejected while any such job exists.
        List<String> currentlyUsedBy = new ArrayList<>();
        for (Job job : jobs.values()) {
            List<Detector> detectors = job.getAnalysisConfig().getDetectors();
            for (Detector detector : detectors) {
                if (detector.extractReferencedFilters().contains(filterId)) {
                    currentlyUsedBy.add(job.getId());
                    break;
                }
            }
        }
        if (!currentlyUsedBy.isEmpty()) {
            throw ExceptionsHelper.conflictStatusException("Cannot delete filter, currently used by jobs: "
                    + currentlyUsedBy);
        }

        DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, MlFilter.documentId(filterId));
        // Single-item bulk so an IMMEDIATE refresh policy can be applied.
        BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
        bulkRequestBuilder.add(deleteRequest);
        bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(),
                new ActionListener<BulkResponse>() {
                    @Override
                    public void onResponse(BulkResponse bulkResponse) {
                        if (bulkResponse.getItems()[0].status() == RestStatus.NOT_FOUND) {
                            listener.onFailure(new ResourceNotFoundException("Could not delete filter with ID [" + filterId
                                    + "] because it does not exist"));
                        } else {
                            listener.onResponse(new DeleteFilterAction.Response(true));
                        }
                    }

                    @Override
                    public void onFailure(Exception e) {
                        listener.onFailure(ExceptionsHelper.serverError("Could not delete filter with ID [" + filterId + "]", e));
                    }
                });
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.node.NodeClosedException;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.MLMetadataField;
import org.elasticsearch.xpack.ml.MachineLearningClientActionPlugin;
import org.elasticsearch.xpack.ml.MlMetadata;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.persistence.JobStorageDeletionTask;
import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData;
import org.elasticsearch.xpack.persistent.PersistentTasksService;

import java.util.concurrent.TimeoutException;

import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin;

/**
 * Master-node action that deletes an ML job. The job is first marked as
 * deleted in the cluster state; a force delete additionally kills the job's
 * process and cancels its persistent task before removing the job's storage.
 */
public class TransportDeleteJobAction extends
        TransportMasterNodeAction<DeleteJobAction.Request, DeleteJobAction.Response> {

    private final Client client;
    private final JobManager jobManager;
    private final PersistentTasksService persistentTasksService;

    @Inject
    public TransportDeleteJobAction(Settings settings, TransportService transportService, ClusterService clusterService,
                                    ThreadPool threadPool, ActionFilters actionFilters,
                                    IndexNameExpressionResolver indexNameExpressionResolver, JobManager jobManager,
                                    PersistentTasksService persistentTasksService, Client client) {
        super(settings, DeleteJobAction.NAME, transportService, clusterService, threadPool, actionFilters,
                indexNameExpressionResolver, DeleteJobAction.Request::new);
        this.client = client;
        this.jobManager = jobManager;
        this.persistentTasksService = persistentTasksService;
    }

    @Override
    protected String executor() {
        return ThreadPool.Names.SAME;
    }

    @Override
    protected DeleteJobAction.Response newResponse() {
        return new DeleteJobAction.Response();
    }

    @Override
    protected void masterOperation(Task task, DeleteJobAction.Request request, ClusterState state,
                                   ActionListener<DeleteJobAction.Response> listener) throws Exception {

        ActionListener<Boolean> markAsDeletingListener = ActionListener.wrap(
                response -> {
                    if (request.isForce()) {
                        forceDeleteJob(request, (JobStorageDeletionTask) task, listener);
                    } else {
                        normalDeleteJob(request, (JobStorageDeletionTask) task, listener);
                    }
                },
                e -> {
                    if (e instanceof MlMetadata.JobAlreadyMarkedAsDeletedException) {
                        // Don't kick off a parallel deletion task, but just wait for
                        // the in-progress request to finish.  This is much safer in the
                        // case where the job with the same name might be immediately
                        // recreated after the delete returns.  However, if a force
                        // delete times out then eventually kick off a parallel delete
                        // in case the original completely failed for some reason.
                        waitForDeletingJob(request.getJobId(), MachineLearningClientActionPlugin.STATE_PERSIST_RESTORE_TIMEOUT,
                                ActionListener.wrap(
                                        listener::onResponse,
                                        e2 -> {
                                            if (request.isForce() && e2 instanceof TimeoutException) {
                                                forceDeleteJob(request, (JobStorageDeletionTask) task, listener);
                                            } else {
                                                listener.onFailure(e2);
                                            }
                                        }
                                ));
                    } else {
                        listener.onFailure(e);
                    }
                });

        markJobAsDeleting(request.getJobId(), markAsDeletingListener, request.isForce());
    }

    @Override
    protected void masterOperation(DeleteJobAction.Request request, ClusterState state,
                                   ActionListener<DeleteJobAction.Response> listener) throws Exception {
        // The Task-aware overload above is the real entry point; this variant
        // must never be called because the deletion task is required.
        throw new UnsupportedOperationException("the Task parameter is required");
    }

    @Override
    protected ClusterBlockException checkBlock(DeleteJobAction.Request request, ClusterState state) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
    }

    private void normalDeleteJob(DeleteJobAction.Request request, JobStorageDeletionTask task,
                                 ActionListener<DeleteJobAction.Response> listener) {
        jobManager.deleteJob(request, task, listener);
    }

    /**
     * Force delete chain (listeners are declared in reverse execution order):
     * 1. kill the job's process, 2. cancel the persistent task, 3. delete the job.
     */
    private void forceDeleteJob(DeleteJobAction.Request request, JobStorageDeletionTask task,
                                ActionListener<DeleteJobAction.Response> listener) {

        final ClusterState state = clusterService.state();
        final String jobId = request.getJobId();

        // 3. Delete the job
        ActionListener<Boolean> removeTaskListener = new ActionListener<Boolean>() {
            @Override
            public void onResponse(Boolean response) {
                jobManager.deleteJob(request, task, listener);
            }

            @Override
            public void onFailure(Exception e) {
                if (e instanceof ResourceNotFoundException) {
                    // The task was already gone; proceed with the deletion anyway.
                    jobManager.deleteJob(request, task, listener);
                } else {
                    listener.onFailure(e);
                }
            }
        };

        // 2. Cancel the persistent task. This closes the process gracefully so
        // the process should be killed first.
        ActionListener<KillProcessAction.Response> killJobListener = ActionListener.wrap(
                response -> {
                    removePersistentTask(request.getJobId(), state, removeTaskListener);
                },
                e -> {
                    if (e instanceof ElasticsearchStatusException) {
                        // Killing the process marks the task as completed so it
                        // may have disappeared when we get here
                        removePersistentTask(request.getJobId(), state, removeTaskListener);
                    } else {
                        listener.onFailure(e);
                    }
                }
        );

        // 1. Kill the job's process
        killProcess(jobId, killJobListener);
    }

    private void killProcess(String jobId, ActionListener<KillProcessAction.Response> listener) {
        KillProcessAction.Request killRequest = new KillProcessAction.Request(jobId);
        executeAsyncWithOrigin(client, ML_ORIGIN, KillProcessAction.INSTANCE, killRequest, listener);
    }

    private void removePersistentTask(String jobId, ClusterState currentState,
                                      ActionListener<Boolean> listener) {
        PersistentTasksCustomMetaData tasks = currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE);

        PersistentTasksCustomMetaData.PersistentTask<?> jobTask = MlMetadata.getJobTask(jobId, tasks);
        if (jobTask == null) {
            listener.onResponse(null);
        } else {
            persistentTasksService.cancelPersistentTask(jobTask.getId(),
                    new ActionListener<PersistentTasksCustomMetaData.PersistentTask<?>>() {
                        @Override
                        public void onResponse(PersistentTasksCustomMetaData.PersistentTask<?> task) {
                            listener.onResponse(Boolean.TRUE);
                        }

                        @Override
                        public void onFailure(Exception e) {
                            listener.onFailure(e);
                        }
                    });
        }
    }

    void markJobAsDeleting(String jobId, ActionListener<Boolean> listener, boolean force) {
        clusterService.submitStateUpdateTask("mark-job-as-deleted", new ClusterStateUpdateTask() {
            @Override
            public ClusterState execute(ClusterState currentState) throws Exception {
                MlMetadata currentMlMetadata = currentState.metaData().custom(MLMetadataField.TYPE);
                PersistentTasksCustomMetaData tasks = currentState.metaData().custom(PersistentTasksCustomMetaData.TYPE);
                MlMetadata.Builder builder = new MlMetadata.Builder(currentMlMetadata);
                builder.markJobAsDeleted(jobId, tasks, force);
                return buildNewClusterState(currentState, builder);
            }

            @Override
            public void onFailure(String source, Exception e) {
                listener.onFailure(e);
            }

            @Override
            public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) {
                logger.debug("Job [" + jobId + "] is successfully marked as deleted");
                listener.onResponse(true);
            }
        });
    }

    /**
     * Waits (up to {@code timeout}) for a job that is already marked as deleted
     * to disappear from the cluster state.
     */
    void waitForDeletingJob(String jobId, TimeValue timeout, ActionListener<DeleteJobAction.Response> listener) {
        ClusterStateObserver stateObserver = new ClusterStateObserver(clusterService, timeout, logger, threadPool.getThreadContext());

        ClusterState clusterState = stateObserver.setAndGetObservedState();
        if (jobIsDeletedFromState(jobId, clusterState)) {
            listener.onResponse(new DeleteJobAction.Response(true));
        } else {
            stateObserver.waitForNextChange(new ClusterStateObserver.Listener() {
                @Override
                public void onNewClusterState(ClusterState state) {
                    listener.onResponse(new DeleteJobAction.Response(true));
                }

                @Override
                public void onClusterServiceClose() {
                    listener.onFailure(new NodeClosedException(clusterService.localNode()));
                }

                @Override
                public void onTimeout(TimeValue timeout) {
                    listener.onFailure(new TimeoutException("timed out after " + timeout));
                }
            }, newClusterState -> jobIsDeletedFromState(jobId, newClusterState), timeout);
        }
    }

    static boolean jobIsDeletedFromState(String jobId, ClusterState clusterState) {
        MlMetadata metadata = clusterState.metaData().custom(MLMetadataField.TYPE);
        if (metadata == null) {
            // No ML metadata at all implies no jobs, so the job is gone.
            return true;
        }
        return !metadata.getJobs().containsKey(jobId);
    }

    private static ClusterState buildNewClusterState(ClusterState currentState, MlMetadata.Builder builder) {
        ClusterState.Builder newState = ClusterState.builder(currentState);
        newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MLMetadataField.TYPE, builder.build()).build());
        return newState.build();
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.job.persistence.JobDataDeleter;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.ml.notifications.Auditor;

import java.util.Collections;
import java.util.List;

/**
 * Transport action that deletes a single model snapshot, refusing to delete
 * the snapshot that the job is currently using.
 */
public class TransportDeleteModelSnapshotAction extends
        HandledTransportAction<DeleteModelSnapshotAction.Request, DeleteModelSnapshotAction.Response> {

    private final Client client;
    private final JobProvider jobProvider;
    private final ClusterService clusterService;
    private final Auditor auditor;

    @Inject
    public TransportDeleteModelSnapshotAction(Settings settings, TransportService transportService, ThreadPool threadPool,
                                              ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                                              JobProvider jobProvider, ClusterService clusterService, Client client, Auditor auditor) {
        super(settings, DeleteModelSnapshotAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
                DeleteModelSnapshotAction.Request::new);
        this.client = client;
        this.jobProvider = jobProvider;
        this.clusterService = clusterService;
        this.auditor = auditor;
    }

    @Override
    protected void doExecute(DeleteModelSnapshotAction.Request request,
                             ActionListener<DeleteModelSnapshotAction.Response> listener) {
        // Verify the snapshot exists
        jobProvider.modelSnapshots(
                request.getJobId(), 0, 1, null, null, null, true, request.getSnapshotId(),
                page -> {
                    List<ModelSnapshot> deleteCandidates = page.results();
                    if (deleteCandidates.size() > 1) {
                        // Snapshot IDs should be unique per job; more than one hit
                        // indicates inconsistent state, so warn but carry on with the first.
                        logger.warn("More than one model found for [job_id: " + request.getJobId()
                                + ", snapshot_id: " + request.getSnapshotId() + "] tuple.");
                    }

                    if (deleteCandidates.isEmpty()) {
                        listener.onFailure(new ResourceNotFoundException(Messages.getMessage(Messages.REST_NO_SUCH_MODEL_SNAPSHOT,
                                request.getSnapshotId(), request.getJobId())));
                        return;
                    }
                    ModelSnapshot deleteCandidate = deleteCandidates.get(0);

                    // Verify the snapshot is not being used
                    Job job = JobManager.getJobOrThrowIfUnknown(request.getJobId(), clusterService.state());
                    String currentModelInUse = job.getModelSnapshotId();
                    if (currentModelInUse != null && currentModelInUse.equals(request.getSnapshotId())) {
                        throw new IllegalArgumentException(Messages.getMessage(Messages.REST_CANNOT_DELETE_HIGHEST_PRIORITY,
                                request.getSnapshotId(), request.getJobId()));
                    }

                    // Delete the snapshot and any associated state files
                    JobDataDeleter deleter = new JobDataDeleter(client, request.getJobId());
                    deleter.deleteModelSnapshots(Collections.singletonList(deleteCandidate), new ActionListener<BulkResponse>() {
                        @Override
                        public void onResponse(BulkResponse bulkResponse) {
                            String msg = Messages.getMessage(Messages.JOB_AUDIT_SNAPSHOT_DELETED, deleteCandidate.getSnapshotId(),
                                    deleteCandidate.getDescription());
                            auditor.info(request.getJobId(), msg);
                            logger.debug("[{}] {}", request.getJobId(), msg);
                            // We don't care about the bulk response, just that it succeeded
                            listener.onResponse(new DeleteModelSnapshotAction.Response(true));
                        }

                        @Override
                        public void onFailure(Exception e) {
                            listener.onFailure(e);
                        }
                    });

                }, listener::onFailure);
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.MLMetadataField;
import org.elasticsearch.xpack.ml.MlMetadata;
import org.elasticsearch.xpack.ml.job.config.Job;

import java.util.Date;

/**
 * Master-node action that stamps a finished time on each of the requested
 * jobs in the ML cluster-state metadata.
 */
public class TransportFinalizeJobExecutionAction extends
        TransportMasterNodeAction<FinalizeJobExecutionAction.Request, FinalizeJobExecutionAction.Response> {

    @Inject
    public TransportFinalizeJobExecutionAction(Settings settings, TransportService transportService,
                                               ClusterService clusterService, ThreadPool threadPool,
                                               ActionFilters actionFilters,
                                               IndexNameExpressionResolver indexNameExpressionResolver) {
        super(settings, FinalizeJobExecutionAction.NAME, transportService, clusterService, threadPool, actionFilters,
                indexNameExpressionResolver, FinalizeJobExecutionAction.Request::new);
    }

    @Override
    protected String executor() {
        return ThreadPool.Names.SAME;
    }

    @Override
    protected FinalizeJobExecutionAction.Response newResponse() {
        return new FinalizeJobExecutionAction.Response();
    }

    @Override
    protected void masterOperation(FinalizeJobExecutionAction.Request request, ClusterState state,
                                   ActionListener<FinalizeJobExecutionAction.Response> listener) throws Exception {
        String jobIdString = String.join(",", request.getJobIds());
        String source = "finalize_job_execution [" + jobIdString + "]";
        logger.debug("finalizing jobs [{}]", jobIdString);
        clusterService.submitStateUpdateTask(source, new ClusterStateUpdateTask() {
            @Override
            public ClusterState execute(ClusterState currentState) throws Exception {
                MlMetadata mlMetadata = currentState.metaData().custom(MLMetadataField.TYPE);
                MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(mlMetadata);
                // A single timestamp for all jobs in this request.
                Date finishedTime = new Date();

                for (String jobId : request.getJobIds()) {
                    Job.Builder jobBuilder = new Job.Builder(mlMetadata.getJobs().get(jobId));
                    jobBuilder.setFinishedTime(finishedTime);
                    mlMetadataBuilder.putJob(jobBuilder.build(), true);
                }
                ClusterState.Builder builder = ClusterState.builder(currentState);
                return builder.metaData(new MetaData.Builder(currentState.metaData())
                        .putCustom(MLMetadataField.TYPE, mlMetadataBuilder.build()))
                        .build();
            }

            @Override
            public void onFailure(String source, Exception e) {
                listener.onFailure(e);
            }

            @Override
            public void clusterStateProcessed(String source, ClusterState oldState,
                                              ClusterState newState) {
                logger.debug("finalized job [{}]", jobIdString);
                listener.onResponse(new FinalizeJobExecutionAction.Response(true));
            }
        });
    }

    @Override
    protected ClusterBlockException checkBlock(FinalizeJobExecutionAction.Request request, ClusterState state) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;
import org.elasticsearch.xpack.ml.job.process.autodetect.params.FlushJobParams;
import org.elasticsearch.xpack.ml.job.process.autodetect.params.TimeRange;

import java.io.IOException;

/**
 * Transport action for the ML "flush job" API. The request is routed to the
 * node running the job's autodetect process, where buffered data is flushed,
 * optionally calculating interim results and advancing or skipping time.
 */
// NOTE(review): the generic type arguments on TransportJobTaskAction were lost
// in extraction; reconstructed as FlushJobAction.Request/Response from the
// suppliers passed to super() — confirm against the original file.
public class TransportFlushJobAction extends TransportJobTaskAction<FlushJobAction.Request, FlushJobAction.Response> {

    @Inject
    public TransportFlushJobAction(Settings settings, TransportService transportService, ThreadPool threadPool,
                                   ClusterService clusterService, ActionFilters actionFilters,
                                   IndexNameExpressionResolver indexNameExpressionResolver,
                                   AutodetectProcessManager processManager) {
        super(settings, FlushJobAction.NAME, threadPool, clusterService, transportService, actionFilters,
                indexNameExpressionResolver, FlushJobAction.Request::new, FlushJobAction.Response::new,
                ThreadPool.Names.SAME, processManager);
        // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread
    }

    @Override
    protected FlushJobAction.Response readTaskResponse(StreamInput in) throws IOException {
        // Deserialize the response produced on the node that executed the task.
        FlushJobAction.Response response = new FlushJobAction.Response();
        response.readFrom(in);
        return response;
    }

    @Override
    protected void taskOperation(FlushJobAction.Request request, TransportOpenJobAction.JobTask task,
                                 ActionListener<FlushJobAction.Response> listener) {
        // Translate the transport-level request into autodetect flush parameters.
        FlushJobParams.Builder paramsBuilder = FlushJobParams.builder();
        paramsBuilder.calcInterim(request.getCalcInterim());
        if (request.getAdvanceTime() != null) {
            paramsBuilder.advanceTime(request.getAdvanceTime());
        }
        if (request.getSkipTime() != null) {
            paramsBuilder.skipTime(request.getSkipTime());
        }
        // Optional time range bounding the flush; either end may be absent.
        TimeRange.Builder timeRangeBuilder = TimeRange.builder();
        if (request.getStart() != null) {
            timeRangeBuilder.startTime(request.getStart());
        }
        if (request.getEnd() != null) {
            timeRangeBuilder.endTime(request.getEnd());
        }
        paramsBuilder.forTimeRange(timeRangeBuilder.build());
        processManager.flushJob(task, paramsBuilder.build(), ActionListener.wrap(
                flushAcknowledgement -> {
                    // A null acknowledgement means no flush result was produced, in
                    // which case no "last finalized bucket end" time is reported.
                    listener.onResponse(new FlushJobAction.Response(true,
                            flushAcknowledgement == null ? null : flushAcknowledgement.getLastFinalizedBucketEnd()));
                }, listener::onFailure
        ));
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;
import org.elasticsearch.xpack.ml.job.process.autodetect.params.ForecastParams;
import org.elasticsearch.xpack.ml.job.results.ForecastRequestStats;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.List;
import java.util.function.Consumer;

import static org.elasticsearch.xpack.ml.action.ForecastJobAction.Request.DURATION;

/**
 * Transport action for the ML "forecast" API. Validates the request against the
 * job's configuration, starts a forecast in the autodetect process on the node
 * running the job, then looks up the resulting {@link ForecastRequestStats}
 * document to decide whether the forecast was accepted or failed.
 */
// NOTE(review): generic type arguments were lost in extraction; reconstructed as
// ForecastJobAction.Request/Response — confirm against the original file.
public class TransportForecastJobAction
        extends TransportJobTaskAction<ForecastJobAction.Request, ForecastJobAction.Response> {

    private final JobProvider jobProvider;

    @Inject
    public TransportForecastJobAction(Settings settings, TransportService transportService, ThreadPool threadPool,
                                      ClusterService clusterService, ActionFilters actionFilters,
                                      IndexNameExpressionResolver indexNameExpressionResolver, JobProvider jobProvider,
                                      AutodetectProcessManager processManager) {
        super(settings, ForecastJobAction.NAME, threadPool, clusterService, transportService, actionFilters,
                indexNameExpressionResolver, ForecastJobAction.Request::new, ForecastJobAction.Response::new,
                ThreadPool.Names.SAME, processManager);
        this.jobProvider = jobProvider;
        // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread
    }

    @Override
    protected ForecastJobAction.Response readTaskResponse(StreamInput in) throws IOException {
        // Deserialize the response produced on the node that executed the task.
        ForecastJobAction.Response response = new ForecastJobAction.Response();
        response.readFrom(in);
        return response;
    }

    @Override
    protected void taskOperation(ForecastJobAction.Request request, TransportOpenJobAction.JobTask task,
                                 ActionListener<ForecastJobAction.Response> listener) {
        ClusterState state = clusterService.state();
        // Throws if the job is unknown; also gives us the config used by validate().
        Job job = JobManager.getJobOrThrowIfUnknown(task.getJobId(), state);
        validate(job, request);

        ForecastParams.Builder paramsBuilder = ForecastParams.builder();

        if (request.getDuration() != null) {
            paramsBuilder.duration(request.getDuration());
        }

        if (request.getExpiresIn() != null) {
            paramsBuilder.expiresIn(request.getExpiresIn());
        }

        ForecastParams params = paramsBuilder.build();
        processManager.forecastJob(task, params, e -> {
            if (e == null) {
                // Forecast was submitted; fetch its stats document to find out
                // whether it actually ran or failed inside the process.
                Consumer<ForecastRequestStats> forecastRequestStatsHandler = forecastRequestStats -> {
                    if (forecastRequestStats == null) {
                        // paranoia case, it should not happen that we do not retrieve a result
                        listener.onFailure(new ElasticsearchException(
                                "Cannot run forecast: internal error, please check the logs"));
                    } else if (forecastRequestStats.getStatus() == ForecastRequestStats.ForecastRequestStatus.FAILED) {
                        // Surface the first failure message from the process, if any.
                        List<String> messages = forecastRequestStats.getMessages();
                        if (messages.size() > 0) {
                            listener.onFailure(ExceptionsHelper.badRequestException("Cannot run forecast: "
                                    + messages.get(0)));
                        } else {
                            // paranoia case, it should not be possible to have an empty message list
                            listener.onFailure(
                                    new ElasticsearchException(
                                            "Cannot run forecast: internal error, please check the logs"));
                        }
                    } else {
                        listener.onResponse(new ForecastJobAction.Response(true, params.getForecastId()));
                    }
                };

                jobProvider.getForecastRequestStats(request.getJobId(), params.getForecastId(),
                        forecastRequestStatsHandler, listener::onFailure);
            } else {
                listener.onFailure(e);
            }
        });
    }

    /**
     * Rejects forecasts on jobs created before 6.1 and forecast durations
     * shorter than the job's bucket span.
     *
     * @throws org.elasticsearch.ElasticsearchStatusException (bad request) when validation fails
     */
    static void validate(Job job, ForecastJobAction.Request request) {
        // A null job version means the job predates versions being recorded, i.e. pre-6.1.
        if (job.getJobVersion() == null || job.getJobVersion().before(Version.V_6_1_0)) {
            throw ExceptionsHelper.badRequestException(
                    "Cannot run forecast because jobs created prior to version 6.1 are not supported");
        }

        if (request.getDuration() != null) {
            TimeValue duration = request.getDuration();
            TimeValue bucketSpan = job.getAnalysisConfig().getBucketSpan();

            if (duration.compareTo(bucketSpan) < 0) {
                throw ExceptionsHelper.badRequestException(
                        "[" + DURATION.getPreferredName() + "] must be greater or equal to the bucket span: ["
                                + duration.getStringRep() + "/" + bucketSpan.getStringRep() + "]");
            }
        }
    }
}
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.persistence.BucketsQueryBuilder; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; + +public class TransportGetBucketsAction extends HandledTransportAction { + + private final JobProvider jobProvider; + private final JobManager jobManager; + private final Client client; + + @Inject + public TransportGetBucketsAction(Settings settings, ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + JobProvider jobProvider, JobManager jobManager, Client client) { + super(settings, GetBucketsAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, + GetBucketsAction.Request::new); + this.jobProvider = jobProvider; + this.jobManager = jobManager; + this.client = client; + } + + @Override + protected void doExecute(GetBucketsAction.Request request, ActionListener listener) { + jobManager.getJobOrThrowIfUnknown(request.getJobId()); + + BucketsQueryBuilder query = + new BucketsQueryBuilder().expand(request.isExpand()) + .includeInterim(request.isExcludeInterim() == false) + .start(request.getStart()) + .end(request.getEnd()) + .anomalyScoreThreshold(request.getAnomalyScore()) + .sortField(request.getSort()) + .sortDescending(request.isDescending()); + + if (request.getPageParams() != null) { + 
query.from(request.getPageParams().getFrom()) + .size(request.getPageParams().getSize()); + } + if (request.getTimestamp() != null) { + query.timestamp(request.getTimestamp()); + } else { + query.start(request.getStart()); + query.end(request.getEnd()); + } + jobProvider.buckets(request.getJobId(), query, q -> + listener.onResponse(new GetBucketsAction.Response(q)), listener::onFailure, client); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java new file mode 100644 index 00000000000..8c282caa77c --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.calendars.SpecialEvent;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.persistence.SpecialEventsQueryBuilder;

import java.util.Collections;

/**
 * Transport action for the ML "get calendar events" API. Verifies the target
 * calendar exists (unless the special "_all" id is used), then queries special
 * events either for a specific job or across calendars.
 */
public class TransportGetCalendarEventsAction
        extends HandledTransportAction<GetCalendarEventsAction.Request, GetCalendarEventsAction.Response> {

    private final JobProvider jobProvider;

    @Inject
    public TransportGetCalendarEventsAction(Settings settings, ThreadPool threadPool,
                                            TransportService transportService, ActionFilters actionFilters,
                                            IndexNameExpressionResolver indexNameExpressionResolver,
                                            JobProvider jobProvider) {
        super(settings, GetCalendarEventsAction.NAME, threadPool, transportService, actionFilters,
                indexNameExpressionResolver, GetCalendarEventsAction.Request::new);
        this.jobProvider = jobProvider;
    }

    @Override
    protected void doExecute(GetCalendarEventsAction.Request request,
                             ActionListener<GetCalendarEventsAction.Response> listener) {
        // Runs only after the calendar's existence has been confirmed below.
        ActionListener<Boolean> calendarExistsListener = ActionListener.wrap(
                r -> {
                    // NOTE(review): getPageParams() is dereferenced without a null check;
                    // presumably the request guarantees defaults — confirm in
                    // GetCalendarEventsAction.Request.
                    SpecialEventsQueryBuilder query = new SpecialEventsQueryBuilder()
                            .after(request.getAfter())
                            .before(request.getBefore())
                            .from(request.getPageParams().getFrom())
                            .size(request.getPageParams().getSize());

                    // "_all" means no calendar filter at all.
                    if (GetCalendarsAction.Request.ALL.equals(request.getCalendarId()) == false) {
                        query.calendarIds(Collections.singletonList(request.getCalendarId()));
                    }

                    ActionListener<QueryPage<SpecialEvent>> eventsListener = ActionListener.wrap(
                            events -> {
                                listener.onResponse(new GetCalendarEventsAction.Response(events));
                            },
                            listener::onFailure
                    );

                    // A job id narrows events to those affecting that job's calendars.
                    if (request.getJobId() != null) {
                        jobProvider.specialEventsForJob(request.getJobId(), query, eventsListener);
                    } else {
                        jobProvider.specialEvents(query, eventsListener);
                    }
                },
                listener::onFailure);

        checkCalendarExists(request.getCalendarId(), calendarExistsListener);
    }

    // Resolves true when the calendar exists (or is "_all"); otherwise fails the
    // listener with whatever error the provider lookup produced.
    private void checkCalendarExists(String calendarId, ActionListener<Boolean> listener) {
        if (GetCalendarsAction.Request.ALL.equals(calendarId)) {
            listener.onResponse(true);
            return;
        }

        jobProvider.calendar(calendarId, ActionListener.wrap(
                c -> listener.onResponse(true),
                listener::onFailure
        ));
    }
}
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.get.GetAction; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MlMetaIndex; +import org.elasticsearch.xpack.ml.action.util.PageParams; +import org.elasticsearch.xpack.ml.action.util.QueryPage; +import org.elasticsearch.xpack.ml.calendars.Calendar; +import org.elasticsearch.xpack.ml.job.persistence.CalendarQueryBuilder; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + +public class TransportGetCalendarsAction extends HandledTransportAction { + + private final JobProvider jobProvider; + + @Inject + public TransportGetCalendarsAction(Settings settings, 
ThreadPool threadPool, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + JobProvider jobProvider) { + super(settings, GetCalendarsAction.NAME, threadPool, transportService, actionFilters, + indexNameExpressionResolver, GetCalendarsAction.Request::new); + this.jobProvider = jobProvider; + } + + @Override + protected void doExecute(GetCalendarsAction.Request request, ActionListener listener) { + final String calendarId = request.getCalendarId(); + if (request.getCalendarId() != null && GetCalendarsAction.Request.ALL.equals(request.getCalendarId()) == false) { + getCalendar(calendarId, listener); + } else { + PageParams pageParams = request.getPageParams(); + if (pageParams == null) { + pageParams = PageParams.defaultParams(); + } + getCalendars(pageParams, listener); + } + } + + private void getCalendar(String calendarId, ActionListener listener) { + + jobProvider.calendar(calendarId, ActionListener.wrap( + calendar -> { + QueryPage page = new QueryPage<>(Collections.singletonList(calendar), 1, Calendar.RESULTS_FIELD); + listener.onResponse(new GetCalendarsAction.Response(page)); + }, + listener::onFailure + )); + } + + private void getCalendars(PageParams pageParams, ActionListener listener) { + CalendarQueryBuilder query = new CalendarQueryBuilder().pageParams(pageParams).sort(true); + jobProvider.calendars(query, ActionListener.wrap( + calendars -> { + listener.onResponse(new GetCalendarsAction.Response(calendars)); + }, + listener::onFailure + )); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java new file mode 100644 index 00000000000..573082b998c --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; + +public class TransportGetCategoriesAction extends HandledTransportAction { + + private final JobProvider jobProvider; + private final Client client; + private final JobManager jobManager; + + @Inject + public TransportGetCategoriesAction(Settings settings, ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + JobProvider jobProvider, Client client, JobManager jobManager) { + super(settings, GetCategoriesAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, + GetCategoriesAction.Request::new); + this.jobProvider = jobProvider; + this.client = client; + this.jobManager = jobManager; + } + + @Override + protected void doExecute(GetCategoriesAction.Request request, ActionListener listener) { + jobManager.getJobOrThrowIfUnknown(request.getJobId()); + + Integer from = request.getPageParams() != null ? request.getPageParams().getFrom() : null; + Integer size = request.getPageParams() != null ? 
request.getPageParams().getSize() : null; + jobProvider.categoryDefinitions(request.getJobId(), request.getCategoryId(), from, size, + r -> listener.onResponse(new GetCategoriesAction.Response(r)), listener::onFailure, client); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsAction.java new file mode 100644 index 00000000000..9ed41f263e2 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsAction.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MLMetadataField; +import org.elasticsearch.xpack.ml.MlMetadata; +import org.elasticsearch.xpack.ml.action.util.QueryPage; +import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; + +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +public class TransportGetDatafeedsAction extends TransportMasterNodeReadAction { + + @Inject + public 
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.MLMetadataField;
import org.elasticsearch.xpack.ml.MlMetadata;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

/**
 * Transport action for the ML "get datafeeds" API. Reads datafeed
 * configurations straight from the ML custom metadata in cluster state, so it
 * runs on the master node's SAME thread (no I/O involved).
 */
public class TransportGetDatafeedsAction
        extends TransportMasterNodeReadAction<GetDatafeedsAction.Request, GetDatafeedsAction.Response> {

    @Inject
    public TransportGetDatafeedsAction(Settings settings, TransportService transportService, ClusterService clusterService,
                                       ThreadPool threadPool, ActionFilters actionFilters,
                                       IndexNameExpressionResolver indexNameExpressionResolver) {
        super(settings, GetDatafeedsAction.NAME, transportService, clusterService, threadPool, actionFilters,
                GetDatafeedsAction.Request::new, indexNameExpressionResolver);
    }

    @Override
    protected String executor() {
        // Cheap cluster-state read; no need to fork to another thread pool.
        return ThreadPool.Names.SAME;
    }

    @Override
    protected GetDatafeedsAction.Response newResponse() {
        return new GetDatafeedsAction.Response();
    }

    @Override
    protected void masterOperation(GetDatafeedsAction.Request request, ClusterState state,
                                   ActionListener<GetDatafeedsAction.Response> listener) throws Exception {
        logger.debug("Get datafeed '{}'", request.getDatafeedId());

        // ML metadata may be absent on a fresh cluster; treat that as empty.
        MlMetadata mlMetadata = state.metaData().custom(MLMetadataField.TYPE);
        if (mlMetadata == null) {
            mlMetadata = MlMetadata.EMPTY_METADATA;
        }
        // Expands wildcards / "_all" in the requested id into concrete datafeed ids.
        Set<String> expandedDatafeedIds = mlMetadata.expandDatafeedIds(request.getDatafeedId(), request.allowNoDatafeeds());
        List<DatafeedConfig> datafeedConfigs = new ArrayList<>();
        for (String expandedDatafeedId : expandedDatafeedIds) {
            datafeedConfigs.add(mlMetadata.getDatafeed(expandedDatafeedId));
        }

        listener.onResponse(new GetDatafeedsAction.Response(new QueryPage<>(datafeedConfigs, datafeedConfigs.size(),
                DatafeedConfig.RESULTS_FIELD)));
    }

    @Override
    protected ClusterBlockException checkBlock(GetDatafeedsAction.Request request, ClusterState state) {
        // Read-only action: only metadata-read blocks apply.
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.MLMetadataField;
import org.elasticsearch.xpack.ml.MlMetadata;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedState;
import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData;

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Master-node read action returning the status of one or more datafeeds:
 * the datafeed state plus, when a persistent task exists for the datafeed,
 * the node it is assigned to and the assignment explanation.
 */
public class TransportGetDatafeedsStatsAction extends TransportMasterNodeReadAction<GetDatafeedsStatsAction.Request,
        GetDatafeedsStatsAction.Response> {

    @Inject
    public TransportGetDatafeedsStatsAction(Settings settings, TransportService transportService, ClusterService clusterService,
                                            ThreadPool threadPool, ActionFilters actionFilters,
                                            IndexNameExpressionResolver indexNameExpressionResolver) {
        super(settings, GetDatafeedsStatsAction.NAME, transportService, clusterService, threadPool, actionFilters,
                GetDatafeedsStatsAction.Request::new, indexNameExpressionResolver);
    }

    @Override
    protected String executor() {
        // Everything is derived from cluster state already in memory, so do not fork.
        return ThreadPool.Names.SAME;
    }

    @Override
    protected GetDatafeedsStatsAction.Response newResponse() {
        return new GetDatafeedsStatsAction.Response();
    }

    @Override
    protected void masterOperation(GetDatafeedsStatsAction.Request request, ClusterState state,
                                   ActionListener<GetDatafeedsStatsAction.Response> listener) throws Exception {
        logger.debug("Get stats for datafeed '{}'", request.getDatafeedId());

        MlMetadata currentMlMetadata = state.metaData().custom(MLMetadataField.TYPE);
        MlMetadata mlMetadata = currentMlMetadata == null ? MlMetadata.EMPTY_METADATA : currentMlMetadata;

        // Expand wildcards / _all into the concrete set of configured datafeed ids.
        Set<String> expandedDatafeedIds = mlMetadata.expandDatafeedIds(request.getDatafeedId(), request.allowNoDatafeeds());

        PersistentTasksCustomMetaData tasksInProgress = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE);
        List<GetDatafeedsStatsAction.Response.DatafeedStats> results = expandedDatafeedIds.stream()
                .map(datafeedId -> getDatafeedStats(datafeedId, state, tasksInProgress))
                .collect(Collectors.toList());
        QueryPage<GetDatafeedsStatsAction.Response.DatafeedStats> statsPage =
                new QueryPage<>(results, results.size(), DatafeedConfig.RESULTS_FIELD);
        listener.onResponse(new GetDatafeedsStatsAction.Response(statsPage));
    }

    /**
     * Builds the per-datafeed stats entry. Node and explanation are only
     * available when a persistent task currently exists for the datafeed.
     */
    private static GetDatafeedsStatsAction.Response.DatafeedStats getDatafeedStats(String datafeedId, ClusterState state,
                                                                                   PersistentTasksCustomMetaData tasks) {
        PersistentTasksCustomMetaData.PersistentTask<?> task = MlMetadata.getDatafeedTask(datafeedId, tasks);
        DatafeedState datafeedState = MlMetadata.getDatafeedState(datafeedId, tasks);
        DiscoveryNode node = null;
        String explanation = null;
        if (task != null) {
            node = state.nodes().get(task.getExecutorNode());
            explanation = task.getAssignment().getExplanation();
        }
        return new GetDatafeedsStatsAction.Response.DatafeedStats(datafeedId, datafeedState, node, explanation);
    }

    @Override
    protected ClusterBlockException checkBlock(GetDatafeedsStatsAction.Request request, ClusterState state) {
        // A metadata-read block (e.g. recovering state) prevents serving stats.
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.get.GetAction;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.MlMetaIndex;
import org.elasticsearch.xpack.ml.action.util.PageParams;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.config.MlFilter;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin;

/**
 * Retrieves ML filters from the ML meta index: a single filter by id via a
 * GET request, or a page of all filters via a search. All client calls run
 * under the ML origin so they pass security as internal ML operations.
 */
public class TransportGetFiltersAction extends HandledTransportAction<GetFiltersAction.Request, GetFiltersAction.Response> {

    private final Client client;

    @Inject
    public TransportGetFiltersAction(Settings settings, ThreadPool threadPool,
                                     TransportService transportService, ActionFilters actionFilters,
                                     IndexNameExpressionResolver indexNameExpressionResolver,
                                     Client client) {
        super(settings, GetFiltersAction.NAME, threadPool, transportService, actionFilters,
                indexNameExpressionResolver, GetFiltersAction.Request::new);
        this.client = client;
    }

    @Override
    protected void doExecute(GetFiltersAction.Request request, ActionListener<GetFiltersAction.Response> listener) {
        final String filterId = request.getFilterId();
        if (!Strings.isNullOrEmpty(filterId)) {
            getFilter(filterId, listener);
        } else {
            PageParams pageParams = request.getPageParams();
            if (pageParams == null) {
                pageParams = PageParams.defaultParams();
            }
            getFilters(pageParams, listener);
        }
    }

    /**
     * Fetches a single filter document by id. Responds with a one-element page,
     * or fails with the "empty page" (not found) exception when the document
     * does not exist.
     */
    private void getFilter(String filterId, ActionListener<GetFiltersAction.Response> listener) {
        GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, MlFilter.documentId(filterId));
        executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
            @Override
            public void onResponse(GetResponse getDocResponse) {
                try {
                    if (getDocResponse.isExists()) {
                        BytesReference docSource = getDocResponse.getSourceAsBytesRef();
                        // try-with-resources: the parser holds resources and must be
                        // closed (the search path below already does this).
                        try (XContentParser parser =
                                     XContentFactory.xContent(docSource).createParser(NamedXContentRegistry.EMPTY, docSource)) {
                            MlFilter filter = MlFilter.PARSER.apply(parser, null).build();
                            QueryPage<MlFilter> responseBody =
                                    new QueryPage<>(Collections.singletonList(filter), 1, MlFilter.RESULTS_FIELD);
                            listener.onResponse(new GetFiltersAction.Response(responseBody));
                        }
                    } else {
                        this.onFailure(QueryPage.emptyQueryPage(MlFilter.RESULTS_FIELD));
                    }
                } catch (Exception e) {
                    this.onFailure(e);
                }
            }

            @Override
            public void onFailure(Exception e) {
                listener.onFailure(e);
            }
        });
    }

    /**
     * Searches the meta index for all filter documents within the requested page.
     * Individual documents that fail to parse abort the response via onFailure.
     */
    private void getFilters(PageParams pageParams, ActionListener<GetFiltersAction.Response> listener) {
        SearchSourceBuilder sourceBuilder = new SearchSourceBuilder()
                .from(pageParams.getFrom())
                .size(pageParams.getSize())
                .query(QueryBuilders.termQuery(MlFilter.TYPE.getPreferredName(), MlFilter.FILTER_TYPE));

        SearchRequest searchRequest = new SearchRequest(MlMetaIndex.INDEX_NAME)
                // Tolerate the meta index not existing yet: treat as zero results.
                .indicesOptions(JobProvider.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS))
                .source(sourceBuilder);

        executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest,
                new ActionListener<SearchResponse>() {
                    @Override
                    public void onResponse(SearchResponse response) {
                        List<MlFilter> docs = new ArrayList<>();
                        for (SearchHit hit : response.getHits().getHits()) {
                            BytesReference docSource = hit.getSourceRef();
                            try (XContentParser parser = XContentFactory.xContent(docSource).createParser(
                                    NamedXContentRegistry.EMPTY, docSource)) {
                                docs.add(MlFilter.PARSER.apply(parser, null).build());
                            } catch (IOException e) {
                                this.onFailure(e);
                            }
                        }

                        GetFiltersAction.Response filterResponse = new GetFiltersAction.Response(new QueryPage<>(docs, docs.size(),
                                MlFilter.RESULTS_FIELD));
                        listener.onResponse(filterResponse);
                    }

                    @Override
                    public void onFailure(Exception e) {
                        listener.onFailure(e);
                    }
                },
                client::search);
    }
}
index 00000000000..7c39f56c7cb --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; + +public class TransportGetInfluencersAction extends HandledTransportAction { + + private final JobProvider jobProvider; + private final Client client; + private final JobManager jobManager; + + @Inject + public TransportGetInfluencersAction(Settings settings, ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + JobProvider jobProvider, Client client, JobManager jobManager) { + super(settings, GetInfluencersAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, + GetInfluencersAction.Request::new); + this.jobProvider = jobProvider; + this.client = client; + this.jobManager = jobManager; + } + + @Override + protected void doExecute(GetInfluencersAction.Request request, ActionListener listener) { + 
jobManager.getJobOrThrowIfUnknown(request.getJobId()); + + InfluencersQueryBuilder.InfluencersQuery query = new InfluencersQueryBuilder() + .includeInterim(request.isExcludeInterim() == false) + .start(request.getStart()) + .end(request.getEnd()) + .from(request.getPageParams().getFrom()) + .size(request.getPageParams().getSize()) + .influencerScoreThreshold(request.getInfluencerScore()) + .sortField(request.getSort()) + .sortDescending(request.isDescending()).build(); + jobProvider.influencers(request.getJobId(), query, + page -> listener.onResponse(new GetInfluencersAction.Response(page)), listener::onFailure, client); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsAction.java new file mode 100644 index 00000000000..8c5bf3da675 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsAction.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.action.util.QueryPage; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.config.Job; + +public class TransportGetJobsAction extends TransportMasterNodeReadAction { + + private final JobManager jobManager; + + @Inject + public TransportGetJobsAction(Settings settings, TransportService transportService, ClusterService clusterService, + ThreadPool threadPool, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + JobManager jobManager) { + super(settings, GetJobsAction.NAME, transportService, clusterService, threadPool, actionFilters, + GetJobsAction.Request::new, indexNameExpressionResolver); + this.jobManager = jobManager; + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected GetJobsAction.Response newResponse() { + return new GetJobsAction.Response(); + } + + @Override + protected void masterOperation(GetJobsAction.Request request, ClusterState state, ActionListener listener) + throws Exception { + logger.debug("Get job '{}'", request.getJobId()); + QueryPage jobs = jobManager.expandJobs(request.getJobId(), request.allowNoJobs(), state); + listener.onResponse(new 
GetJobsAction.Response(jobs)); + } + + @Override + protected ClusterBlockException checkBlock(GetJobsAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java new file mode 100644 index 00000000000..ce8c7539c1f --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java @@ -0,0 +1,180 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.tasks.TransportTasksAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MLMetadataField; +import org.elasticsearch.xpack.ml.MlMetadata; +import 
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.tasks.TransportTasksAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.MLMetadataField;
import org.elasticsearch.xpack.ml.MlMetadata;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSizeStats;
import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData;

import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.stream.Collectors;

/**
 * Gathers job statistics in two phases: live stats are collected from the
 * autodetect process on each node running an open job (the tasks phase), then
 * stats for the remaining closed jobs are read from the results index and
 * merged into the response.
 */
public class TransportGetJobsStatsAction extends TransportTasksAction<TransportOpenJobAction.JobTask, GetJobsStatsAction.Request,
        GetJobsStatsAction.Response, QueryPage<GetJobsStatsAction.Response.JobStats>> {

    private final ClusterService clusterService;
    private final AutodetectProcessManager processManager;
    private final JobProvider jobProvider;

    @Inject
    public TransportGetJobsStatsAction(Settings settings, TransportService transportService, ThreadPool threadPool,
                                       ActionFilters actionFilters, ClusterService clusterService,
                                       IndexNameExpressionResolver indexNameExpressionResolver,
                                       AutodetectProcessManager processManager, JobProvider jobProvider) {
        super(settings, GetJobsStatsAction.NAME, threadPool, clusterService, transportService, actionFilters,
                indexNameExpressionResolver, GetJobsStatsAction.Request::new, GetJobsStatsAction.Response::new,
                ThreadPool.Names.MANAGEMENT);
        this.clusterService = clusterService;
        this.processManager = processManager;
        this.jobProvider = jobProvider;
    }

    @Override
    protected void doExecute(Task task, GetJobsStatsAction.Request request,
                             ActionListener<GetJobsStatsAction.Response> listener) {
        MlMetadata clusterMlMetadata = clusterService.state().metaData().custom(MLMetadataField.TYPE);
        final MlMetadata mlMetadata = (clusterMlMetadata == null) ? MlMetadata.EMPTY_METADATA : clusterMlMetadata;
        // Resolve the id expression up front so the tasks phase and the
        // closed-jobs phase agree on the same set of jobs.
        request.setExpandedJobsIds(new ArrayList<>(mlMetadata.expandJobIds(request.getJobId(), request.allowNoJobs())));
        final ActionListener<GetJobsStatsAction.Response> finalListener = listener;
        // After the tasks phase completes, top up the response with stats for
        // jobs that are not currently open.
        ActionListener<GetJobsStatsAction.Response> wrappedListener = ActionListener.wrap(
                response -> gatherStatsForClosedJobs(mlMetadata, request, response, finalListener),
                listener::onFailure);
        super.doExecute(task, request, wrappedListener);
    }

    @Override
    protected GetJobsStatsAction.Response newResponse(GetJobsStatsAction.Request request,
                                                      List<QueryPage<GetJobsStatsAction.Response.JobStats>> tasks,
                                                      List<TaskOperationFailure> taskOperationFailures,
                                                      List<FailedNodeException> failedNodeExceptions) {
        // Flatten the per-task single-element pages into one combined page.
        List<GetJobsStatsAction.Response.JobStats> stats = new ArrayList<>();
        for (QueryPage<GetJobsStatsAction.Response.JobStats> task : tasks) {
            stats.addAll(task.results());
        }
        return new GetJobsStatsAction.Response(taskOperationFailures, failedNodeExceptions, new QueryPage<>(stats, stats.size(),
                Job.RESULTS_FIELD));
    }

    @Override
    protected QueryPage<GetJobsStatsAction.Response.JobStats> readTaskResponse(StreamInput in) throws IOException {
        return new QueryPage<>(in, GetJobsStatsAction.Response.JobStats::new);
    }

    @Override
    protected void taskOperation(GetJobsStatsAction.Request request, TransportOpenJobAction.JobTask task,
                                 ActionListener<QueryPage<GetJobsStatsAction.Response.JobStats>> listener) {
        String jobId = task.getJobId();
        logger.debug("Get stats for job [{}]", jobId);
        ClusterState state = clusterService.state();
        PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE);
        Optional<Tuple<DataCounts, ModelSizeStats>> stats = processManager.getStatistics(task);
        if (stats.isPresent()) {
            PersistentTasksCustomMetaData.PersistentTask<?> pTask = MlMetadata.getJobTask(jobId, tasks);
            DiscoveryNode node = state.nodes().get(pTask.getExecutorNode());
            JobState jobState = MlMetadata.getJobState(jobId, tasks);
            String assignmentExplanation = pTask.getAssignment().getExplanation();
            TimeValue openTime = durationToTimeValue(processManager.jobOpenTime(task));
            GetJobsStatsAction.Response.JobStats jobStats = new GetJobsStatsAction.Response.JobStats(jobId, stats.get().v1(),
                    stats.get().v2(), jobState, node, assignmentExplanation, openTime);
            listener.onResponse(new QueryPage<>(Collections.singletonList(jobStats), 1, Job.RESULTS_FIELD));
        } else {
            // The process has no stats (e.g. it is still starting); report nothing
            // here and let the closed-jobs phase pick the job up from the index.
            listener.onResponse(new QueryPage<>(Collections.emptyList(), 0, Job.RESULTS_FIELD));
        }
    }

    // Up until now we gathered the stats for jobs that were open,
    // This method will fetch the stats for missing jobs, that was stored in the jobs index
    void gatherStatsForClosedJobs(MlMetadata mlMetadata, GetJobsStatsAction.Request request, GetJobsStatsAction.Response response,
                                  ActionListener<GetJobsStatsAction.Response> listener) {
        List<String> jobIds = determineNonDeletedJobIdsWithoutLiveStats(mlMetadata,
                request.getExpandedJobsIds(), response.getResponse().results());
        if (jobIds.isEmpty()) {
            listener.onResponse(response);
            return;
        }

        // Fan out one async fetch per closed job; the counter fires the final
        // response exactly once, when the last fetch lands.
        AtomicInteger counter = new AtomicInteger(jobIds.size());
        AtomicArray<GetJobsStatsAction.Response.JobStats> jobStats = new AtomicArray<>(jobIds.size());
        PersistentTasksCustomMetaData tasks = clusterService.state().getMetaData().custom(PersistentTasksCustomMetaData.TYPE);
        for (int i = 0; i < jobIds.size(); i++) {
            final int slot = i;
            final String jobId = jobIds.get(i);
            gatherDataCountsAndModelSizeStats(jobId, (dataCounts, modelSizeStats) -> {
                JobState jobState = MlMetadata.getJobState(jobId, tasks);
                PersistentTasksCustomMetaData.PersistentTask<?> pTask = MlMetadata.getJobTask(jobId, tasks);
                String assignmentExplanation = null;
                if (pTask != null) {
                    assignmentExplanation = pTask.getAssignment().getExplanation();
                }
                jobStats.set(slot, new GetJobsStatsAction.Response.JobStats(jobId, dataCounts, modelSizeStats, jobState, null,
                        assignmentExplanation, null));
                if (counter.decrementAndGet() == 0) {
                    List<GetJobsStatsAction.Response.JobStats> results = response.getResponse().results();
                    results.addAll(jobStats.asList());
                    listener.onResponse(new GetJobsStatsAction.Response(response.getTaskFailures(), response.getNodeFailures(),
                            new QueryPage<>(results, results.size(), Job.RESULTS_FIELD)));
                }
            }, listener::onFailure);
        }
    }

    /** Fetches data counts then model size stats from the index and hands both to the handler. */
    void gatherDataCountsAndModelSizeStats(String jobId, BiConsumer<DataCounts, ModelSizeStats> handler,
                                           Consumer<Exception> errorHandler) {
        jobProvider.dataCounts(jobId, dataCounts -> {
            jobProvider.modelSizeStats(jobId, modelSizeStats -> {
                handler.accept(dataCounts, modelSizeStats);
            }, errorHandler);
        }, errorHandler);
    }

    /** Converts an optional open-time duration to a TimeValue, or null when absent. */
    static TimeValue durationToTimeValue(Optional<Duration> duration) {
        return duration.map(d -> TimeValue.timeValueSeconds(d.getSeconds())).orElse(null);
    }

    /**
     * Jobs that were requested but have neither live stats nor a pending
     * deletion; these still need their stats read from the index.
     */
    static List<String> determineNonDeletedJobIdsWithoutLiveStats(MlMetadata mlMetadata,
                                                                  List<String> requestedJobIds,
                                                                  List<GetJobsStatsAction.Response.JobStats> stats) {
        Set<String> excludeJobIds = stats.stream().map(GetJobsStatsAction.Response.JobStats::getJobId).collect(Collectors.toSet());
        return requestedJobIds.stream()
                .filter(jobId -> excludeJobIds.contains(jobId) == false && mlMetadata.isJobDeleted(jobId) == false)
                .collect(Collectors.toList());
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot;

import java.util.stream.Collectors;

/**
 * Returns a page of model snapshots for a job. Quantiles are stripped from
 * each snapshot before the response is sent, as they can be very large and are
 * internal to the autodetect process.
 */
public class TransportGetModelSnapshotsAction extends HandledTransportAction<GetModelSnapshotsAction.Request,
        GetModelSnapshotsAction.Response> {

    private final JobProvider jobProvider;
    private final JobManager jobManager;

    @Inject
    public TransportGetModelSnapshotsAction(Settings settings, TransportService transportService, ThreadPool threadPool,
                                            ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                                            JobProvider jobProvider, JobManager jobManager) {
        super(settings, GetModelSnapshotsAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
                GetModelSnapshotsAction.Request::new);
        this.jobProvider = jobProvider;
        this.jobManager = jobManager;
    }

    @Override
    protected void doExecute(GetModelSnapshotsAction.Request request,
                             ActionListener<GetModelSnapshotsAction.Response> listener) {
        logger.debug("Get model snapshots for job {} snapshot ID {}. from = {}, size = {}"
                        + " start = '{}', end='{}', sort={} descending={}",
                request.getJobId(), request.getSnapshotId(), request.getPageParams().getFrom(), request.getPageParams().getSize(),
                request.getStart(), request.getEnd(), request.getSort(), request.getDescOrder());

        // Throws if the job id does not resolve to a configured job.
        jobManager.getJobOrThrowIfUnknown(request.getJobId());

        jobProvider.modelSnapshots(request.getJobId(), request.getPageParams().getFrom(), request.getPageParams().getSize(),
                request.getStart(), request.getEnd(), request.getSort(), request.getDescOrder(), request.getSnapshotId(),
                page -> listener.onResponse(new GetModelSnapshotsAction.Response(clearQuantiles(page))),
                listener::onFailure);
    }

    /**
     * Returns a copy of the page with quantiles removed from every snapshot;
     * returns the page unchanged when it carries no results.
     */
    public static QueryPage<ModelSnapshot> clearQuantiles(QueryPage<ModelSnapshot> page) {
        if (page.results() == null) {
            return page;
        }
        return new QueryPage<>(page.results().stream()
                .map(snapshot -> new ModelSnapshot.Builder(snapshot).setQuantiles(null).build())
                .collect(Collectors.toList()), page.count(), page.getResultsField());
    }
}
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; +import org.elasticsearch.search.aggregations.metrics.max.Max; +import org.elasticsearch.search.aggregations.metrics.min.Min; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.action.util.QueryPage; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.config.Job; +import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.ml.job.persistence.BucketsQueryBuilder; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsAggregator; +import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsCollector; +import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsProcessor; +import org.elasticsearch.xpack.ml.job.persistence.overallbuckets.OverallBucketsProvider; 
+import org.elasticsearch.xpack.ml.job.results.Bucket; +import org.elasticsearch.xpack.ml.job.results.OverallBucket; +import org.elasticsearch.xpack.ml.job.results.Result; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.utils.Intervals; + +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + +public class TransportGetOverallBucketsAction extends HandledTransportAction { + + private static final String EARLIEST_TIME = "earliest_time"; + private static final String LATEST_TIME = "latest_time"; + + private final Client client; + private final ClusterService clusterService; + private final JobManager jobManager; + + @Inject + public TransportGetOverallBucketsAction(Settings settings, ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + ClusterService clusterService, JobManager jobManager, Client client) { + super(settings, GetOverallBucketsAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, + GetOverallBucketsAction.Request::new); + this.clusterService = clusterService; + this.client = client; + this.jobManager = jobManager; + } + + @Override + protected void doExecute(GetOverallBucketsAction.Request request, ActionListener listener) { + QueryPage jobsPage = jobManager.expandJobs(request.getJobId(), request.allowNoJobs(), clusterService.state()); + if (jobsPage.count() == 0) { + listener.onResponse(new GetOverallBucketsAction.Response()); + return; + } + + // As computing and potentially aggregating overall buckets might take a while, + // we run in a different thread to avoid blocking the network thread. 
+ threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> { + try { + getOverallBuckets(request, jobsPage.results(), listener); + } catch (Exception e) { + listener.onFailure(e); + } + }); + } + + private void getOverallBuckets(GetOverallBucketsAction.Request request, List jobs, + ActionListener listener) { + JobsContext jobsContext = JobsContext.build(jobs, request); + + ActionListener> overallBucketsListener = ActionListener.wrap(overallBuckets -> { + listener.onResponse(new GetOverallBucketsAction.Response(new QueryPage<>(overallBuckets, overallBuckets.size(), + OverallBucket.RESULTS_FIELD))); + }, listener::onFailure); + + ActionListener chunkedBucketSearcherListener = ActionListener.wrap(searcher -> { + if (searcher == null) { + listener.onResponse(new GetOverallBucketsAction.Response()); + return; + } + searcher.searchAndComputeOverallBuckets(overallBucketsListener); + }, listener::onFailure); + + OverallBucketsProvider overallBucketsProvider = new OverallBucketsProvider(jobsContext.maxBucketSpan, request.getTopN(), + request.getOverallScore()); + OverallBucketsProcessor overallBucketsProcessor = requiresAggregation(request, jobsContext.maxBucketSpan) ? 
+ new OverallBucketsAggregator(request.getBucketSpan()): new OverallBucketsCollector(); + initChunkedBucketSearcher(request, jobsContext, overallBucketsProvider, overallBucketsProcessor, chunkedBucketSearcherListener); + } + + private static boolean requiresAggregation(GetOverallBucketsAction.Request request, TimeValue maxBucketSpan) { + return request.getBucketSpan() != null && !request.getBucketSpan().equals(maxBucketSpan); + } + + private static void checkValidBucketSpan(TimeValue bucketSpan, TimeValue maxBucketSpan) { + if (bucketSpan != null && bucketSpan.compareTo(maxBucketSpan) < 0) { + throw ExceptionsHelper.badRequestException("Param [{}] must be greater or equal to the max bucket_span [{}]", + GetOverallBucketsAction.Request.BUCKET_SPAN, maxBucketSpan.getStringRep()); + } + } + + private void initChunkedBucketSearcher(GetOverallBucketsAction.Request request, JobsContext jobsContext, + OverallBucketsProvider overallBucketsProvider, + OverallBucketsProcessor overallBucketsProcessor, + ActionListener listener) { + long maxBucketSpanMillis = jobsContext.maxBucketSpan.millis(); + SearchRequest searchRequest = buildSearchRequest(request.getStart(), request.getEnd(), request.isExcludeInterim(), + maxBucketSpanMillis, jobsContext.indices); + searchRequest.source().aggregation(AggregationBuilders.min(EARLIEST_TIME).field(Result.TIMESTAMP.getPreferredName())); + searchRequest.source().aggregation(AggregationBuilders.max(LATEST_TIME).field(Result.TIMESTAMP.getPreferredName())); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest, + ActionListener.wrap(searchResponse -> { + long totalHits = searchResponse.getHits().getTotalHits(); + if (totalHits > 0) { + Aggregations aggregations = searchResponse.getAggregations(); + Min min = aggregations.get(EARLIEST_TIME); + long earliestTime = Intervals.alignToFloor((long) min.getValue(), maxBucketSpanMillis); + Max max = aggregations.get(LATEST_TIME); + long latestTime = 
Intervals.alignToCeil((long) max.getValue() + 1, maxBucketSpanMillis); + listener.onResponse(new ChunkedBucketSearcher(jobsContext, earliestTime, latestTime, request.isExcludeInterim(), + overallBucketsProvider, overallBucketsProcessor)); + } else { + listener.onResponse(null); + } + }, listener::onFailure), + client::search); + } + + private static class JobsContext { + private final int jobCount; + private final String[] indices; + private final TimeValue maxBucketSpan; + + private JobsContext(int jobCount, String[] indices, TimeValue maxBucketSpan) { + this.jobCount = jobCount; + this.indices = indices; + this.maxBucketSpan = maxBucketSpan; + } + + private static JobsContext build(List jobs, GetOverallBucketsAction.Request request) { + Set indices = new HashSet<>(); + TimeValue maxBucketSpan = TimeValue.ZERO; + for (Job job : jobs) { + indices.add(AnomalyDetectorsIndex.jobResultsAliasedName(job.getId())); + TimeValue bucketSpan = job.getAnalysisConfig().getBucketSpan(); + if (maxBucketSpan.compareTo(bucketSpan) < 0) { + maxBucketSpan = bucketSpan; + } + } + checkValidBucketSpan(request.getBucketSpan(), maxBucketSpan); + + // If top_n is 1, we can use the request bucket_span in order to optimize the aggregations + if (request.getBucketSpan() != null && (request.getTopN() == 1 || jobs.size() <= 1)) { + maxBucketSpan = request.getBucketSpan(); + } + + return new JobsContext(jobs.size(), indices.toArray(new String[indices.size()]), maxBucketSpan); + } + } + + private class ChunkedBucketSearcher { + + private static final int BUCKETS_PER_CHUNK = 1000; + private static final int MAX_RESULT_COUNT = 10000; + + private final String[] indices; + private final long maxBucketSpanMillis; + private final boolean excludeInterim; + private final long chunkMillis; + private final long endTime; + private volatile long curTime; + private final AggregationBuilder aggs; + private final OverallBucketsProvider overallBucketsProvider; + private final OverallBucketsProcessor 
overallBucketsProcessor; + + ChunkedBucketSearcher(JobsContext jobsContext, long startTime, long endTime, + boolean excludeInterim, OverallBucketsProvider overallBucketsProvider, + OverallBucketsProcessor overallBucketsProcessor) { + this.indices = jobsContext.indices; + this.maxBucketSpanMillis = jobsContext.maxBucketSpan.millis(); + this.chunkMillis = BUCKETS_PER_CHUNK * maxBucketSpanMillis; + this.endTime = endTime; + this.curTime = startTime; + this.excludeInterim = excludeInterim; + this.aggs = buildAggregations(maxBucketSpanMillis, jobsContext.jobCount); + this.overallBucketsProvider = overallBucketsProvider; + this.overallBucketsProcessor = overallBucketsProcessor; + } + + void searchAndComputeOverallBuckets(ActionListener> listener) { + if (curTime >= endTime) { + listener.onResponse(overallBucketsProcessor.finish()); + return; + } + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, nextSearch(), + ActionListener.wrap(searchResponse -> { + Histogram histogram = searchResponse.getAggregations().get(Result.TIMESTAMP.getPreferredName()); + overallBucketsProcessor.process(overallBucketsProvider.computeOverallBuckets(histogram)); + if (overallBucketsProcessor.size() > MAX_RESULT_COUNT) { + listener.onFailure( + ExceptionsHelper.badRequestException("Unable to return more than [{}] results; please use " + + "parameters [{}] and [{}] to limit the time range", MAX_RESULT_COUNT, + GetOverallBucketsAction.Request.START, GetOverallBucketsAction.Request.END)); + return; + } + searchAndComputeOverallBuckets(listener); + }, listener::onFailure), + client::search); + } + + SearchRequest nextSearch() { + long curEnd = Math.min(curTime + chunkMillis, endTime); + logger.debug("Search for buckets in: [{}, {})", curTime, curEnd); + SearchRequest searchRequest = buildSearchRequest(curTime, curEnd, excludeInterim, maxBucketSpanMillis, indices); + searchRequest.source().aggregation(aggs); + curTime += chunkMillis; + return searchRequest; + } + } + + private 
static SearchRequest buildSearchRequest(Long start, Long end, boolean excludeInterim, long bucketSpanMillis, + String[] indices) { + String startTime = start == null ? null : String.valueOf(Intervals.alignToCeil(start, bucketSpanMillis)); + String endTime = end == null ? null : String.valueOf(Intervals.alignToFloor(end, bucketSpanMillis)); + + SearchSourceBuilder searchSourceBuilder = new BucketsQueryBuilder() + .size(0) + .includeInterim(excludeInterim == false) + .start(startTime) + .end(endTime) + .build(); + + SearchRequest searchRequest = new SearchRequest(indices); + searchRequest.indicesOptions(JobProvider.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)); + searchRequest.source(searchSourceBuilder); + return searchRequest; + } + + private static AggregationBuilder buildAggregations(long maxBucketSpanMillis, int jobCount) { + AggregationBuilder overallScoreAgg = AggregationBuilders.max(OverallBucket.OVERALL_SCORE.getPreferredName()) + .field(Bucket.ANOMALY_SCORE.getPreferredName()); + AggregationBuilder jobsAgg = AggregationBuilders.terms(Job.ID.getPreferredName()) + .field(Job.ID.getPreferredName()).size(jobCount).subAggregation(overallScoreAgg); + AggregationBuilder interimAgg = AggregationBuilders.max(Result.IS_INTERIM.getPreferredName()) + .field(Result.IS_INTERIM.getPreferredName()); + return AggregationBuilders.dateHistogram(Result.TIMESTAMP.getPreferredName()) + .field(Result.TIMESTAMP.getPreferredName()) + .interval(maxBucketSpanMillis) + .subAggregation(jobsAgg) + .subAggregation(interimAgg); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java new file mode 100644 index 00000000000..ecd34bb2017 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.RecordsQueryBuilder; + +public class TransportGetRecordsAction extends HandledTransportAction { + + private final JobProvider jobProvider; + private final JobManager jobManager; + private final Client client; + + @Inject + public TransportGetRecordsAction(Settings settings, ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + JobProvider jobProvider, JobManager jobManager, Client client) { + super(settings, GetRecordsAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, + GetRecordsAction.Request::new); + this.jobProvider = jobProvider; + this.jobManager = jobManager; + this.client = client; + } + + @Override + protected void doExecute(GetRecordsAction.Request request, ActionListener listener) { + + jobManager.getJobOrThrowIfUnknown(request.getJobId()); + + RecordsQueryBuilder query = new RecordsQueryBuilder() + .includeInterim(request.isExcludeInterim() == false) + .epochStart(request.getStart()) + .epochEnd(request.getEnd()) + .from(request.getPageParams().getFrom()) + 
.size(request.getPageParams().getSize()) + .recordScore(request.getRecordScoreFilter()) + .sortField(request.getSort()) + .sortDescending(request.isDescending()); + jobProvider.records(request.getJobId(), query, page -> + listener.onResponse(new GetRecordsAction.Response(page)), listener::onFailure, client); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java new file mode 100644 index 00000000000..45d9ff311ef --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.tasks.TransportTasksAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MlMetadata; +import 
org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; + +import java.io.IOException; +import java.util.List; + +public class TransportIsolateDatafeedAction extends TransportTasksAction { + + @Inject + public TransportIsolateDatafeedAction(Settings settings, TransportService transportService, ThreadPool threadPool, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + ClusterService clusterService) { + super(settings, IsolateDatafeedAction.NAME, threadPool, clusterService, transportService, actionFilters, + indexNameExpressionResolver, IsolateDatafeedAction.Request::new, IsolateDatafeedAction.Response::new, + MachineLearning.UTILITY_THREAD_POOL_NAME); + } + + @Override + protected void doExecute(Task task, IsolateDatafeedAction.Request request, ActionListener listener) { + final ClusterState state = clusterService.state(); + PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + PersistentTasksCustomMetaData.PersistentTask datafeedTask = MlMetadata.getDatafeedTask(request.getDatafeedId(), tasks); + + if (datafeedTask == null || datafeedTask.getExecutorNode() == null) { + // No running datafeed task to isolate + listener.onResponse(new IsolateDatafeedAction.Response()); + return; + } + + String executorNode = datafeedTask.getExecutorNode(); + DiscoveryNodes nodes = state.nodes(); + if (nodes.resolveNode(executorNode).getVersion().before(Version.V_5_5_0)) { + listener.onFailure(new ElasticsearchException("Force delete datafeed is not supported because the datafeed task " + + "is running on a node [" + executorNode + "] with a version prior to " + Version.V_5_5_0)); + return; + } + + request.setNodes(datafeedTask.getExecutorNode()); + super.doExecute(task, request, listener); + } + + @Override + protected IsolateDatafeedAction.Response newResponse(IsolateDatafeedAction.Request request, List tasks, + List taskOperationFailures, + List failedNodeExceptions) { + if 
(taskOperationFailures.isEmpty() == false) { + throw org.elasticsearch.ExceptionsHelper + .convertToElastic(taskOperationFailures.get(0).getCause()); + } else if (failedNodeExceptions.isEmpty() == false) { + throw org.elasticsearch.ExceptionsHelper + .convertToElastic(failedNodeExceptions.get(0)); + } else { + return new IsolateDatafeedAction.Response(); + } + } + + @Override + protected void taskOperation(IsolateDatafeedAction.Request request, TransportStartDatafeedAction.DatafeedTask datafeedTask, + ActionListener listener) { + datafeedTask.isolate(); + listener.onResponse(new IsolateDatafeedAction.Response()); + } + + @Override + protected IsolateDatafeedAction.Response readTaskResponse(StreamInput in) throws IOException { + return new IsolateDatafeedAction.Response(in); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java index f2c5dc53d4d..1e5201b0e44 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java @@ -9,14 +9,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.tasks.BaseTasksRequest; import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; @@ -24,12 +21,10 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.job.JobManager; -import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; -import java.io.IOException; import java.util.List; import java.util.function.Supplier; @@ -38,8 +33,9 @@ import java.util.function.Supplier; */ // TODO: Hacking around here with TransportTasksAction. Ideally we should have another base class in core that // redirects to a single node only -public abstract class TransportJobTaskAction, - Response extends BaseTasksResponse & Writeable> extends TransportTasksAction { +public abstract class TransportJobTaskAction, + Response extends BaseTasksResponse & Writeable> + extends TransportTasksAction { protected final AutodetectProcessManager processManager; @@ -100,36 +96,4 @@ public abstract class TransportJobTaskAction> extends BaseTasksRequest { - - String jobId; - - JobTaskRequest() { - } - - JobTaskRequest(String jobId) { - this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); - } - - public String getJobId() { - return jobId; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - jobId = in.readString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(jobId); - } - - @Override - public boolean match(Task task) { - return OpenJobAction.JobTask.match(task, jobId); - } - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java 
b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java new file mode 100644 index 00000000000..5d055f5fb79 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MlMetadata; +import org.elasticsearch.xpack.ml.job.messages.Messages; +import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; +import org.elasticsearch.xpack.ml.notifications.Auditor; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; + +import java.io.IOException; + +public class TransportKillProcessAction extends TransportJobTaskAction { + + private final Auditor auditor; + + @Inject + public TransportKillProcessAction(Settings settings, TransportService transportService, ThreadPool threadPool, + ClusterService 
clusterService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + AutodetectProcessManager processManager, Auditor auditor) { + super(settings, KillProcessAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + KillProcessAction.Request::new, KillProcessAction.Response::new, MachineLearning.UTILITY_THREAD_POOL_NAME, processManager); + this.auditor = auditor; + } + + @Override + protected void taskOperation(KillProcessAction.Request request, TransportOpenJobAction.JobTask jobTask, + ActionListener listener) { + logger.info("[{}] Killing job", jobTask.getJobId()); + auditor.info(jobTask.getJobId(), Messages.JOB_AUDIT_KILLING); + + try { + processManager.killProcess(jobTask, true, null); + listener.onResponse(new KillProcessAction.Response(true)); + } catch (Exception e) { + listener.onFailure(e); + } + } + + @Override + protected void doExecute(Task task, KillProcessAction.Request request, ActionListener listener) { + DiscoveryNodes nodes = clusterService.state().nodes(); + PersistentTasksCustomMetaData tasks = clusterService.state().getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + PersistentTasksCustomMetaData.PersistentTask jobTask = MlMetadata.getJobTask(request.getJobId(), tasks); + if (jobTask == null || jobTask.getExecutorNode() == null) { + logger.debug("[{}] Cannot kill the process because job is not open", request.getJobId()); + listener.onResponse(new KillProcessAction.Response(false)); + return; + } + + DiscoveryNode executorNode = nodes.get(jobTask.getExecutorNode()); + if (executorNode == null) { + listener.onFailure(ExceptionsHelper.conflictStatusException("Cannot kill process for job {} as" + + "executor node {} cannot be found", request.getJobId(), jobTask.getExecutorNode())); + return; + } + + Version nodeVersion = executorNode.getVersion(); + if (nodeVersion.before(Version.V_5_5_0)) { + listener.onFailure(new ElasticsearchException("Cannot kill the 
process on node with version " + nodeVersion)); + return; + } + + super.doExecute(task, request, listener); + } + + + @Override + protected KillProcessAction.Response readTaskResponse(StreamInput in) throws IOException { + return new KillProcessAction.Response(in); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/OpenJobAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java similarity index 50% rename from plugin/src/main/java/org/elasticsearch/xpack/ml/action/OpenJobAction.java rename to plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index 2189bd258b0..8695d5be9c4 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/OpenJobAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -11,18 +11,12 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.Version; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.Client; -import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -34,18 +28,11 @@ import 
org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedSupplier; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.ObjectParser; -import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; @@ -55,7 +42,8 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XpackField; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MlMetaIndex; import org.elasticsearch.xpack.ml.MlMetadata; @@ -69,13 +57,9 @@ import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.persistent.AllocatedPersistentTask; -import org.elasticsearch.xpack.persistent.PersistentTaskParams; import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.Assignment; -import 
org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.PersistentTask; import org.elasticsearch.xpack.persistent.PersistentTasksExecutor; import org.elasticsearch.xpack.persistent.PersistentTasksService; -import org.elasticsearch.xpack.persistent.PersistentTasksService.WaitForPersistentTaskStatusListener; import java.io.IOException; import java.util.ArrayList; @@ -84,7 +68,6 @@ import java.util.Collection; import java.util.LinkedList; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Set; import java.util.function.Predicate; @@ -92,599 +75,33 @@ import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager.MAX_OPEN_JOBS_PER_NODE; -public class OpenJobAction extends Action { - - public static final OpenJobAction INSTANCE = new OpenJobAction(); - public static final String NAME = "cluster:admin/xpack/ml/job/open"; - public static final String TASK_NAME = "xpack/ml/job"; - - private OpenJobAction() { - super(NAME); - } - - @Override - public RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client, this); - } - - @Override - public Response newResponse() { - return new Response(); - } - - public static class Request extends MasterNodeRequest implements ToXContentObject { - - public static Request fromXContent(XContentParser parser) { - return parseRequest(null, parser); - } - - public static Request parseRequest(String jobId, XContentParser parser) { - JobParams jobParams = JobParams.PARSER.apply(parser, null); - if (jobId != null) { - jobParams.jobId = jobId; - } - return new Request(jobParams); - } - - private JobParams jobParams; - - public Request(JobParams jobParams) { - this.jobParams = jobParams; - } - - public Request(String jobId) { - this.jobParams = new JobParams(jobId); - } - - public Request(StreamInput 
in) throws IOException { - readFrom(in); - } - - Request() { - } - - public JobParams getJobParams() { - return jobParams; - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - jobParams = new JobParams(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - jobParams.writeTo(out); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - jobParams.toXContent(builder, params); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobParams); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || obj.getClass() != getClass()) { - return false; - } - OpenJobAction.Request other = (OpenJobAction.Request) obj; - return Objects.equals(jobParams, other.jobParams); - } - - @Override - public String toString() { - return Strings.toString(this); - } - } - - public static class JobParams implements PersistentTaskParams { - - /** TODO Remove in 7.0.0 */ - public static final ParseField IGNORE_DOWNTIME = new ParseField("ignore_downtime"); - - public static final ParseField TIMEOUT = new ParseField("timeout"); - public static ObjectParser PARSER = new ObjectParser<>(TASK_NAME, JobParams::new); - - static { - PARSER.declareString(JobParams::setJobId, Job.ID); - PARSER.declareBoolean((p, v) -> {}, IGNORE_DOWNTIME); - PARSER.declareString((params, val) -> - params.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); - } - - public static JobParams fromXContent(XContentParser parser) { - return parseRequest(null, parser); - } - - public static JobParams parseRequest(String jobId, XContentParser parser) { - JobParams params = PARSER.apply(parser, null); - if (jobId != null) { - params.jobId = jobId; - } - return params; - 
} - - private String jobId; - // A big state can take a while to restore. For symmetry with the _close endpoint any - // changes here should be reflected there too. - private TimeValue timeout = MachineLearning.STATE_PERSIST_RESTORE_TIMEOUT; - - JobParams() { - } - - public JobParams(String jobId) { - this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); - } - - public JobParams(StreamInput in) throws IOException { - jobId = in.readString(); - if (in.getVersion().onOrBefore(Version.V_5_5_0)) { - // Read `ignoreDowntime` - in.readBoolean(); - } - timeout = TimeValue.timeValueMillis(in.readVLong()); - } - - public String getJobId() { - return jobId; - } - - public void setJobId(String jobId) { - this.jobId = jobId; - } - - public TimeValue getTimeout() { - return timeout; - } - - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - @Override - public String getWriteableName() { - return TASK_NAME; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(jobId); - if (out.getVersion().onOrBefore(Version.V_5_5_0)) { - // Write `ignoreDowntime` - true by default - out.writeBoolean(true); - } - out.writeVLong(timeout.millis()); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, timeout); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || obj.getClass() != getClass()) { - return false; - } - OpenJobAction.JobParams other = (OpenJobAction.JobParams) obj; - return Objects.equals(jobId, other.jobId) && - Objects.equals(timeout, other.timeout); - } - - @Override - public String toString() { - return 
Strings.toString(this); - } - } - - public static class Response extends AcknowledgedResponse { - public Response() { - super(); - } - - public Response(boolean acknowledged) { - super(acknowledged); - } - - @Override - public void readFrom(StreamInput in) throws IOException { - readAcknowledged(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - writeAcknowledged(out); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AcknowledgedResponse that = (AcknowledgedResponse) o; - return isAcknowledged() == that.isAcknowledged(); - } - - @Override - public int hashCode() { - return Objects.hash(isAcknowledged()); - } - - } - - public static class JobTask extends AllocatedPersistentTask { - - private final String jobId; - private volatile AutodetectProcessManager autodetectProcessManager; - - JobTask(String jobId, long id, String type, String action, TaskId parentTask) { - super(id, type, action, "job-" + jobId, parentTask); - this.jobId = jobId; - } - - public String getJobId() { - return jobId; - } - - @Override - protected void onCancelled() { - String reason = getReasonCancelled(); - killJob(reason); - } - - void killJob(String reason) { - autodetectProcessManager.killProcess(this, false, reason); - } - - void closeJob(String reason) { - autodetectProcessManager.closeJob(this, false, reason); - } - - static boolean match(Task task, String expectedJobId) { - String expectedDescription = "job-" + expectedJobId; - return task instanceof JobTask && expectedDescription.equals(task.getDescription()); - } - - } - - static class RequestBuilder extends ActionRequestBuilder { - - RequestBuilder(ElasticsearchClient client, OpenJobAction action) { - super(client, action, new Request()); - } - } - - // This class extends from TransportMasterNodeAction for cluster state observing purposes. - // The close job api also redirect the elected master node. 
- // The master node will wait for the job to be opened by checking the persistent task's status and then return. - // To ensure that a subsequent close job call will see that same task status (and sanity validation doesn't fail) - // both open and close job apis redirect to the elected master node. - // In case of instability persistent tasks checks may fail and that is ok, in that case all bets are off. - // The open job api is a low through put api, so the fact that we redirect to elected master node shouldn't be an issue. - public static class TransportAction extends TransportMasterNodeAction { - - private final XPackLicenseState licenseState; - private final PersistentTasksService persistentTasksService; - private final Client client; - private final JobProvider jobProvider; - - @Inject - public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, XPackLicenseState licenseState, - ClusterService clusterService, PersistentTasksService persistentTasksService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, Client client, JobProvider jobProvider) { - super(settings, NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, Request::new); - this.licenseState = licenseState; - this.persistentTasksService = persistentTasksService; - this.client = client; - this.jobProvider = jobProvider; - } - - @Override - protected String executor() { - // This api doesn't do heavy or blocking operations (just delegates PersistentTasksService), - // so we can do this on the network thread - return ThreadPool.Names.SAME; - } - - @Override - protected Response newResponse() { - return new Response(); - } - - @Override - protected ClusterBlockException checkBlock(Request request, ClusterState state) { - // We only delegate here to PersistentTasksService, but if there is a metadata writeblock, - // then delegating to PersistentTasksService doesn't make a whole lot of 
sense, - // because PersistentTasksService will then fail. - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); - } - - @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) { - JobParams jobParams = request.getJobParams(); - if (licenseState.isMachineLearningAllowed()) { - // Step 5. Wait for job to be started and respond - ActionListener> finalListener = new ActionListener>() { - @Override - public void onResponse(PersistentTask task) { - waitForJobStarted(task.getId(), jobParams, listener); - } - - @Override - public void onFailure(Exception e) { - if (e instanceof ResourceAlreadyExistsException) { - e = new ElasticsearchStatusException("Cannot open job [" + jobParams.getJobId() + - "] because it has already been opened", RestStatus.CONFLICT, e); - } - listener.onFailure(e); - } - }; - - // Step 4. Start job task - ActionListener establishedMemoryUpdateListener = ActionListener.wrap( - response -> persistentTasksService.startPersistentTask(MlMetadata.jobTaskId(jobParams.jobId), - TASK_NAME, jobParams, finalListener), - listener::onFailure - ); - - // Step 3. 
Update established model memory for pre-6.1 jobs that haven't had it set - ActionListener missingMappingsListener = ActionListener.wrap( - response -> { - MlMetadata mlMetadata = clusterService.state().getMetaData().custom(MlMetadata.TYPE); - Job job = mlMetadata.getJobs().get(jobParams.getJobId()); - if (job != null) { - Version jobVersion = job.getJobVersion(); - Long jobEstablishedModelMemory = job.getEstablishedModelMemory(); - if ((jobVersion == null || jobVersion.before(Version.V_6_1_0)) - && (jobEstablishedModelMemory == null || jobEstablishedModelMemory == 0)) { - jobProvider.getEstablishedMemoryUsage(job.getId(), null, null, establishedModelMemory -> { - if (establishedModelMemory != null && establishedModelMemory > 0) { - JobUpdate update = new JobUpdate.Builder(job.getId()) - .setEstablishedModelMemory(establishedModelMemory).build(); - UpdateJobAction.Request updateRequest = new UpdateJobAction.Request(job.getId(), update); - - executeAsyncWithOrigin(client, ML_ORIGIN, UpdateJobAction.INSTANCE, updateRequest, - establishedMemoryUpdateListener); - } else { - establishedMemoryUpdateListener.onResponse(null); - } - }, listener::onFailure); - } else { - establishedMemoryUpdateListener.onResponse(null); - } - } else { - establishedMemoryUpdateListener.onResponse(null); - } - }, listener::onFailure - ); - - // Step 2. Try adding state doc mapping - ActionListener resultsPutMappingHandler = ActionListener.wrap( - response -> { - addDocMappingIfMissing(AnomalyDetectorsIndex.jobStateIndexName(), ElasticsearchMappings::stateMapping, - state, missingMappingsListener); - }, listener::onFailure - ); - - // Step 1. 
Try adding results doc mapping - addDocMappingIfMissing(AnomalyDetectorsIndex.jobResultsAliasedName(jobParams.jobId), ElasticsearchMappings::docMapping, - state, resultsPutMappingHandler); - } else { - listener.onFailure(LicenseUtils.newComplianceException(XPackPlugin.MACHINE_LEARNING)); - } - } - - void waitForJobStarted(String taskId, JobParams jobParams, ActionListener listener) { - JobPredicate predicate = new JobPredicate(); - persistentTasksService.waitForPersistentTaskStatus(taskId, predicate, jobParams.timeout, - new WaitForPersistentTaskStatusListener() { - @Override - public void onResponse(PersistentTask persistentTask) { - if (predicate.exception != null) { - listener.onFailure(predicate.exception); - } else { - listener.onResponse(new Response(predicate.opened)); - } - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - - @Override - public void onTimeout(TimeValue timeout) { - listener.onFailure(new ElasticsearchException("Opening job [" - + jobParams.getJobId() + "] timed out after [" + timeout + "]")); - } - }); - } - - private void addDocMappingIfMissing(String alias, CheckedSupplier mappingSupplier, ClusterState state, - ActionListener listener) { - AliasOrIndex aliasOrIndex = state.metaData().getAliasAndIndexLookup().get(alias); - if (aliasOrIndex == null) { - // The index has never been created yet - listener.onResponse(true); - return; - } - String[] concreteIndices = aliasOrIndex.getIndices().stream().map(IndexMetaData::getIndex).map(Index::getName) - .toArray(String[]::new); - - String[] indicesThatRequireAnUpdate; - try { - indicesThatRequireAnUpdate = mappingRequiresUpdate(state, concreteIndices, Version.CURRENT, logger); - } catch (IOException e) { - listener.onFailure(e); - return; - } - - if (indicesThatRequireAnUpdate.length > 0) { - try (XContentBuilder mapping = mappingSupplier.get()) { - PutMappingRequest putMappingRequest = new PutMappingRequest(indicesThatRequireAnUpdate); - 
putMappingRequest.type(ElasticsearchMappings.DOC_TYPE); - putMappingRequest.source(mapping); - executeAsyncWithOrigin(client, ML_ORIGIN, PutMappingAction.INSTANCE, putMappingRequest, - ActionListener.wrap(response -> { - if (response.isAcknowledged()) { - listener.onResponse(true); - } else { - listener.onFailure(new ElasticsearchException("Attempt to put missing mapping in indices " - + Arrays.toString(indicesThatRequireAnUpdate) + " was not acknowledged")); - } - }, listener::onFailure)); - } catch (IOException e) { - listener.onFailure(e); - } - } else { - logger.trace("Mappings are uptodate."); - listener.onResponse(true); - } - } - - /** - * Important: the methods of this class must NOT throw exceptions. If they did then the callers - * of endpoints waiting for a condition tested by this predicate would never get a response. - */ - private class JobPredicate implements Predicate> { - - private volatile boolean opened; - private volatile Exception exception; - - @Override - public boolean test(PersistentTask persistentTask) { - JobState jobState = JobState.CLOSED; - if (persistentTask != null) { - JobTaskStatus jobStateStatus = (JobTaskStatus) persistentTask.getStatus(); - jobState = jobStateStatus == null ? JobState.OPENING : jobStateStatus.getState(); - } - switch (jobState) { - case OPENING: - case CLOSED: - return false; - case OPENED: - opened = true; - return true; - case CLOSING: - exception = ExceptionsHelper.conflictStatusException("The job has been " + JobState.CLOSED + " while waiting to be " - + JobState.OPENED); - return true; - case FAILED: - default: - exception = ExceptionsHelper.serverError("Unexpected job state [" + jobState - + "] while waiting for job to be " + JobState.OPENED); - return true; - } - } - } - } - - public static class OpenJobPersistentTasksExecutor extends PersistentTasksExecutor { - - private final AutodetectProcessManager autodetectProcessManager; - - /** - * The maximum number of open jobs can be different on each node. 
However, nodes on older versions - * won't add their setting to the cluster state, so for backwards compatibility with these nodes we - * assume the older node's setting is the same as that of the node running this code. - * TODO: remove this member in 7.0 - */ - private final int fallbackMaxNumberOfOpenJobs; - private volatile int maxConcurrentJobAllocations; - private volatile int maxMachineMemoryPercent; - - public OpenJobPersistentTasksExecutor(Settings settings, ClusterService clusterService, - AutodetectProcessManager autodetectProcessManager) { - super(settings, TASK_NAME, MachineLearning.UTILITY_THREAD_POOL_NAME); - this.autodetectProcessManager = autodetectProcessManager; - this.fallbackMaxNumberOfOpenJobs = AutodetectProcessManager.MAX_OPEN_JOBS_PER_NODE.get(settings); - this.maxConcurrentJobAllocations = MachineLearning.CONCURRENT_JOB_ALLOCATIONS.get(settings); - this.maxMachineMemoryPercent = MachineLearning.MAX_MACHINE_MEMORY_PERCENT.get(settings); - clusterService.getClusterSettings() - .addSettingsUpdateConsumer(MachineLearning.CONCURRENT_JOB_ALLOCATIONS, this::setMaxConcurrentJobAllocations); - clusterService.getClusterSettings() - .addSettingsUpdateConsumer(MachineLearning.MAX_MACHINE_MEMORY_PERCENT, this::setMaxMachineMemoryPercent); - } - - @Override - public Assignment getAssignment(JobParams params, ClusterState clusterState) { - return selectLeastLoadedMlNode(params.getJobId(), clusterState, maxConcurrentJobAllocations, fallbackMaxNumberOfOpenJobs, - maxMachineMemoryPercent, logger); - } - - @Override - public void validate(JobParams params, ClusterState clusterState) { - // If we already know that we can't find an ml node because all ml nodes are running at capacity or - // simply because there are no ml nodes in the cluster then we fail quickly here: - MlMetadata mlMetadata = clusterState.metaData().custom(MlMetadata.TYPE); - OpenJobAction.validate(params.getJobId(), mlMetadata); - Assignment assignment = 
selectLeastLoadedMlNode(params.getJobId(), clusterState, maxConcurrentJobAllocations, - fallbackMaxNumberOfOpenJobs, maxMachineMemoryPercent, logger); - if (assignment.getExecutorNode() == null) { - String msg = "Could not open job because no suitable nodes were found, allocation explanation [" - + assignment.getExplanation() + "]"; - logger.warn("[{}] {}", params.getJobId(), msg); - throw new ElasticsearchStatusException(msg, RestStatus.TOO_MANY_REQUESTS); - } - } - - @Override - protected void nodeOperation(AllocatedPersistentTask task, JobParams params, Task.Status status) { - JobTask jobTask = (JobTask) task; - jobTask.autodetectProcessManager = autodetectProcessManager; - autodetectProcessManager.openJob(jobTask, e2 -> { - if (e2 == null) { - task.markAsCompleted(); - } else { - task.markAsFailed(e2); - } - }); - } - - @Override - protected AllocatedPersistentTask createTask(long id, String type, String action, TaskId parentTaskId, - PersistentTask persistentTask) { - return new JobTask(persistentTask.getParams().getJobId(), id, type, action, parentTaskId); - } - - void setMaxConcurrentJobAllocations(int maxConcurrentJobAllocations) { - logger.info("Changing [{}] from [{}] to [{}]", MachineLearning.CONCURRENT_JOB_ALLOCATIONS.getKey(), - this.maxConcurrentJobAllocations, maxConcurrentJobAllocations); - this.maxConcurrentJobAllocations = maxConcurrentJobAllocations; - } - - void setMaxMachineMemoryPercent(int maxMachineMemoryPercent) { - logger.info("Changing [{}] from [{}] to [{}]", MachineLearning.MAX_MACHINE_MEMORY_PERCENT.getKey(), - this.maxMachineMemoryPercent, maxMachineMemoryPercent); - this.maxMachineMemoryPercent = maxMachineMemoryPercent; - } +/* + This class extends from TransportMasterNodeAction for cluster state observing purposes. + The close job api also redirect the elected master node. + The master node will wait for the job to be opened by checking the persistent task's status and then return. 
+ To ensure that a subsequent close job call will see that same task status (and sanity validation doesn't fail) + both open and close job apis redirect to the elected master node. + In case of instability persistent tasks checks may fail and that is ok, in that case all bets are off. + The open job api is a low through put api, so the fact that we redirect to elected master node shouldn't be an issue. +*/ +public class TransportOpenJobAction extends TransportMasterNodeAction { + + private final XPackLicenseState licenseState; + private final PersistentTasksService persistentTasksService; + private final Client client; + private final JobProvider jobProvider; + + @Inject + public TransportOpenJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, + XPackLicenseState licenseState, ClusterService clusterService, + PersistentTasksService persistentTasksService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, Client client, JobProvider jobProvider) { + super(settings, OpenJobAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, + OpenJobAction.Request::new); + this.licenseState = licenseState; + this.persistentTasksService = persistentTasksService; + this.client = client; + this.jobProvider = jobProvider; } /** @@ -709,14 +126,16 @@ public class OpenJobAction extends Action unavailableIndices = verifyIndicesPrimaryShardsAreActive(jobId, clusterState); if (unavailableIndices.size() != 0) { String reason = "Not opening job [" + jobId + "], because not all primary shards are active for the following indices [" + String.join(",", unavailableIndices) + "]"; logger.debug(reason); - return new Assignment(null, reason); + return new PersistentTasksCustomMetaData.Assignment(null, reason); } List reasons = new LinkedList<>(); @@ -738,7 +157,7 @@ public class OpenJobAction extends Action compatibleJobTypes = Job.getCompatibleJobTypes(node.getVersion()); if 
(compatibleJobTypes.contains(job.getJobType()) == false) { @@ -762,17 +181,18 @@ public class OpenJobAction extends Action> assignedTasks = persistentTasks.findTasks(OpenJobAction.TASK_NAME, + Collection> assignedTasks = + persistentTasks.findTasks(OpenJobAction.TASK_NAME, task -> node.getId().equals(task.getExecutorNode())); numberOfAssignedJobs = assignedTasks.size(); - for (PersistentTask assignedTask : assignedTasks) { + for (PersistentTasksCustomMetaData.PersistentTask assignedTask : assignedTasks) { JobTaskStatus jobTaskState = (JobTaskStatus) assignedTask.getStatus(); if (jobTaskState == null || // executor node didn't have the chance to set job status to OPENING // previous executor node failed and current executor node didn't have the chance to set job status to OPENING jobTaskState.isStatusStale(assignedTask)) { ++numberOfAllocatingJobs; } - String assignedJobId = ((JobParams) assignedTask.getParams()).getJobId(); + String assignedJobId = ((OpenJobAction.JobParams) assignedTask.getParams()).getJobId(); Job assignedJob = mlMetadata.getJobs().get(assignedJobId); assert assignedJob != null; assignedJobMemory += assignedJob.estimateMemoryFootprint(); @@ -861,11 +281,11 @@ public class OpenJobAction extends Action listener) { + OpenJobAction.JobParams jobParams = request.getJobParams(); + if (licenseState.isMachineLearningAllowed()) { + // Step 5. Wait for job to be started and respond + ActionListener> finalListener = + new ActionListener>() { + @Override + public void onResponse(PersistentTasksCustomMetaData.PersistentTask task) { + waitForJobStarted(task.getId(), jobParams, listener); + } + + @Override + public void onFailure(Exception e) { + if (e instanceof ResourceAlreadyExistsException) { + e = new ElasticsearchStatusException("Cannot open job [" + jobParams.getJobId() + + "] because it has already been opened", RestStatus.CONFLICT, e); + } + listener.onFailure(e); + } + }; + + // Step 4. 
Start job task + ActionListener establishedMemoryUpdateListener = ActionListener.wrap( + response -> persistentTasksService.startPersistentTask(MlMetadata.jobTaskId(jobParams.getJobId()), + OpenJobAction.TASK_NAME, jobParams, finalListener), + listener::onFailure + ); + + // Step 3. Update established model memory for pre-6.1 jobs that haven't had it set + ActionListener missingMappingsListener = ActionListener.wrap( + response -> { + MlMetadata mlMetadata = clusterService.state().getMetaData().custom(MLMetadataField.TYPE); + Job job = mlMetadata.getJobs().get(jobParams.getJobId()); + if (job != null) { + Version jobVersion = job.getJobVersion(); + Long jobEstablishedModelMemory = job.getEstablishedModelMemory(); + if ((jobVersion == null || jobVersion.before(Version.V_6_1_0)) + && (jobEstablishedModelMemory == null || jobEstablishedModelMemory == 0)) { + jobProvider.getEstablishedMemoryUsage(job.getId(), null, null, establishedModelMemory -> { + if (establishedModelMemory != null && establishedModelMemory > 0) { + JobUpdate update = new JobUpdate.Builder(job.getId()) + .setEstablishedModelMemory(establishedModelMemory).build(); + UpdateJobAction.Request updateRequest = new UpdateJobAction.Request(job.getId(), update); + + executeAsyncWithOrigin(client, ML_ORIGIN, UpdateJobAction.INSTANCE, updateRequest, + establishedMemoryUpdateListener); + } else { + establishedMemoryUpdateListener.onResponse(null); + } + }, listener::onFailure); + } else { + establishedMemoryUpdateListener.onResponse(null); + } + } else { + establishedMemoryUpdateListener.onResponse(null); + } + }, listener::onFailure + ); + + // Step 2. Try adding state doc mapping + ActionListener resultsPutMappingHandler = ActionListener.wrap( + response -> { + addDocMappingIfMissing(AnomalyDetectorsIndex.jobStateIndexName(), ElasticsearchMappings::stateMapping, + state, missingMappingsListener); + }, listener::onFailure + ); + + // Step 1. 
Try adding results doc mapping + addDocMappingIfMissing(AnomalyDetectorsIndex.jobResultsAliasedName(jobParams.getJobId()), ElasticsearchMappings::docMapping, + state, resultsPutMappingHandler); + } else { + listener.onFailure(LicenseUtils.newComplianceException(XpackField.MACHINE_LEARNING)); + } + } + + void waitForJobStarted(String taskId, OpenJobAction.JobParams jobParams, ActionListener listener) { + JobPredicate predicate = new JobPredicate(); + persistentTasksService.waitForPersistentTaskStatus(taskId, predicate, jobParams.getTimeout(), + new PersistentTasksService.WaitForPersistentTaskStatusListener() { + @Override + public void onResponse(PersistentTasksCustomMetaData.PersistentTask persistentTask) { + if (predicate.exception != null) { + listener.onFailure(predicate.exception); + } else { + listener.onResponse(new OpenJobAction.Response(predicate.opened)); + } + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + + @Override + public void onTimeout(TimeValue timeout) { + listener.onFailure(new ElasticsearchException("Opening job [" + + jobParams.getJobId() + "] timed out after [" + timeout + "]")); + } + }); + } + + private void addDocMappingIfMissing(String alias, CheckedSupplier mappingSupplier, ClusterState state, + ActionListener listener) { + AliasOrIndex aliasOrIndex = state.metaData().getAliasAndIndexLookup().get(alias); + if (aliasOrIndex == null) { + // The index has never been created yet + listener.onResponse(true); + return; + } + String[] concreteIndices = aliasOrIndex.getIndices().stream().map(IndexMetaData::getIndex).map(Index::getName) + .toArray(String[]::new); + + String[] indicesThatRequireAnUpdate; + try { + indicesThatRequireAnUpdate = mappingRequiresUpdate(state, concreteIndices, Version.CURRENT, logger); + } catch (IOException e) { + listener.onFailure(e); + return; + } + + if (indicesThatRequireAnUpdate.length > 0) { + try (XContentBuilder mapping = mappingSupplier.get()) { + PutMappingRequest 
putMappingRequest = new PutMappingRequest(indicesThatRequireAnUpdate); + putMappingRequest.type(ElasticsearchMappings.DOC_TYPE); + putMappingRequest.source(mapping); + executeAsyncWithOrigin(client, ML_ORIGIN, PutMappingAction.INSTANCE, putMappingRequest, + ActionListener.wrap(response -> { + if (response.isAcknowledged()) { + listener.onResponse(true); + } else { + listener.onFailure(new ElasticsearchException("Attempt to put missing mapping in indices " + + Arrays.toString(indicesThatRequireAnUpdate) + " was not acknowledged")); + } + }, listener::onFailure)); + } catch (IOException e) { + listener.onFailure(e); + } + } else { + logger.trace("Mappings are uptodate."); + listener.onResponse(true); + } + } + + public static class OpenJobPersistentTasksExecutor extends PersistentTasksExecutor { + + private final AutodetectProcessManager autodetectProcessManager; + + /** + * The maximum number of open jobs can be different on each node. However, nodes on older versions + * won't add their setting to the cluster state, so for backwards compatibility with these nodes we + * assume the older node's setting is the same as that of the node running this code. 
+ * TODO: remove this member in 7.0 + */ + private final int fallbackMaxNumberOfOpenJobs; + private volatile int maxConcurrentJobAllocations; + private volatile int maxMachineMemoryPercent; + + public OpenJobPersistentTasksExecutor(Settings settings, ClusterService clusterService, + AutodetectProcessManager autodetectProcessManager) { + super(settings, OpenJobAction.TASK_NAME, MachineLearning.UTILITY_THREAD_POOL_NAME); + this.autodetectProcessManager = autodetectProcessManager; + this.fallbackMaxNumberOfOpenJobs = AutodetectProcessManager.MAX_OPEN_JOBS_PER_NODE.get(settings); + this.maxConcurrentJobAllocations = MachineLearning.CONCURRENT_JOB_ALLOCATIONS.get(settings); + this.maxMachineMemoryPercent = MachineLearning.MAX_MACHINE_MEMORY_PERCENT.get(settings); + clusterService.getClusterSettings() + .addSettingsUpdateConsumer(MachineLearning.CONCURRENT_JOB_ALLOCATIONS, this::setMaxConcurrentJobAllocations); + clusterService.getClusterSettings() + .addSettingsUpdateConsumer(MachineLearning.MAX_MACHINE_MEMORY_PERCENT, this::setMaxMachineMemoryPercent); + } + + @Override + public PersistentTasksCustomMetaData.Assignment getAssignment(OpenJobAction.JobParams params, ClusterState clusterState) { + return selectLeastLoadedMlNode(params.getJobId(), clusterState, maxConcurrentJobAllocations, fallbackMaxNumberOfOpenJobs, + maxMachineMemoryPercent, logger); + } + + @Override + public void validate(OpenJobAction.JobParams params, ClusterState clusterState) { + // If we already know that we can't find an ml node because all ml nodes are running at capacity or + // simply because there are no ml nodes in the cluster then we fail quickly here: + MlMetadata mlMetadata = clusterState.metaData().custom(MLMetadataField.TYPE); + TransportOpenJobAction.validate(params.getJobId(), mlMetadata); + PersistentTasksCustomMetaData.Assignment assignment = selectLeastLoadedMlNode(params.getJobId(), clusterState, + maxConcurrentJobAllocations, fallbackMaxNumberOfOpenJobs, maxMachineMemoryPercent, 
logger); + if (assignment.getExecutorNode() == null) { + String msg = "Could not open job because no suitable nodes were found, allocation explanation [" + + assignment.getExplanation() + "]"; + logger.warn("[{}] {}", params.getJobId(), msg); + throw new ElasticsearchStatusException(msg, RestStatus.TOO_MANY_REQUESTS); + } + } + + @Override + protected void nodeOperation(AllocatedPersistentTask task, OpenJobAction.JobParams params, Task.Status status) { + JobTask jobTask = (JobTask) task; + jobTask.autodetectProcessManager = autodetectProcessManager; + autodetectProcessManager.openJob(jobTask, e2 -> { + if (e2 == null) { + task.markAsCompleted(); + } else { + task.markAsFailed(e2); + } + }); + } + + @Override + protected AllocatedPersistentTask createTask(long id, String type, String action, TaskId parentTaskId, + PersistentTasksCustomMetaData.PersistentTask persistentTask) { + return new JobTask(persistentTask.getParams().getJobId(), id, type, action, parentTaskId); + } + + void setMaxConcurrentJobAllocations(int maxConcurrentJobAllocations) { + logger.info("Changing [{}] from [{}] to [{}]", MachineLearning.CONCURRENT_JOB_ALLOCATIONS.getKey(), + this.maxConcurrentJobAllocations, maxConcurrentJobAllocations); + this.maxConcurrentJobAllocations = maxConcurrentJobAllocations; + } + + void setMaxMachineMemoryPercent(int maxMachineMemoryPercent) { + logger.info("Changing [{}] from [{}] to [{}]", MachineLearning.MAX_MACHINE_MEMORY_PERCENT.getKey(), + this.maxMachineMemoryPercent, maxMachineMemoryPercent); + this.maxMachineMemoryPercent = maxMachineMemoryPercent; + } + } + + public static class JobTask extends AllocatedPersistentTask implements OpenJobAction.JobTaskMatcher { + + private final String jobId; + private volatile AutodetectProcessManager autodetectProcessManager; + + JobTask(String jobId, long id, String type, String action, TaskId parentTask) { + super(id, type, action, "job-" + jobId, parentTask); + this.jobId = jobId; + } + + public String getJobId() { + 
return jobId; + } + + @Override + protected void onCancelled() { + String reason = getReasonCancelled(); + killJob(reason); + } + + void killJob(String reason) { + autodetectProcessManager.killProcess(this, false, reason); + } + + void closeJob(String reason) { + autodetectProcessManager.closeJob(this, false, reason); + } + + } + + /** + * Important: the methods of this class must NOT throw exceptions. If they did then the callers + * of endpoints waiting for a condition tested by this predicate would never get a response. + */ + private class JobPredicate implements Predicate> { + + private volatile boolean opened; + private volatile Exception exception; + + @Override + public boolean test(PersistentTasksCustomMetaData.PersistentTask persistentTask) { + JobState jobState = JobState.CLOSED; + if (persistentTask != null) { + JobTaskStatus jobStateStatus = (JobTaskStatus) persistentTask.getStatus(); + jobState = jobStateStatus == null ? JobState.OPENING : jobStateStatus.getState(); + } + switch (jobState) { + case OPENING: + case CLOSED: + return false; + case OPENED: + opened = true; + return true; + case CLOSING: + exception = ExceptionsHelper.conflictStatusException("The job has been " + JobState.CLOSED + " while waiting to be " + + JobState.OPENED); + return true; + case FAILED: + default: + exception = ExceptionsHelper.serverError("Unexpected job state [" + jobState + + "] while waiting for job to be " + JobState.OPENED); + return true; + } + } + } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java new file mode 100644 index 00000000000..bb5406cadfe --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MlMetaIndex; +import org.elasticsearch.xpack.ml.calendars.SpecialEvent; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + +public class TransportPostCalendarEventsAction extends HandledTransportAction { + + private final Client client; + private final JobProvider jobProvider; + + @Inject + public TransportPostCalendarEventsAction(Settings settings, ThreadPool threadPool, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + Client client, JobProvider jobProvider) { + super(settings, 
PostCalendarEventsAction.NAME, threadPool, transportService, actionFilters, + indexNameExpressionResolver, PostCalendarEventsAction.Request::new); + this.client = client; + this.jobProvider = jobProvider; + } + + @Override + protected void doExecute(PostCalendarEventsAction.Request request, + ActionListener listener) { + List events = request.getSpecialEvents(); + + ActionListener calendarExistsListener = ActionListener.wrap( + r -> { + BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); + + for (SpecialEvent event: events) { + IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE); + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + indexRequest.source(event.toXContent(builder, + new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, + "true")))); + } catch (IOException e) { + throw new IllegalStateException("Failed to serialise special event", e); + } + bulkRequestBuilder.add(indexRequest); + } + + bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), + new ActionListener() { + @Override + public void onResponse(BulkResponse response) { + listener.onResponse(new PostCalendarEventsAction.Response(events)); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure( + ExceptionsHelper.serverError("Error indexing special event", e)); + } + }); + }, + listener::onFailure); + + checkCalendarExists(request.getCalendarId(), calendarExistsListener); + } + + private void checkCalendarExists(String calendarId, ActionListener listener) { + jobProvider.calendar(calendarId, ActionListener.wrap( + c -> listener.onResponse(true), + listener::onFailure + )); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostDataAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostDataAction.java new file mode 
100644 index 00000000000..4395233fddb --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostDataAction.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; +import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams; +import org.elasticsearch.xpack.ml.job.process.autodetect.params.TimeRange; + +import java.io.IOException; +import java.util.Optional; + +public class TransportPostDataAction extends TransportJobTaskAction { + + @Inject + public TransportPostDataAction(Settings settings, TransportService transportService, ThreadPool threadPool, + ClusterService clusterService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + AutodetectProcessManager processManager) { + super(settings, PostDataAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + PostDataAction.Request::new, PostDataAction.Response::new, ThreadPool.Names.SAME, processManager); + // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread + } + + @Override + protected PostDataAction.Response 
readTaskResponse(StreamInput in) throws IOException { + PostDataAction.Response response = new PostDataAction.Response(); + response.readFrom(in); + return response; + } + + @Override + protected void taskOperation(PostDataAction.Request request, TransportOpenJobAction.JobTask task, + ActionListener listener) { + TimeRange timeRange = TimeRange.builder().startTime(request.getResetStart()).endTime(request.getResetEnd()).build(); + DataLoadParams params = new DataLoadParams(timeRange, Optional.ofNullable(request.getDataDescription())); + try { + processManager.processData(task, request.getContent().streamInput(), request.getXContentType(), params, (dataCounts, e) -> { + if (dataCounts != null) { + listener.onResponse(new PostDataAction.Response(dataCounts)); + } else { + listener.onFailure(e); + } + }); + } catch (Exception e) { + listener.onFailure(e); + } + } + +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java new file mode 100644 index 00000000000..4c57e5b0daa --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MLMetadataField; +import org.elasticsearch.xpack.ml.MlClientHelper; +import org.elasticsearch.xpack.ml.MlMetadata; +import org.elasticsearch.xpack.ml.datafeed.ChunkingConfig; +import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; +import org.elasticsearch.xpack.ml.job.config.Job; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import java.io.BufferedReader; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +public class TransportPreviewDatafeedAction extends HandledTransportAction { + + private final Client client; + private final ClusterService clusterService; + + @Inject + public TransportPreviewDatafeedAction(Settings settings, ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + Client client, ClusterService clusterService) { + super(settings, PreviewDatafeedAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, + PreviewDatafeedAction.Request::new); + this.client 
= client; + this.clusterService = clusterService; + } + + @Override + protected void doExecute(PreviewDatafeedAction.Request request, ActionListener listener) { + MlMetadata mlMetadata = clusterService.state().getMetaData().custom(MLMetadataField.TYPE); + DatafeedConfig datafeed = mlMetadata.getDatafeed(request.getDatafeedId()); + if (datafeed == null) { + throw ExceptionsHelper.missingDatafeedException(request.getDatafeedId()); + } + Job job = mlMetadata.getJobs().get(datafeed.getJobId()); + if (job == null) { + throw ExceptionsHelper.missingJobException(datafeed.getJobId()); + } + DatafeedConfig.Builder datafeedWithAutoChunking = new DatafeedConfig.Builder(datafeed); + datafeedWithAutoChunking.setChunkingConfig(ChunkingConfig.newAuto()); + Map headers = threadPool.getThreadContext().getHeaders().entrySet().stream() + .filter(e -> MlClientHelper.SECURITY_HEADER_FILTERS.contains(e.getKey())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + datafeedWithAutoChunking.setHeaders(headers); + // NB: this is using the client from the transport layer, NOT the internal client. + // This is important because it means the datafeed search will fail if the user + // requesting the preview doesn't have permission to search the relevant indices. + DataExtractorFactory.create(client, datafeedWithAutoChunking.build(), job, new ActionListener() { + @Override + public void onResponse(DataExtractorFactory dataExtractorFactory) { + DataExtractor dataExtractor = dataExtractorFactory.newExtractor(0, Long.MAX_VALUE); + threadPool.generic().execute(() -> previewDatafeed(dataExtractor, listener)); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + + } + + /** Visible for testing */ + static void previewDatafeed(DataExtractor dataExtractor, ActionListener listener) { + try { + Optional inputStream = dataExtractor.next(); + // DataExtractor returns single-line JSON but without newline characters between objects. 
+ // Instead, it has a space between objects due to how JSON XContentBuilder works. + // In order to return a proper JSON array from preview, we surround with square brackets and + // we stick in a comma between objects. + // Also, the stream is expected to be a single line but in case it is not, we join lines + // using space to ensure the comma insertion works correctly. + StringBuilder responseBuilder = new StringBuilder("["); + if (inputStream.isPresent()) { + try (BufferedReader buffer = new BufferedReader(new InputStreamReader(inputStream.get(), StandardCharsets.UTF_8))) { + responseBuilder.append(buffer.lines().collect(Collectors.joining(" ")).replace("} {", "},{")); + } + } + responseBuilder.append("]"); + listener.onResponse(new PreviewDatafeedAction.Response( + new BytesArray(responseBuilder.toString().getBytes(StandardCharsets.UTF_8)))); + } catch (Exception e) { + listener.onFailure(e); + } finally { + dataExtractor.cancel(); + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java new file mode 100644 index 00000000000..1556ae47637 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License.
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MLMetadataField; +import org.elasticsearch.xpack.ml.MlMetaIndex; +import org.elasticsearch.xpack.ml.MlMetadata; +import org.elasticsearch.xpack.ml.calendars.Calendar; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.function.Consumer; + +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + +public class TransportPutCalendarAction extends HandledTransportAction { + + private final Client client; + private final ClusterService clusterService; + + @Inject + public TransportPutCalendarAction(Settings settings, ThreadPool threadPool, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + 
Client client, ClusterService clusterService) { + super(settings, PutCalendarAction.NAME, threadPool, transportService, actionFilters, + indexNameExpressionResolver, PutCalendarAction.Request::new); + this.client = client; + this.clusterService = clusterService; + } + + @Override + protected void doExecute(PutCalendarAction.Request request, ActionListener listener) { + Calendar calendar = request.getCalendar(); + + checkJobsExist(calendar.getJobIds(), listener::onFailure); + + IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, calendar.documentId()); + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + indexRequest.source(calendar.toXContent(builder, + new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, "true")))); + } catch (IOException e) { + throw new IllegalStateException("Failed to serialise calendar with id [" + calendar.getId() + "]", e); + } + + // Make it an error to overwrite an existing calendar + indexRequest.opType(DocWriteRequest.OpType.CREATE); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, + new ActionListener() { + @Override + public void onResponse(IndexResponse indexResponse) { + listener.onResponse(new PutCalendarAction.Response(calendar)); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure( + ExceptionsHelper.serverError("Error putting calendar with id [" + calendar.getId() + "]", e)); + } + }); + } + + private void checkJobsExist(List jobIds, Consumer errorHandler) { + ClusterState state = clusterService.state(); + MlMetadata mlMetadata = state.getMetaData().custom(MLMetadataField.TYPE); + for (String jobId: jobIds) { + Set jobs = mlMetadata.expandJobIds(jobId, true); + if (jobs.isEmpty()) { + errorHandler.accept(ExceptionsHelper.missingJobException(jobId)); + return; + } + } + } +} diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java new file mode 100644 index 00000000000..651de091a03 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java @@ -0,0 +1,164 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.AckedClusterStateUpdateTask; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; +import org.elasticsearch.xpack.ml.MLMetadataField; +import 
org.elasticsearch.xpack.ml.MlMetadata; +import org.elasticsearch.xpack.security.SecurityContext; +import org.elasticsearch.xpack.security.action.user.HasPrivilegesAction; +import org.elasticsearch.xpack.security.action.user.HasPrivilegesRequest; +import org.elasticsearch.xpack.security.action.user.HasPrivilegesResponse; +import org.elasticsearch.xpack.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.security.support.Exceptions; + +import java.io.IOException; + +public class TransportPutDatafeedAction extends TransportMasterNodeAction { + + private final XPackLicenseState licenseState; + private final Client client; + private final boolean securityEnabled; + private final SecurityContext securityContext; + + @Inject + public TransportPutDatafeedAction(Settings settings, TransportService transportService, + ClusterService clusterService, ThreadPool threadPool, Client client, + XPackLicenseState licenseState, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver) { + super(settings, PutDatafeedAction.NAME, transportService, clusterService, threadPool, + actionFilters, indexNameExpressionResolver, PutDatafeedAction.Request::new); + this.licenseState = licenseState; + this.client = client; + this.securityEnabled = XPackSettings.SECURITY_ENABLED.get(settings); + this.securityContext = securityEnabled ? 
new SecurityContext(settings, threadPool.getThreadContext()) : null; + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected PutDatafeedAction.Response newResponse() { + return new PutDatafeedAction.Response(); + } + + @Override + protected void masterOperation(PutDatafeedAction.Request request, ClusterState state, + ActionListener listener) { + // If security is enabled only create the datafeed if the user requesting creation has + // permission to read the indices the datafeed is going to read from + if (securityEnabled) { + final String username = securityContext.getUser().principal(); + ActionListener privResponseListener = ActionListener.wrap( + r -> handlePrivsResponse(username, request, r, listener), + listener::onFailure); + + HasPrivilegesRequest privRequest = new HasPrivilegesRequest(); + privRequest.username(username); + privRequest.clusterPrivileges(Strings.EMPTY_ARRAY); + // We just check for permission to use the search action. In reality we'll also + // use the scroll action, but that's considered an implementation detail. 
+ privRequest.indexPrivileges(RoleDescriptor.IndicesPrivileges.builder() + .indices(request.getDatafeed().getIndices().toArray(new String[0])) + .privileges(SearchAction.NAME) + .build()); + + client.execute(HasPrivilegesAction.INSTANCE, privRequest, privResponseListener); + } else { + putDatafeed(request, listener); + } + } + + private void handlePrivsResponse(String username, PutDatafeedAction.Request request, + HasPrivilegesResponse response, + ActionListener listener) throws IOException { + if (response.isCompleteMatch()) { + putDatafeed(request, listener); + } else { + XContentBuilder builder = JsonXContent.contentBuilder(); + builder.startObject(); + for (HasPrivilegesResponse.IndexPrivileges index : response.getIndexPrivileges()) { + builder.field(index.getIndex()); + builder.map(index.getPrivileges()); + } + builder.endObject(); + + listener.onFailure(Exceptions.authorizationError("Cannot create datafeed [{}]" + + " because user {} lacks permissions on the indices to be" + + " searched: {}", + request.getDatafeed().getId(), username, builder.string())); + } + } + + private void putDatafeed(PutDatafeedAction.Request request, ActionListener listener) { + + clusterService.submitStateUpdateTask( + "put-datafeed-" + request.getDatafeed().getId(), + new AckedClusterStateUpdateTask(request, listener) { + + @Override + protected PutDatafeedAction.Response newResponse(boolean acknowledged) { + if (acknowledged) { + logger.info("Created datafeed [{}]", request.getDatafeed().getId()); + } + return new PutDatafeedAction.Response(acknowledged, request.getDatafeed()); + } + + @Override + public ClusterState execute(ClusterState currentState) { + return putDatafeed(request, currentState); + } + }); + } + + private ClusterState putDatafeed(PutDatafeedAction.Request request, ClusterState clusterState) { + MlMetadata currentMetadata = clusterState.getMetaData().custom(MLMetadataField.TYPE); + MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata) + 
.putDatafeed(request.getDatafeed(), threadPool.getThreadContext()).build(); + return ClusterState.builder(clusterState).metaData( + MetaData.builder(clusterState.getMetaData()).putCustom(MLMetadataField.TYPE, newMetadata).build()) + .build(); + } + + @Override + protected ClusterBlockException checkBlock(PutDatafeedAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } + + @Override + protected void doExecute(Task task, PutDatafeedAction.Request request, ActionListener listener) { + if (licenseState.isMachineLearningAllowed()) { + super.doExecute(task, request, listener); + } else { + listener.onFailure(LicenseUtils.newComplianceException(XpackField.MACHINE_LEARNING)); + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java new file mode 100644 index 00000000000..046a2cea951 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MlMetaIndex; +import org.elasticsearch.xpack.ml.job.config.MlFilter; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Collections; + +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + +public class TransportPutFilterAction extends HandledTransportAction { + + private final Client client; + + @Inject + public TransportPutFilterAction(Settings settings, ThreadPool threadPool, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + Client client) { + super(settings, PutFilterAction.NAME, threadPool, transportService, actionFilters, + indexNameExpressionResolver, PutFilterAction.Request::new); + this.client = client; + } + + @Override + protected void doExecute(PutFilterAction.Request request, ActionListener listener) { + MlFilter filter = request.getFilter(); 
+ IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filter.documentId()); + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, "true")); + indexRequest.source(filter.toXContent(builder, params)); + } catch (IOException e) { + throw new IllegalStateException("Failed to serialise filter with id [" + filter.getId() + "]", e); + } + BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); + bulkRequestBuilder.add(indexRequest); + bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), + new ActionListener() { + @Override + public void onResponse(BulkResponse indexResponse) { + listener.onResponse(new PutFilterAction.Response()); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(ExceptionsHelper.serverError("Error putting filter with id [" + filter.getId() + "]", e)); + } + }); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java new file mode 100644 index 00000000000..1e97fcf4172 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.XpackField; +import org.elasticsearch.xpack.ml.job.JobManager; + +public class TransportPutJobAction extends TransportMasterNodeAction { + + private final JobManager jobManager; + private final XPackLicenseState licenseState; + + @Inject + public TransportPutJobAction(Settings settings, TransportService transportService, ClusterService clusterService, + ThreadPool threadPool, XPackLicenseState licenseState, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, JobManager jobManager) { + super(settings, PutJobAction.NAME, transportService, clusterService, threadPool, actionFilters, + indexNameExpressionResolver, PutJobAction.Request::new); + this.licenseState = licenseState; + this.jobManager = jobManager; + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected PutJobAction.Response newResponse() { + return new PutJobAction.Response(); + } + + @Override + protected void masterOperation(PutJobAction.Request request, ClusterState state, + ActionListener listener) throws Exception { + 
jobManager.putJob(request, state, listener); + } + + @Override + protected ClusterBlockException checkBlock(PutJobAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } + + @Override + protected void doExecute(Task task, PutJobAction.Request request, ActionListener listener) { + if (licenseState.isMachineLearningAllowed()) { + super.doExecute(task, request, listener); + } else { + listener.onFailure(LicenseUtils.newComplianceException(XpackField.MACHINE_LEARNING)); + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java new file mode 100644 index 00000000000..efe3d790b98 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java @@ -0,0 +1,161 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.config.Job; +import org.elasticsearch.xpack.ml.job.config.JobState; +import org.elasticsearch.xpack.ml.job.messages.Messages; +import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; +import org.elasticsearch.xpack.ml.job.persistence.JobDataDeleter; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import java.util.Date; +import java.util.function.Consumer; + +public class TransportRevertModelSnapshotAction extends TransportMasterNodeAction { + + private final Client client; + private final JobManager jobManager; + private final JobProvider jobProvider; + private final JobDataCountsPersister jobDataCountsPersister; + + @Inject + public TransportRevertModelSnapshotAction(Settings settings, ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + JobManager jobManager, JobProvider jobProvider, + 
ClusterService clusterService, Client client, JobDataCountsPersister jobDataCountsPersister) { + super(settings, RevertModelSnapshotAction.NAME, transportService, clusterService, threadPool, actionFilters, + indexNameExpressionResolver, RevertModelSnapshotAction.Request::new); + this.client = client; + this.jobManager = jobManager; + this.jobProvider = jobProvider; + this.jobDataCountsPersister = jobDataCountsPersister; + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected RevertModelSnapshotAction.Response newResponse() { + return new RevertModelSnapshotAction.Response(); + } + + @Override + protected void masterOperation(RevertModelSnapshotAction.Request request, ClusterState state, + ActionListener listener) throws Exception { + logger.debug("Received request to revert to snapshot id '{}' for job '{}', deleting intervening results: {}", + request.getSnapshotId(), request.getJobId(), request.getDeleteInterveningResults()); + + Job job = JobManager.getJobOrThrowIfUnknown(request.getJobId(), clusterService.state()); + JobState jobState = jobManager.getJobState(job.getId()); + if (jobState.equals(JobState.CLOSED) == false) { + throw ExceptionsHelper.conflictStatusException(Messages.getMessage(Messages.REST_JOB_NOT_CLOSED_REVERT)); + } + + getModelSnapshot(request, jobProvider, modelSnapshot -> { + ActionListener wrappedListener = listener; + if (request.getDeleteInterveningResults()) { + wrappedListener = wrapDeleteOldDataListener(wrappedListener, modelSnapshot, request.getJobId()); + wrappedListener = wrapRevertDataCountsListener(wrappedListener, modelSnapshot, request.getJobId()); + } + jobManager.revertSnapshot(request, wrappedListener, modelSnapshot); + }, listener::onFailure); + } + + private void getModelSnapshot(RevertModelSnapshotAction.Request request, JobProvider provider, Consumer handler, + Consumer errorHandler) { + logger.info("Reverting to snapshot '" + request.getSnapshotId() + "'"); + + 
provider.getModelSnapshot(request.getJobId(), request.getSnapshotId(), modelSnapshot -> { + if (modelSnapshot == null) { + throw new ResourceNotFoundException(Messages.getMessage(Messages.REST_NO_SUCH_MODEL_SNAPSHOT, request.getSnapshotId(), + request.getJobId())); + } + handler.accept(modelSnapshot.result); + }, errorHandler); + } + + private ActionListener wrapDeleteOldDataListener( + ActionListener listener, + ModelSnapshot modelSnapshot, String jobId) { + + // If we need to delete buckets that occurred after the snapshot, we + // wrap the listener with one that invokes the OldDataRemover on + // acknowledged responses + return ActionListener.wrap(response -> { + if (response.isAcknowledged()) { + Date deleteAfter = modelSnapshot.getLatestResultTimeStamp(); + logger.debug("Removing intervening records: last record: " + deleteAfter + ", last result: " + + modelSnapshot.getLatestResultTimeStamp()); + + logger.info("Deleting results after '" + deleteAfter + "'"); + + JobDataDeleter dataDeleter = new JobDataDeleter(client, jobId); + dataDeleter.deleteResultsFromTime(deleteAfter.getTime() + 1, new ActionListener() { + @Override + public void onResponse(Boolean success) { + listener.onResponse(response); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + }, listener::onFailure); + } + + private ActionListener wrapRevertDataCountsListener( + ActionListener listener, + ModelSnapshot modelSnapshot, String jobId) { + + + return ActionListener.wrap(response -> { + if (response.isAcknowledged()) { + jobProvider.dataCounts(jobId, counts -> { + counts.setLatestRecordTimeStamp(modelSnapshot.getLatestRecordTimeStamp()); + jobDataCountsPersister.persistDataCounts(jobId, counts, new ActionListener() { + @Override + public void onResponse(Boolean aBoolean) { + listener.onResponse(response); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + }, listener::onFailure); + } + }, 
listener::onFailure); + } + + @Override + protected ClusterBlockException checkBlock(RevertModelSnapshotAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java new file mode 100644 index 00000000000..96761189ccd --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java @@ -0,0 +1,283 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import 
org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.XpackField; +import org.elasticsearch.xpack.ml.MLMetadataField; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MlMetadata; +import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.ml.datafeed.DatafeedJobValidator; +import org.elasticsearch.xpack.ml.datafeed.DatafeedManager; +import org.elasticsearch.xpack.ml.datafeed.DatafeedNodeSelector; +import org.elasticsearch.xpack.ml.datafeed.DatafeedState; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; +import org.elasticsearch.xpack.ml.job.config.Job; +import org.elasticsearch.xpack.ml.job.config.JobState; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.persistent.AllocatedPersistentTask; +import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.xpack.persistent.PersistentTasksExecutor; +import org.elasticsearch.xpack.persistent.PersistentTasksService; + +import java.util.function.Predicate; + +/* This class extends from TransportMasterNodeAction for cluster state observing purposes. + The stop datafeed api also redirect the elected master node. + The master node will wait for the datafeed to be started by checking the persistent task's status and then return. + To ensure that a subsequent stop datafeed call will see that same task status (and sanity validation doesn't fail) + both start and stop datafeed apis redirect to the elected master node. + In case of instability persistent tasks checks may fail and that is ok, in that case all bets are off. + The start datafeed api is a low through put api, so the fact that we redirect to elected master node shouldn't be an issue. 
+ */ +public class TransportStartDatafeedAction extends TransportMasterNodeAction { + + private final Client client; + private final XPackLicenseState licenseState; + private final PersistentTasksService persistentTasksService; + + @Inject + public TransportStartDatafeedAction(Settings settings, TransportService transportService, ThreadPool threadPool, + ClusterService clusterService, XPackLicenseState licenseState, + PersistentTasksService persistentTasksService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + Client client) { + super(settings, StartDatafeedAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, + StartDatafeedAction.Request::new); + this.licenseState = licenseState; + this.persistentTasksService = persistentTasksService; + this.client = client; + } + + static void validate(String datafeedId, MlMetadata mlMetadata, PersistentTasksCustomMetaData tasks) { + DatafeedConfig datafeed = (mlMetadata == null) ? 
null : mlMetadata.getDatafeed(datafeedId); + if (datafeed == null) { + throw ExceptionsHelper.missingDatafeedException(datafeedId); + } + Job job = mlMetadata.getJobs().get(datafeed.getJobId()); + if (job == null) { + throw ExceptionsHelper.missingJobException(datafeed.getJobId()); + } + DatafeedJobValidator.validate(datafeed, job); + JobState jobState = MlMetadata.getJobState(datafeed.getJobId(), tasks); + if (jobState.isAnyOf(JobState.OPENING, JobState.OPENED) == false) { + throw ExceptionsHelper.conflictStatusException("cannot start datafeed [" + datafeedId + "] because job [" + job.getId() + + "] is " + jobState); + } + } + + @Override + protected String executor() { + // This api doesn't do heavy or blocking operations (just delegates PersistentTasksService), + // so we can do this on the network thread + return ThreadPool.Names.SAME; + } + + @Override + protected StartDatafeedAction.Response newResponse() { + return new StartDatafeedAction.Response(); + } + + @Override + protected void masterOperation(StartDatafeedAction.Request request, ClusterState state, + ActionListener listener) { + StartDatafeedAction.DatafeedParams params = request.getParams(); + if (licenseState.isMachineLearningAllowed()) { + ActionListener> finalListener = + new ActionListener>() { + @Override + public void onResponse(PersistentTasksCustomMetaData.PersistentTask persistentTask) { + waitForDatafeedStarted(persistentTask.getId(), params, listener); + } + + @Override + public void onFailure(Exception e) { + if (e instanceof ResourceAlreadyExistsException) { + logger.debug(e); + e = new ElasticsearchStatusException("cannot start datafeed [" + params.getDatafeedId() + + "] because it has already been started", RestStatus.CONFLICT); + } + listener.onFailure(e); + } + }; + + // Verify data extractor factory can be created, then start persistent task + MlMetadata mlMetadata = state.metaData().custom(MLMetadataField.TYPE); + PersistentTasksCustomMetaData tasks = 
state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + validate(params.getDatafeedId(), mlMetadata, tasks); + DatafeedConfig datafeed = mlMetadata.getDatafeed(params.getDatafeedId()); + Job job = mlMetadata.getJobs().get(datafeed.getJobId()); + DataExtractorFactory.create(client, datafeed, job, ActionListener.wrap( + dataExtractorFactory -> + persistentTasksService.startPersistentTask(MLMetadataField.datafeedTaskId(params.getDatafeedId()), + StartDatafeedAction.TASK_NAME, params, finalListener) + , listener::onFailure)); + } else { + listener.onFailure(LicenseUtils.newComplianceException(XpackField.MACHINE_LEARNING)); + } + } + + @Override + protected ClusterBlockException checkBlock(StartDatafeedAction.Request request, ClusterState state) { + // We only delegate here to PersistentTasksService, but if there is a metadata writeblock, + // then delagating to PersistentTasksService doesn't make a whole lot of sense, + // because PersistentTasksService will then fail. + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } + + void waitForDatafeedStarted(String taskId, StartDatafeedAction.DatafeedParams params, + ActionListener listener) { + Predicate> predicate = persistentTask -> { + if (persistentTask == null) { + return false; + } + DatafeedState datafeedState = (DatafeedState) persistentTask.getStatus(); + return datafeedState == DatafeedState.STARTED; + }; + persistentTasksService.waitForPersistentTaskStatus(taskId, predicate, params.getTimeout(), + new PersistentTasksService.WaitForPersistentTaskStatusListener() { + @Override + public void onResponse(PersistentTasksCustomMetaData.PersistentTask task) { + listener.onResponse(new StartDatafeedAction.Response(true)); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + + @Override + public void onTimeout(TimeValue timeout) { + listener.onFailure(new ElasticsearchException("Starting datafeed [" + + params.getDatafeedId() + "] timed out after 
[" + timeout + "]")); + } + }); + } + + public static class StartDatafeedPersistentTasksExecutor extends PersistentTasksExecutor { + private final DatafeedManager datafeedManager; + private final IndexNameExpressionResolver resolver; + + public StartDatafeedPersistentTasksExecutor(Settings settings, DatafeedManager datafeedManager) { + super(settings, StartDatafeedAction.TASK_NAME, MachineLearning.UTILITY_THREAD_POOL_NAME); + this.datafeedManager = datafeedManager; + this.resolver = new IndexNameExpressionResolver(settings); + } + + @Override + public PersistentTasksCustomMetaData.Assignment getAssignment(StartDatafeedAction.DatafeedParams params, + ClusterState clusterState) { + return new DatafeedNodeSelector(clusterState, resolver, params.getDatafeedId()).selectNode(); + } + + @Override + public void validate(StartDatafeedAction.DatafeedParams params, ClusterState clusterState) { + MlMetadata mlMetadata = clusterState.metaData().custom(MLMetadataField.TYPE); + PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + TransportStartDatafeedAction.validate(params.getDatafeedId(), mlMetadata, tasks); + new DatafeedNodeSelector(clusterState, resolver, params.getDatafeedId()).checkDatafeedTaskCanBeCreated(); + } + + @Override + protected void nodeOperation(AllocatedPersistentTask allocatedPersistentTask, StartDatafeedAction.DatafeedParams params, + Task.Status status) { + DatafeedTask datafeedTask = (DatafeedTask) allocatedPersistentTask; + datafeedTask.datafeedManager = datafeedManager; + datafeedManager.run(datafeedTask, + (error) -> { + if (error != null) { + datafeedTask.markAsFailed(error); + } else { + datafeedTask.markAsCompleted(); + } + }); + } + + @Override + protected AllocatedPersistentTask createTask( + long id, String type, String action, TaskId parentTaskId, + PersistentTasksCustomMetaData.PersistentTask persistentTask) { + return new DatafeedTask(id, type, action, parentTaskId, 
persistentTask.getParams()); + } + } + + public static class DatafeedTask extends AllocatedPersistentTask implements StartDatafeedAction.DatafeedTaskMatcher { + + private final String datafeedId; + private final long startTime; + private final Long endTime; + /* only pck protected for testing */ + volatile DatafeedManager datafeedManager; + + DatafeedTask(long id, String type, String action, TaskId parentTaskId, StartDatafeedAction.DatafeedParams params) { + super(id, type, action, "datafeed-" + params.getDatafeedId(), parentTaskId); + this.datafeedId = params.getDatafeedId(); + this.startTime = params.getStartTime(); + this.endTime = params.getEndTime(); + } + + public String getDatafeedId() { + return datafeedId; + } + + public long getDatafeedStartTime() { + return startTime; + } + + @Nullable + public Long getEndTime() { + return endTime; + } + + public boolean isLookbackOnly() { + return endTime != null; + } + + @Override + protected void onCancelled() { + // If the persistent task framework wants us to stop then we should do so immediately and + // we should wait for an existing datafeed import to realize we want it to stop. + // Note that this only applied when task cancel is invoked and stop datafeed api doesn't use this. + // Also stop datafeed api will obey the timeout. 
+ stop(getReasonCancelled(), TimeValue.ZERO); + } + + public void stop(String reason, TimeValue timeout) { + if (datafeedManager != null) { + datafeedManager.stopDatafeed(this, reason, timeout); + } + } + + public void isolate() { + if (datafeedManager != null) { + datafeedManager.isolateDatafeed(getAllocationId()); + } + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java new file mode 100644 index 00000000000..18a25ab7c46 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java @@ -0,0 +1,325 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.tasks.TransportTasksAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import 
org.elasticsearch.discovery.MasterNotDiscoveredException; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MLMetadataField; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MlMetadata; +import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.ml.datafeed.DatafeedState; +import org.elasticsearch.xpack.ml.job.messages.Messages; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.xpack.persistent.PersistentTasksService; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class TransportStopDatafeedAction extends TransportTasksAction { + + private final PersistentTasksService persistentTasksService; + + @Inject + public TransportStopDatafeedAction(Settings settings, TransportService transportService, ThreadPool threadPool, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + ClusterService clusterService, PersistentTasksService persistentTasksService) { + super(settings, StopDatafeedAction.NAME, threadPool, clusterService, transportService, actionFilters, + indexNameExpressionResolver, StopDatafeedAction.Request::new, StopDatafeedAction.Response::new, + MachineLearning.UTILITY_THREAD_POOL_NAME); + this.persistentTasksService = persistentTasksService; + } + + /** + * Resolve the requested datafeeds and add their IDs to one of the list + * arguments depending on datafeed state. 
+ * + * @param request The stop datafeed request + * @param mlMetadata ML Metadata + * @param tasks Persistent task meta data + * @param startedDatafeedIds Started datafeed ids are added to this list + * @param stoppingDatafeedIds Stopping datafeed ids are added to this list + */ + static void resolveDataFeedIds(StopDatafeedAction.Request request, MlMetadata mlMetadata, + PersistentTasksCustomMetaData tasks, + List startedDatafeedIds, + List stoppingDatafeedIds) { + + Set expandedDatafeedIds = mlMetadata.expandDatafeedIds(request.getDatafeedId(), request.allowNoDatafeeds()); + for (String expandedDatafeedId : expandedDatafeedIds) { + validateDatafeedTask(expandedDatafeedId, mlMetadata); + addDatafeedTaskIdAccordingToState(expandedDatafeedId, MlMetadata.getDatafeedState(expandedDatafeedId, tasks), + startedDatafeedIds, stoppingDatafeedIds); + } + } + + private static void addDatafeedTaskIdAccordingToState(String datafeedId, + DatafeedState datafeedState, + List startedDatafeedIds, + List stoppingDatafeedIds) { + switch (datafeedState) { + case STARTED: + startedDatafeedIds.add(datafeedId); + break; + case STOPPED: + break; + case STOPPING: + stoppingDatafeedIds.add(datafeedId); + break; + default: + break; + } + } + + /** + * Validate the stop request. 
+ * Throws an {@code ResourceNotFoundException} if there is no datafeed + * with id {@code datafeedId} + * @param datafeedId The datafeed Id + * @param mlMetadata ML meta data + */ + static void validateDatafeedTask(String datafeedId, MlMetadata mlMetadata) { + DatafeedConfig datafeed = mlMetadata.getDatafeed(datafeedId); + if (datafeed == null) { + throw new ResourceNotFoundException(Messages.getMessage(Messages.DATAFEED_NOT_FOUND, datafeedId)); + } + } + + @Override + protected void doExecute(Task task, StopDatafeedAction.Request request, ActionListener listener) { + final ClusterState state = clusterService.state(); + final DiscoveryNodes nodes = state.nodes(); + if (nodes.isLocalNodeElectedMaster() == false) { + // Delegates stop datafeed to elected master node, so it becomes the coordinating node. + // See comment in StartDatafeedAction.Transport class for more information. + if (nodes.getMasterNode() == null) { + listener.onFailure(new MasterNotDiscoveredException("no known master node")); + } else { + transportService.sendRequest(nodes.getMasterNode(), actionName, request, + new ActionListenerResponseHandler<>(listener, StopDatafeedAction.Response::new)); + } + } else { + MlMetadata mlMetadata = state.getMetaData().custom(MLMetadataField.TYPE); + PersistentTasksCustomMetaData tasks = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + + List startedDatafeeds = new ArrayList<>(); + List stoppingDatafeeds = new ArrayList<>(); + resolveDataFeedIds(request, mlMetadata, tasks, startedDatafeeds, stoppingDatafeeds); + if (startedDatafeeds.isEmpty() && stoppingDatafeeds.isEmpty()) { + listener.onResponse(new StopDatafeedAction.Response(true)); + return; + } + request.setResolvedStartedDatafeedIds(startedDatafeeds.toArray(new String[startedDatafeeds.size()])); + + if (request.isForce()) { + forceStopDatafeed(request, listener, tasks, startedDatafeeds); + } else { + normalStopDatafeed(task, request, listener, tasks, startedDatafeeds, stoppingDatafeeds); 
+ } + } + } + + private void normalStopDatafeed(Task task, StopDatafeedAction.Request request, ActionListener listener, + PersistentTasksCustomMetaData tasks, + List startedDatafeeds, List stoppingDatafeeds) { + Set executorNodes = new HashSet<>(); + for (String datafeedId : startedDatafeeds) { + PersistentTasksCustomMetaData.PersistentTask datafeedTask = MlMetadata.getDatafeedTask(datafeedId, tasks); + if (datafeedTask == null || datafeedTask.isAssigned() == false) { + String message = "Cannot stop datafeed [" + datafeedId + "] because the datafeed does not have an assigned node." + + " Use force stop to stop the datafeed"; + listener.onFailure(ExceptionsHelper.conflictStatusException(message)); + return; + } else { + executorNodes.add(datafeedTask.getExecutorNode()); + } + } + + request.setNodes(executorNodes.toArray(new String[executorNodes.size()])); + + // wait for started and stopping datafeeds + // Map datafeedId -> datafeed task Id. + List allDataFeedsToWaitFor = Stream.concat( + startedDatafeeds.stream().map(id -> MLMetadataField.datafeedTaskId(id)), + stoppingDatafeeds.stream().map(id -> MLMetadataField.datafeedTaskId(id))) + .collect(Collectors.toList()); + + ActionListener finalListener = ActionListener.wrap( + r -> waitForDatafeedStopped(allDataFeedsToWaitFor, request, r, listener), + listener::onFailure); + + super.doExecute(task, request, finalListener); + } + + private void forceStopDatafeed(final StopDatafeedAction.Request request, final ActionListener listener, + PersistentTasksCustomMetaData tasks, final List startedDatafeeds) { + final AtomicInteger counter = new AtomicInteger(); + final AtomicArray failures = new AtomicArray<>(startedDatafeeds.size()); + + for (String datafeedId : startedDatafeeds) { + PersistentTasksCustomMetaData.PersistentTask datafeedTask = MlMetadata.getDatafeedTask(datafeedId, tasks); + if (datafeedTask != null) { + persistentTasksService.cancelPersistentTask(datafeedTask.getId(), + new ActionListener>() { + @Override + 
public void onResponse(PersistentTasksCustomMetaData.PersistentTask persistentTask) { + if (counter.incrementAndGet() == startedDatafeeds.size()) { + sendResponseOrFailure(request.getDatafeedId(), listener, failures); + } + } + + @Override + public void onFailure(Exception e) { + final int slot = counter.incrementAndGet(); + failures.set(slot - 1, e); + if (slot == startedDatafeeds.size()) { + sendResponseOrFailure(request.getDatafeedId(), listener, failures); + } + } + }); + } else { + String msg = "Requested datafeed [" + request.getDatafeedId() + "] be force-stopped, but " + + "datafeed's task could not be found."; + logger.warn(msg); + final int slot = counter.incrementAndGet(); + failures.set(slot - 1, new RuntimeException(msg)); + if (slot == startedDatafeeds.size()) { + sendResponseOrFailure(request.getDatafeedId(), listener, failures); + } + } + } + } + + @Override + protected void taskOperation(StopDatafeedAction.Request request, TransportStartDatafeedAction.DatafeedTask datafeedTaskTask, + ActionListener listener) { + DatafeedState taskStatus = DatafeedState.STOPPING; + datafeedTaskTask.updatePersistentStatus(taskStatus, ActionListener.wrap(task -> { + // we need to fork because we are now on a network threadpool + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + + @Override + protected void doRun() throws Exception { + datafeedTaskTask.stop("stop_datafeed (api)", request.getStopTimeout()); + listener.onResponse(new StopDatafeedAction.Response(true)); + } + }); + }, + e -> { + if (e instanceof ResourceNotFoundException) { + // the task has disappeared so must have stopped + listener.onResponse(new StopDatafeedAction.Response(true)); + } else { + listener.onFailure(e); + } + } + )); + } + + private void sendResponseOrFailure(String datafeedId, ActionListener listener, + AtomicArray failures) { + List catchedExceptions = 
failures.asList(); + if (catchedExceptions.size() == 0) { + listener.onResponse(new StopDatafeedAction.Response(true)); + return; + } + + String msg = "Failed to stop datafeed [" + datafeedId + "] with [" + catchedExceptions.size() + + "] failures, rethrowing last, all Exceptions: [" + + catchedExceptions.stream().map(Exception::getMessage).collect(Collectors.joining(", ")) + + "]"; + + ElasticsearchException e = new ElasticsearchException(msg, + catchedExceptions.get(0)); + listener.onFailure(e); + } + + // Wait for datafeed to be marked as stopped in cluster state, which means the datafeed persistent task has been removed + // This api returns when task has been cancelled, but that doesn't mean the persistent task has been removed from cluster state, + // so wait for that to happen here. + void waitForDatafeedStopped(List datafeedPersistentTaskIds, StopDatafeedAction.Request request, + StopDatafeedAction.Response response, + ActionListener listener) { + persistentTasksService.waitForPersistentTasksStatus(persistentTasksCustomMetaData -> { + for (String persistentTaskId: datafeedPersistentTaskIds) { + if (persistentTasksCustomMetaData.getTask(persistentTaskId) != null) { + return false; + } + } + return true; + }, request.getTimeout(), new ActionListener() { + @Override + public void onResponse(Boolean result) { + listener.onResponse(response); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + + @Override + protected StopDatafeedAction.Response newResponse(StopDatafeedAction.Request request, List tasks, + List taskOperationFailures, + List failedNodeExceptions) { + // number of resolved data feeds should be equal to the number of + // tasks, otherwise something went wrong + if (request.getResolvedStartedDatafeedIds().length != tasks.size()) { + if (taskOperationFailures.isEmpty() == false) { + throw org.elasticsearch.ExceptionsHelper + .convertToElastic(taskOperationFailures.get(0).getCause()); + } else if 
(failedNodeExceptions.isEmpty() == false) { + throw org.elasticsearch.ExceptionsHelper + .convertToElastic(failedNodeExceptions.get(0)); + } else { + // This can happen we the actual task in the node no longer exists, + // which means the datafeed(s) have already been closed. + return new StopDatafeedAction.Response(true); + } + } + + return new StopDatafeedAction.Response(tasks.stream().allMatch(StopDatafeedAction.Response::isStopped)); + } + + @Override + protected StopDatafeedAction.Response readTaskResponse(StreamInput in) throws IOException { + return new StopDatafeedAction.Response(in); + } + +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java new file mode 100644 index 00000000000..743ff42227c --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MLMetadataField; +import org.elasticsearch.xpack.ml.MlMetadata; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +public class TransportUpdateCalendarJobAction extends HandledTransportAction { + + private final ClusterService clusterService; + private final JobProvider jobProvider; + + @Inject + public TransportUpdateCalendarJobAction(Settings settings, ThreadPool threadPool, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + ClusterService clusterService, JobProvider jobProvider) { + super(settings, UpdateCalendarJobAction.NAME, threadPool, transportService, actionFilters, + indexNameExpressionResolver, UpdateCalendarJobAction.Request::new); + this.clusterService = clusterService; + this.jobProvider = jobProvider; + } + + @Override + protected void doExecute(UpdateCalendarJobAction.Request request, ActionListener listener) { + ClusterState state = clusterService.state(); + MlMetadata mlMetadata = state.getMetaData().custom(MLMetadataField.TYPE); + for (String jobToAdd: request.getJobIdsToAdd()) { + if (mlMetadata.isGroupOrJob(jobToAdd) == false) { + listener.onFailure(ExceptionsHelper.missingJobException(jobToAdd)); + return; + } + } + + for (String jobToRemove: request.getJobIdsToRemove()) 
{ + if (mlMetadata.isGroupOrJob(jobToRemove) == false) { + listener.onFailure(ExceptionsHelper.missingJobException(jobToRemove)); + return; + } + } + + jobProvider.updateCalendar(request.getCalendarId(), request.getJobIdsToAdd(), request.getJobIdsToRemove(), + c -> listener.onResponse(new PutCalendarAction.Response(c)), listener::onFailure); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java new file mode 100644 index 00000000000..065559decb4 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.cluster.AckedClusterStateUpdateTask; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MLMetadataField; +import org.elasticsearch.xpack.ml.MlMetadata; +import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; +import 
org.elasticsearch.xpack.ml.datafeed.DatafeedUpdate; +import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; + +public class TransportUpdateDatafeedAction extends TransportMasterNodeAction { + + @Inject + public TransportUpdateDatafeedAction(Settings settings, TransportService transportService, ClusterService clusterService, + ThreadPool threadPool, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver) { + super(settings, UpdateDatafeedAction.NAME, transportService, clusterService, threadPool, actionFilters, + indexNameExpressionResolver, UpdateDatafeedAction.Request::new); + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected PutDatafeedAction.Response newResponse() { + return new PutDatafeedAction.Response(); + } + + @Override + protected void masterOperation(UpdateDatafeedAction.Request request, ClusterState state, + ActionListener listener) { + clusterService.submitStateUpdateTask("update-datafeed-" + request.getUpdate().getId(), + new AckedClusterStateUpdateTask(request, listener) { + private volatile DatafeedConfig updatedDatafeed; + + @Override + protected PutDatafeedAction.Response newResponse(boolean acknowledged) { + if (acknowledged) { + logger.info("Updated datafeed [{}]", request.getUpdate().getId()); + } + return new PutDatafeedAction.Response(acknowledged, updatedDatafeed); + } + + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + DatafeedUpdate update = request.getUpdate(); + MlMetadata currentMetadata = currentState.getMetaData().custom(MLMetadataField.TYPE); + PersistentTasksCustomMetaData persistentTasks = + currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata) + .updateDatafeed(update, persistentTasks, threadPool.getThreadContext()).build(); + updatedDatafeed = newMetadata.getDatafeed(update.getId()); + return 
ClusterState.builder(currentState).metaData( + MetaData.builder(currentState.getMetaData()).putCustom(MLMetadataField.TYPE, newMetadata).build()).build(); + } + }); + } + + @Override + protected ClusterBlockException checkBlock(UpdateDatafeedAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateJobAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateJobAction.java new file mode 100644 index 00000000000..7273d271361 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateJobAction.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.job.JobManager; + +public class TransportUpdateJobAction extends TransportMasterNodeAction { + + private final JobManager jobManager; + + @Inject + public TransportUpdateJobAction(Settings settings, 
TransportService transportService, ClusterService clusterService, + ThreadPool threadPool, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, JobManager jobManager) { + super(settings, UpdateJobAction.NAME, transportService, clusterService, threadPool, actionFilters, + indexNameExpressionResolver, UpdateJobAction.Request::new); + this.jobManager = jobManager; + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected PutJobAction.Response newResponse() { + return new PutJobAction.Response(); + } + + @Override + protected void masterOperation(UpdateJobAction.Request request, ClusterState state, + ActionListener listener) throws Exception { + if (request.getJobId().equals(MetaData.ALL)) { + throw new IllegalArgumentException("Job Id " + MetaData.ALL + " cannot be for update"); + } + + jobManager.updateJob(request.getJobId(), request.getJobUpdate(), request, listener); + } + + @Override + protected ClusterBlockException checkBlock(UpdateJobAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java new file mode 100644 index 00000000000..cf7e9c1b75f --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java @@ -0,0 +1,110 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.job.messages.Messages; +import org.elasticsearch.xpack.ml.job.persistence.ElasticsearchMappings; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; +import org.elasticsearch.xpack.ml.job.results.Result; + +import java.io.IOException; +import java.util.function.Consumer; + +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + +public class TransportUpdateModelSnapshotAction extends HandledTransportAction { + + private final JobProvider jobProvider; + private final Client client; + + @Inject + public TransportUpdateModelSnapshotAction(Settings settings, TransportService transportService, ThreadPool threadPool, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + JobProvider jobProvider, Client client) { + 
super(settings, UpdateModelSnapshotAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, + UpdateModelSnapshotAction.Request::new); + this.jobProvider = jobProvider; + this.client = client; + } + + @Override + protected void doExecute(UpdateModelSnapshotAction.Request request, ActionListener listener) { + logger.debug("Received request to update model snapshot [{}] for job [{}]", request.getSnapshotId(), request.getJobId()); + jobProvider.getModelSnapshot(request.getJobId(), request.getSnapshotId(), modelSnapshot -> { + if (modelSnapshot == null) { + listener.onFailure(new ResourceNotFoundException(Messages.getMessage( + Messages.REST_NO_SUCH_MODEL_SNAPSHOT, request.getSnapshotId(), request.getJobId()))); + } else { + Result updatedSnapshot = applyUpdate(request, modelSnapshot); + indexModelSnapshot(updatedSnapshot, b -> { + // The quantiles can be large, and totally dominate the output - + // it's clearer to remove them + listener.onResponse(new UpdateModelSnapshotAction.Response( + new ModelSnapshot.Builder(updatedSnapshot.result).setQuantiles(null).build())); + }, listener::onFailure); + } + }, listener::onFailure); + } + + private static Result applyUpdate(UpdateModelSnapshotAction.Request request, Result target) { + ModelSnapshot.Builder updatedSnapshotBuilder = new ModelSnapshot.Builder(target.result); + if (request.getDescription() != null) { + updatedSnapshotBuilder.setDescription(request.getDescription()); + } + if (request.getRetain() != null) { + updatedSnapshotBuilder.setRetain(request.getRetain()); + } + return new Result(target.index, updatedSnapshotBuilder.build()); + } + + private void indexModelSnapshot(Result modelSnapshot, Consumer handler, Consumer errorHandler) { + IndexRequest indexRequest = new IndexRequest(modelSnapshot.index, ElasticsearchMappings.DOC_TYPE, + ModelSnapshot.documentId(modelSnapshot.result)); + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + 
modelSnapshot.result.toXContent(builder, ToXContent.EMPTY_PARAMS); + indexRequest.source(builder); + } catch (IOException e) { + errorHandler.accept(e); + return; + } + BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); + bulkRequestBuilder.add(indexRequest); + bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), + new ActionListener() { + @Override + public void onResponse(BulkResponse indexResponse) { + handler.accept(true); + } + + @Override + public void onFailure(Exception e) { + errorHandler.accept(e); + } + }); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateProcessAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateProcessAction.java new file mode 100644 index 00000000000..84cfe668c48 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateProcessAction.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; +import org.elasticsearch.xpack.ml.job.process.autodetect.UpdateParams; + +import java.io.IOException; + +public class TransportUpdateProcessAction extends TransportJobTaskAction { + + @Inject + public TransportUpdateProcessAction(Settings settings, TransportService transportService, ThreadPool threadPool, + ClusterService clusterService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + AutodetectProcessManager processManager) { + super(settings, UpdateProcessAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + UpdateProcessAction.Request::new, UpdateProcessAction.Response::new, ThreadPool.Names.SAME, processManager); + // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread + } + + @Override + protected UpdateProcessAction.Response readTaskResponse(StreamInput in) throws IOException { + UpdateProcessAction.Response response = new UpdateProcessAction.Response(); + response.readFrom(in); + return response; + } + + @Override + protected void taskOperation(UpdateProcessAction.Request request, TransportOpenJobAction.JobTask task, + ActionListener listener) { + try { + processManager.writeUpdateProcessMessage(task, + new UpdateParams(request.getModelPlotConfig(), + request.getDetectorUpdates(), request.isUpdateSpecialEvents()), + e 
-> { + if (e == null) { + listener.onResponse(new UpdateProcessAction.Response()); + } else { + listener.onFailure(e); + } + }); + } catch (Exception e) { + listener.onFailure(e); + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java new file mode 100644 index 00000000000..510c1580822 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +public class TransportValidateDetectorAction extends HandledTransportAction { + + @Inject + public TransportValidateDetectorAction(Settings settings, TransportService transportService, ThreadPool threadPool, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { + super(settings, ValidateDetectorAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, + ValidateDetectorAction.Request::new); + } + + @Override + protected void doExecute(ValidateDetectorAction.Request request, ActionListener listener) { + listener.onResponse(new ValidateDetectorAction.Response(true)); + } + +} diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java new file mode 100644 index 00000000000..822b3ba7852 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +public class TransportValidateJobConfigAction extends HandledTransportAction { + + @Inject + public TransportValidateJobConfigAction(Settings settings, TransportService transportService, ThreadPool threadPool, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { + super(settings, ValidateJobConfigAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, + ValidateJobConfigAction.Request::new); + } + + @Override + protected void doExecute(ValidateJobConfigAction.Request request, ActionListener listener) { + listener.onResponse(new ValidateJobConfigAction.Response(true)); + } + +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateDatafeedAction.java deleted file mode 100644 index e1c2591a7a6..00000000000 
--- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateDatafeedAction.java +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml.action; - -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; -import org.elasticsearch.action.support.master.TransportMasterNodeAction; -import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.AckedClusterStateUpdateTask; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.MlMetadata; -import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.ml.datafeed.DatafeedUpdate; -import 
org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; - -import java.io.IOException; -import java.util.Objects; - -public class UpdateDatafeedAction extends Action { - - public static final UpdateDatafeedAction INSTANCE = new UpdateDatafeedAction(); - public static final String NAME = "cluster:admin/xpack/ml/datafeeds/update"; - - private UpdateDatafeedAction() { - super(NAME); - } - - @Override - public RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client, this); - } - - @Override - public PutDatafeedAction.Response newResponse() { - return new PutDatafeedAction.Response(); - } - - public static class Request extends AcknowledgedRequest implements ToXContentObject { - - public static Request parseRequest(String datafeedId, XContentParser parser) { - DatafeedUpdate.Builder update = DatafeedUpdate.PARSER.apply(parser, null); - update.setId(datafeedId); - return new Request(update.build()); - } - - private DatafeedUpdate update; - - public Request(DatafeedUpdate update) { - this.update = update; - } - - Request() { - } - - public DatafeedUpdate getUpdate() { - return update; - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - update = new DatafeedUpdate(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - update.writeTo(out); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - update.toXContent(builder, params); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Request request = (Request) o; - return Objects.equals(update, request.update); - } - - @Override - public int hashCode() { - return Objects.hash(update); - } - } - - public static class 
RequestBuilder extends MasterNodeOperationRequestBuilder { - - public RequestBuilder(ElasticsearchClient client, UpdateDatafeedAction action) { - super(client, action, new Request()); - } - } - - public static class TransportAction extends TransportMasterNodeAction { - - @Inject - public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, UpdateDatafeedAction.NAME, transportService, clusterService, threadPool, actionFilters, - indexNameExpressionResolver, Request::new); - } - - @Override - protected String executor() { - return ThreadPool.Names.SAME; - } - - @Override - protected PutDatafeedAction.Response newResponse() { - return new PutDatafeedAction.Response(); - } - - @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) { - clusterService.submitStateUpdateTask("update-datafeed-" + request.getUpdate().getId(), - new AckedClusterStateUpdateTask(request, listener) { - private volatile DatafeedConfig updatedDatafeed; - - @Override - protected PutDatafeedAction.Response newResponse(boolean acknowledged) { - if (acknowledged) { - logger.info("Updated datafeed [{}]", request.getUpdate().getId()); - } - return new PutDatafeedAction.Response(acknowledged, updatedDatafeed); - } - - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - DatafeedUpdate update = request.getUpdate(); - MlMetadata currentMetadata = currentState.getMetaData().custom(MlMetadata.TYPE); - PersistentTasksCustomMetaData persistentTasks = - currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata) - .updateDatafeed(update, persistentTasks, threadPool.getThreadContext()).build(); - updatedDatafeed = newMetadata.getDatafeed(update.getId()); - return 
ClusterState.builder(currentState).metaData( - MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, newMetadata).build()).build(); - } - }); - } - - @Override - protected ClusterBlockException checkBlock(Request request, ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); - } - } -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java index 928f8ecb0cf..d0a843ad41d 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java @@ -20,10 +20,12 @@ import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.action.CloseJobAction; import org.elasticsearch.xpack.ml.action.StartDatafeedAction; +import org.elasticsearch.xpack.ml.action.TransportStartDatafeedAction; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.config.JobState; import org.elasticsearch.xpack.ml.job.messages.Messages; @@ -76,10 +78,10 @@ public class DatafeedManager extends AbstractComponent { clusterService.addListener(taskRunner); } - public void run(StartDatafeedAction.DatafeedTask task, Consumer taskHandler) { + public void run(TransportStartDatafeedAction.DatafeedTask task, Consumer taskHandler) { String datafeedId = task.getDatafeedId(); ClusterState state = clusterService.state(); - MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE); + MlMetadata mlMetadata = state.metaData().custom(MLMetadataField.TYPE); if 
(mlMetadata == null) { mlMetadata = MlMetadata.EMPTY_METADATA; } @@ -109,7 +111,7 @@ public class DatafeedManager extends AbstractComponent { datafeedJobBuilder.build(job, datafeed, datafeedJobHandler); } - public void stopDatafeed(StartDatafeedAction.DatafeedTask task, String reason, TimeValue timeout) { + public void stopDatafeed(TransportStartDatafeedAction.DatafeedTask task, String reason, TimeValue timeout) { logger.info("[{}] attempt to stop datafeed [{}] [{}]", reason, task.getDatafeedId(), task.getAllocationId()); Holder holder = runningDatafeedsOnThisNode.remove(task.getAllocationId()); if (holder != null) { @@ -256,11 +258,11 @@ public class DatafeedManager extends AbstractComponent { } } - private String getJobId(StartDatafeedAction.DatafeedTask task) { + private String getJobId(TransportStartDatafeedAction.DatafeedTask task) { return runningDatafeedsOnThisNode.get(task.getAllocationId()).getJobId(); } - private JobState getJobState(PersistentTasksCustomMetaData tasks, StartDatafeedAction.DatafeedTask datafeedTask) { + private JobState getJobState(PersistentTasksCustomMetaData tasks, TransportStartDatafeedAction.DatafeedTask datafeedTask) { return MlMetadata.getJobState(getJobId(datafeedTask), tasks); } @@ -449,9 +451,9 @@ public class DatafeedManager extends AbstractComponent { private class TaskRunner implements ClusterStateListener { - private final List tasksToRun = new CopyOnWriteArrayList<>(); + private final List tasksToRun = new CopyOnWriteArrayList<>(); - private void runWhenJobIsOpened(StartDatafeedAction.DatafeedTask datafeedTask) { + private void runWhenJobIsOpened(TransportStartDatafeedAction.DatafeedTask datafeedTask) { ClusterState clusterState = clusterService.state(); PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); if (getJobState(tasks, datafeedTask) == JobState.OPENED) { @@ -463,7 +465,7 @@ public class DatafeedManager extends AbstractComponent { } } - private void 
runTask(StartDatafeedAction.DatafeedTask task) { + private void runTask(TransportStartDatafeedAction.DatafeedTask task) { // This clearing of the thread context is not strictly necessary. Every action performed by the // datafeed _should_ be done using the MlClientHelper, which will set the appropriate thread // context. However, by clearing the thread context here if anyone forgets to use MlClientHelper @@ -487,8 +489,8 @@ public class DatafeedManager extends AbstractComponent { return; } - List remainingTasks = new ArrayList<>(); - for (StartDatafeedAction.DatafeedTask datafeedTask : tasksToRun) { + List remainingTasks = new ArrayList<>(); + for (TransportStartDatafeedAction.DatafeedTask datafeedTask : tasksToRun) { if (runningDatafeedsOnThisNode.containsKey(datafeedTask.getAllocationId()) == false) { continue; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java index 22c82c0a085..46969c184ac 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java @@ -12,6 +12,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.job.config.JobState; import org.elasticsearch.xpack.ml.job.config.JobTaskStatus; @@ -31,7 +32,7 @@ public class DatafeedNodeSelector { private final IndexNameExpressionResolver resolver; public DatafeedNodeSelector(ClusterState clusterState, IndexNameExpressionResolver resolver, String datafeedId) { - MlMetadata mlMetadata = Objects.requireNonNull(clusterState.metaData().custom(MlMetadata.TYPE)); + 
MlMetadata mlMetadata = Objects.requireNonNull(clusterState.metaData().custom(MLMetadataField.TYPE)); PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); this.datafeed = mlMetadata.getDatafeed(datafeedId); this.jobTask = MlMetadata.getJobTask(datafeed.getJobId(), tasks); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedFields.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedFields.java index e3af15f818c..d5539549423 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedFields.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedFields.java @@ -123,9 +123,9 @@ class ExtractedFields { method = ExtractedField.ExtractionMethod.DOC_VALUE; } else if (isText(field)) { String parentField = MlStrings.getParentField(field); - // Field is text so check if it is a multi-field + // ThrottlerField is text so check if it is a multi-field if (Objects.equals(parentField, field) == false && fieldsCapabilities.getField(parentField) != null) { - // Field is a multi-field which means it won't be available in source. Let's take the parent instead. + // ThrottlerField is a multi-field which means it won't be available in source. Let's take the parent instead. internalField = parentField; method = isAggregatable(parentField) ? 
ExtractedField.ExtractionMethod.DOC_VALUE : ExtractedField.ExtractionMethod.SOURCE; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index e1114a0ed0b..dea45b3c4c4 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -24,7 +24,8 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MLMetadataField; +import org.elasticsearch.xpack.ml.MachineLearningClientActionPlugin; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.action.DeleteJobAction; import org.elasticsearch.xpack.ml.action.PutJobAction; @@ -88,9 +89,9 @@ public class JobManager extends AbstractComponent { this.client = Objects.requireNonNull(client); this.updateJobProcessNotifier = updateJobProcessNotifier; - maxModelMemoryLimit = MachineLearning.MAX_MODEL_MEMORY_LIMIT.get(settings); + maxModelMemoryLimit = MachineLearningClientActionPlugin.MAX_MODEL_MEMORY_LIMIT.get(settings); clusterService.getClusterSettings() - .addSettingsUpdateConsumer(MachineLearning.MAX_MODEL_MEMORY_LIMIT, this::setMaxModelMemoryLimit); + .addSettingsUpdateConsumer(MachineLearningClientActionPlugin.MAX_MODEL_MEMORY_LIMIT, this::setMaxModelMemoryLimit); } private void setMaxModelMemoryLimit(ByteSizeValue maxModelMemoryLimit) { @@ -117,7 +118,7 @@ public class JobManager extends AbstractComponent { * @throws ResourceNotFoundException if no job matches {@code jobId} */ public static Job getJobOrThrowIfUnknown(String jobId, ClusterState clusterState) { - MlMetadata mlMetadata = clusterState.getMetaData().custom(MlMetadata.TYPE); + MlMetadata mlMetadata 
= clusterState.getMetaData().custom(MLMetadataField.TYPE); Job job = (mlMetadata == null) ? null : mlMetadata.getJobs().get(jobId); if (job == null) { throw ExceptionsHelper.missingJobException(jobId); @@ -135,7 +136,7 @@ public class JobManager extends AbstractComponent { * @return A {@link QueryPage} containing the matching {@code Job}s */ public QueryPage expandJobs(String expression, boolean allowNoJobs, ClusterState clusterState) { - MlMetadata mlMetadata = clusterState.getMetaData().custom(MlMetadata.TYPE); + MlMetadata mlMetadata = clusterState.getMetaData().custom(MLMetadataField.TYPE); if (mlMetadata == null) { mlMetadata = MlMetadata.EMPTY_METADATA; } @@ -169,7 +170,7 @@ public class JobManager extends AbstractComponent { DEPRECATION_LOGGER.deprecated("Creating jobs with delimited data format is deprecated. Please use JSON instead."); } - MlMetadata currentMlMetadata = state.metaData().custom(MlMetadata.TYPE); + MlMetadata currentMlMetadata = state.metaData().custom(MLMetadataField.TYPE); if (currentMlMetadata != null && currentMlMetadata.getJobs().containsKey(job.getId())) { actionListener.onFailure(ExceptionsHelper.jobAlreadyExists(job.getId())); return; @@ -337,7 +338,7 @@ public class JobManager extends AbstractComponent { @Override public ClusterState execute(ClusterState currentState) throws Exception { - MlMetadata currentMlMetadata = currentState.metaData().custom(MlMetadata.TYPE); + MlMetadata currentMlMetadata = currentState.metaData().custom(MLMetadataField.TYPE); if (currentMlMetadata.getJobs().containsKey(jobId) == false) { // We wouldn't have got here if the job never existed so // the Job must have been deleted by another action. 
@@ -427,13 +428,13 @@ public class JobManager extends AbstractComponent { } private static MlMetadata.Builder createMlMetadataBuilder(ClusterState currentState) { - MlMetadata currentMlMetadata = currentState.metaData().custom(MlMetadata.TYPE); + MlMetadata currentMlMetadata = currentState.metaData().custom(MLMetadataField.TYPE); return new MlMetadata.Builder(currentMlMetadata); } private static ClusterState buildNewClusterState(ClusterState currentState, MlMetadata.Builder builder) { ClusterState.Builder newState = ClusterState.builder(currentState); - newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, builder.build()).build()); + newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MLMetadataField.TYPE, builder.build()).build()); return newState.build(); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java index 17f672389da..b3d044b1807 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java @@ -42,6 +42,7 @@ import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Locale; import java.util.Optional; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; @@ -238,10 +239,25 @@ public class AutodetectCommunicator implements Closeable { } public void forecastJob(ForecastParams params, BiConsumer handler) { + BiConsumer forecastConsumer = (aVoid, e) -> { + if (e == null) { + FlushJobParams flushParams = FlushJobParams.builder().build(); + flushJob(flushParams, (flushAcknowledgement, flushException) -> { + if (flushException != null) { + String msg = String.format(Locale.ROOT, "[%s] 
exception while flushing job", job.getId()); + handler.accept(null, ExceptionsHelper.serverError(msg, e)); + } else { + handler.accept(null, null); + } + }); + } else { + handler.accept(null, e); + } + }; submitOperation(() -> { autodetectProcess.forecastJob(params); return null; - }, handler); + }, forecastConsumer); } @Nullable diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index b2d737ef608..f3857b6c5c6 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -23,15 +23,13 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.action.OpenJobAction.JobTask; +import org.elasticsearch.xpack.ml.action.TransportOpenJobAction.JobTask; import org.elasticsearch.xpack.ml.action.util.QueryPage; import org.elasticsearch.xpack.ml.calendars.SpecialEvent; import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.config.JobState; import org.elasticsearch.xpack.ml.job.config.JobTaskStatus; -import org.elasticsearch.xpack.ml.job.config.JobUpdate; -import org.elasticsearch.xpack.ml.job.config.ModelPlotConfig; import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.persistence.JobRenormalizedResultsPersister; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcess.java 
b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcess.java index 74cb30ab5e8..d8875f7627f 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcess.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcess.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ml.job.process.autodetect; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MachineLearningClientActionPlugin; import org.elasticsearch.xpack.ml.job.config.DetectionRule; import org.elasticsearch.xpack.ml.job.config.ModelPlotConfig; import org.elasticsearch.xpack.ml.job.persistence.StateStreamer; @@ -187,7 +187,7 @@ class NativeAutodetectProcess implements AutodetectProcess { // to the state processor - it may take a long time for all the model state to be // indexed if (stateProcessorFuture != null) { - stateProcessorFuture.get(MachineLearning.STATE_PERSIST_RESTORE_TIMEOUT.getMinutes(), TimeUnit.MINUTES); + stateProcessorFuture.get(MachineLearningClientActionPlugin.STATE_PERSIST_RESTORE_TIMEOUT.getMinutes(), TimeUnit.MINUTES); } // the log processor should have stopped by now too - assume processing the logs will // take no more than 5 seconds longer than processing the state (usually it should diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java index 1dda562447a..9b2c377076e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java @@ -8,8 +8,8 @@ package 
org.elasticsearch.xpack.ml.job.process.autodetect; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.action.OpenJobAction.JobTask; +import org.elasticsearch.xpack.ml.MachineLearningClientActionPlugin; +import org.elasticsearch.xpack.ml.action.TransportOpenJobAction.JobTask; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import java.io.IOException; @@ -55,7 +55,7 @@ final class ProcessContext { void tryLock() { try { - if (lock.tryLock(MachineLearning.STATE_PERSIST_RESTORE_TIMEOUT.getSeconds(), TimeUnit.SECONDS) == false) { + if (lock.tryLock(MachineLearningClientActionPlugin.STATE_PERSIST_RESTORE_TIMEOUT.getSeconds(), TimeUnit.SECONDS) == false) { LOGGER.error("Failed to acquire process lock for job [{}]", jobTask.getJobId()); throw ExceptionsHelper.serverError("Failed to acquire process lock for job [" + jobTask.getJobId() + "]"); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java index b1c2ebb0528..61e4c49f3bb 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java @@ -12,7 +12,7 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MachineLearningClientActionPlugin; import org.elasticsearch.xpack.ml.action.PutJobAction; import org.elasticsearch.xpack.ml.action.UpdateJobAction; import 
org.elasticsearch.xpack.ml.job.config.JobUpdate; @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.ml.job.results.Bucket; import org.elasticsearch.xpack.ml.job.results.CategoryDefinition; import org.elasticsearch.xpack.ml.job.results.Forecast; import org.elasticsearch.xpack.ml.job.results.ForecastRequestStats; +import org.elasticsearch.xpack.ml.job.results.ForecastRequestStats.ForecastRequestStatus; import org.elasticsearch.xpack.ml.job.results.Influencer; import org.elasticsearch.xpack.ml.job.results.ModelPlot; @@ -224,14 +225,18 @@ public class AutoDetectResultProcessor { LOGGER.trace("Received Forecast Stats [{}]", forecastRequestStats.getId()); context.bulkResultsPersister.persistForecastRequestStats(forecastRequestStats); - double forecastProgress = forecastRequestStats.getProgress(); - - // persist if progress is 0 (probably some error condition) or 1 (finished), + // execute the bulk request only in some cases or in doubt // otherwise rely on the count-based trigger - if (forecastProgress == 0.0 || forecastProgress >= 1.0) { - // if forecast stats progress is 1.0 it marks the end of a forecast, - // therefore commit whatever we have - context.bulkResultsPersister.executeRequest(); + switch (forecastRequestStats.getStatus()) { + case OK: + case STARTED: + break; + case FAILED: + case SCHEDULED: + case FINISHED: + default: + context.bulkResultsPersister.executeRequest(); + } } ModelSizeStats modelSizeStats = result.getModelSizeStats(); @@ -345,7 +350,8 @@ public class AutoDetectResultProcessor { try { // Although the results won't take 30 minutes to finish, the pipe won't be closed // until the state is persisted, and that can take a while - if (completionLatch.await(MachineLearning.STATE_PERSIST_RESTORE_TIMEOUT.getMinutes(), TimeUnit.MINUTES) == false) { + if (completionLatch.await(MachineLearningClientActionPlugin.STATE_PERSIST_RESTORE_TIMEOUT.getMinutes(), + TimeUnit.MINUTES) == false) { throw new TimeoutException("Timed out waiting for results processor to 
complete for job " + jobId); } // Input stream has been completely processed at this point. diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParser.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParser.java index 10c9d7f3cf5..9c065009f95 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParser.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParser.java @@ -87,7 +87,7 @@ public class AutodetectResultsParser extends AbstractComponent { consumeAndCloseStream(in); return false; } else if (token != XContentParser.Token.START_OBJECT) { - logger.error("Expecting Json Field name token after the Start Object token"); + logger.error("Expecting Json ThrottlerField name token after the Start Object token"); consumeAndCloseStream(in); throw new ElasticsearchParseException("unexpected token [" + token + "]"); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/CsvDataToProcessWriter.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/CsvDataToProcessWriter.java index 03c9e640b25..42b7a41b060 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/CsvDataToProcessWriter.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/CsvDataToProcessWriter.java @@ -152,7 +152,7 @@ class CsvDataToProcessWriter extends AbstractDataToProcessWriter { } Integer index = inputFieldIndexes.get(field); if (index == null) { - String msg = String.format(Locale.ROOT, "Field configured for analysis '%s' is not in the CSV header '%s'", + String msg = String.format(Locale.ROOT, "ThrottlerField configured for analysis '%s' is not in the CSV header '%s'", field, Arrays.toString(header)); LOGGER.error(msg); diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/logging/CppLogMessage.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/logging/CppLogMessage.java index 6064cfef31b..511af3b5734 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/logging/CppLogMessage.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/logging/CppLogMessage.java @@ -23,7 +23,7 @@ import java.util.Objects; */ public class CppLogMessage implements ToXContentObject, Writeable { /** - * Field Names (these are defined by log4cxx; we have no control over them) + * ThrottlerField Names (these are defined by log4cxx; we have no control over them) */ public static final ParseField LOGGER_FIELD = new ParseField("logger"); public static final ParseField TIMESTAMP_FIELD = new ParseField("timestamp"); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java index 44ff73d465b..62486905b31 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java @@ -11,6 +11,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.results.Result; @@ -62,7 +63,7 @@ abstract class AbstractExpiredJobDataRemover implements MlDataRemover { private Iterator newJobIterator() { List jobs = new ArrayList<>(); ClusterState clusterState = clusterService.state(); - MlMetadata mlMetadata = 
clusterState.getMetaData().custom(MlMetadata.TYPE); + MlMetadata mlMetadata = clusterState.getMetaData().custom(MLMetadataField.TYPE); if (mlMetadata != null) { jobs.addAll(mlMetadata.getJobs().values()); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java index 0f2b9aa2b73..52c13741e82 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.ml.action.DeleteModelSnapshotAction; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; +import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshotField; import java.util.ArrayList; import java.util.Iterator; @@ -70,7 +71,7 @@ public class ExpiredModelSnapshotsRemover extends AbstractExpiredJobDataRemover searchRequest.indices(AnomalyDetectorsIndex.jobResultsAliasedName(job.getId())); QueryBuilder activeSnapshotFilter = QueryBuilders.termQuery( - ModelSnapshot.SNAPSHOT_ID.getPreferredName(), job.getModelSnapshotId()); + ModelSnapshotField.SNAPSHOT_ID.getPreferredName(), job.getModelSnapshotId()); QueryBuilder retainFilter = QueryBuilders.termQuery(ModelSnapshot.RETAIN.getPreferredName(), true); QueryBuilder query = createQuery(job.getId(), cutoffEpochMs) .filter(QueryBuilders.existsQuery(ModelSnapshot.SNAPSHOT_DOC_COUNT.getPreferredName())) diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java index 95d3398fcc0..55dea1373fb 100644 --- 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestDeleteModelSnapshotAction.java @@ -14,7 +14,7 @@ import org.elasticsearch.rest.action.AcknowledgedRestListener; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.action.DeleteModelSnapshotAction; import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; +import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshotField; import java.io.IOException; @@ -23,7 +23,7 @@ public class RestDeleteModelSnapshotAction extends BaseRestHandler { public RestDeleteModelSnapshotAction(Settings settings, RestController controller) { super(settings); controller.registerHandler(RestRequest.Method.DELETE, MachineLearning.BASE_PATH + "anomaly_detectors/{" - + Job.ID.getPreferredName() + "}/model_snapshots/{" + ModelSnapshot.SNAPSHOT_ID.getPreferredName() + "}", this); + + Job.ID.getPreferredName() + "}/model_snapshots/{" + ModelSnapshotField.SNAPSHOT_ID.getPreferredName() + "}", this); } @Override @@ -35,7 +35,7 @@ public class RestDeleteModelSnapshotAction extends BaseRestHandler { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { DeleteModelSnapshotAction.Request deleteModelSnapshot = new DeleteModelSnapshotAction.Request( restRequest.param(Job.ID.getPreferredName()), - restRequest.param(ModelSnapshot.SNAPSHOT_ID.getPreferredName())); + restRequest.param(ModelSnapshotField.SNAPSHOT_ID.getPreferredName())); return channel -> client.execute(DeleteModelSnapshotAction.INSTANCE, deleteModelSnapshot, new AcknowledgedRestListener<>(channel)); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java 
b/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java index 0531c85e6eb..6d61d50c473 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestUpdateModelSnapshotAction.java @@ -15,7 +15,7 @@ import org.elasticsearch.rest.action.RestStatusToXContentListener; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.action.UpdateModelSnapshotAction; import org.elasticsearch.xpack.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; +import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshotField; import java.io.IOException; @@ -24,7 +24,7 @@ public class RestUpdateModelSnapshotAction extends BaseRestHandler { public RestUpdateModelSnapshotAction(Settings settings, RestController controller) { super(settings); controller.registerHandler(RestRequest.Method.POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" - + Job.ID.getPreferredName() + "}/model_snapshots/{" + ModelSnapshot.SNAPSHOT_ID +"}/_update", + + Job.ID.getPreferredName() + "}/model_snapshots/{" + ModelSnapshotField.SNAPSHOT_ID +"}/_update", this); } @@ -38,7 +38,7 @@ public class RestUpdateModelSnapshotAction extends BaseRestHandler { XContentParser parser = restRequest.contentParser(); UpdateModelSnapshotAction.Request updateModelSnapshot = UpdateModelSnapshotAction.Request.parseRequest( restRequest.param(Job.ID.getPreferredName()), - restRequest.param(ModelSnapshot.SNAPSHOT_ID.getPreferredName()), + restRequest.param(ModelSnapshotField.SNAPSHOT_ID.getPreferredName()), parser); return channel -> diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java index 85413348ccd..568ad2bef74 100644 --- 
a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.monitoring; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; @@ -20,13 +19,13 @@ import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.XPackClientActionPlugin; import org.elasticsearch.xpack.XPackFeatureSet; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; @@ -62,7 +61,7 @@ import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.elasticsearch.common.settings.Setting.boolSetting; import static org.elasticsearch.common.settings.Setting.timeSetting; -import static org.elasticsearch.xpack.XPackPlugin.MONITORING; +import static org.elasticsearch.xpack.XpackField.MONITORING; /** * This class activates/deactivates the monitoring modules depending if we're running a node client, transport client or tribe client: @@ -74,26 +73,6 @@ public class Monitoring implements ActionPlugin { public static final String NAME = "monitoring"; - /** - * The 
minimum amount of time allowed for the history duration. - */ - public static final TimeValue HISTORY_DURATION_MINIMUM = TimeValue.timeValueHours(24); - - /** - * The default retention duration of the monitoring history data. - *

    - * Expected values: - *

      - *
    • Default: 7 days
    • - *
    • Minimum: 1 day
    • - *
    - * - * @see #HISTORY_DURATION_MINIMUM - */ - public static final Setting HISTORY_DURATION = timeSetting("xpack.monitoring.history.duration", - TimeValue.timeValueHours(7 * 24), // default value (7 days) - HISTORY_DURATION_MINIMUM, // minimum value - Setting.Property.Dynamic, Setting.Property.NodeScope); /** * The ability to automatically cleanup ".watcher_history*" indices while also cleaning up Monitoring indices. */ @@ -112,7 +91,7 @@ public class Monitoring implements ActionPlugin { this.licenseState = licenseState; this.enabled = XPackSettings.MONITORING_ENABLED.get(settings); this.transportClientMode = XPackPlugin.transportClientMode(settings); - this.tribeNode = XPackPlugin.isTribeNode(settings); + this.tribeNode = XPackClientActionPlugin.isTribeNode(settings); } public static Collection getNamedWriteables() { @@ -186,7 +165,7 @@ public class Monitoring implements ActionPlugin { public List> getSettings() { return Collections.unmodifiableList( - Arrays.asList(HISTORY_DURATION, + Arrays.asList(MonitoringField.HISTORY_DURATION, CLEAN_WATCHER_HISTORY, MonitoringService.INTERVAL, Exporters.EXPORTERS_SETTINGS, diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerService.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerService.java index 871a5b68a08..49d32c13b61 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerService.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.monitoring.Monitoring; +import org.elasticsearch.xpack.monitoring.MonitoringField; import org.joda.time.DateTime; import org.joda.time.chrono.ISOChronology; @@ -41,11 +41,11 
@@ public class CleanerService extends AbstractLifecycleComponent { this.licenseState = licenseState; this.threadPool = threadPool; this.executionScheduler = executionScheduler; - this.globalRetention = Monitoring.HISTORY_DURATION.get(settings); + this.globalRetention = MonitoringField.HISTORY_DURATION.get(settings); this.runnable = new IndicesCleaner(); // the validation is performed by the setting's object itself - clusterSettings.addSettingsUpdateConsumer(Monitoring.HISTORY_DURATION, this::setGlobalRetention); + clusterSettings.addSettingsUpdateConsumer(MonitoringField.HISTORY_DURATION, this::setGlobalRetention); } public CleanerService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool, XPackLicenseState licenseState) { @@ -91,7 +91,7 @@ public class CleanerService extends AbstractLifecycleComponent { return globalRetention; } else { - return Monitoring.HISTORY_DURATION.getDefault(Settings.EMPTY); + return MonitoringField.HISTORY_DURATION.getDefault(Settings.EMPTY); } } @@ -106,7 +106,7 @@ public class CleanerService extends AbstractLifecycleComponent { public void setGlobalRetention(TimeValue globalRetention) { // notify the user that their setting will be ignored until they get the right license if (licenseState.isUpdateRetentionAllowed() == false) { - logger.warn("[{}] setting will be ignored until an appropriate license is applied", Monitoring.HISTORY_DURATION.getKey()); + logger.warn("[{}] setting will be ignored until an appropriate license is applied", MonitoringField.HISTORY_DURATION.getKey()); } this.globalRetention = globalRetention; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/Collector.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/Collector.java index b86aa071804..6b8d711c596 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/Collector.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/Collector.java @@ -18,7 
+18,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.monitoring.Monitoring; import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc; @@ -168,7 +168,7 @@ public abstract class Collector extends AbstractComponent { protected static String collectionSetting(final String settingName) { Objects.requireNonNull(settingName, "setting name must not be null"); - return XPackPlugin.featureSettingPrefix(Monitoring.NAME) + ".collection." + settingName; + return XpackField.featureSettingPrefix(Monitoring.NAME) + ".collection." + settingName; } protected static Setting collectionTimeoutSetting(final String settingName) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheck.java b/plugin/src/main/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheck.java index c9674aa7c8c..3faa517d904 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheck.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheck.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.ssl.SSLService; import java.util.Map; import static org.elasticsearch.xpack.XPackSettings.HTTP_SSL_ENABLED; -import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.SecurityField.setting; class PkiRealmBootstrapCheck implements BootstrapCheck { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java b/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java index 48146b89fe7..61d9a7fec71 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -78,6 +78,7 @@ 
import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.XPackFeatureSet; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.extensions.XPackExtension; import org.elasticsearch.xpack.extensions.XPackExtensionsService; import org.elasticsearch.xpack.security.action.filter.SecurityActionFilter; @@ -182,6 +183,7 @@ import org.elasticsearch.xpack.ssl.SSLConfigurationSettings; import org.elasticsearch.xpack.ssl.SSLService; import org.elasticsearch.xpack.ssl.TLSLicenseBootstrapCheck; import org.elasticsearch.xpack.ssl.action.GetCertificateInfoAction; +import org.elasticsearch.xpack.ssl.action.TransportGetCertificateInfoAction; import org.elasticsearch.xpack.ssl.rest.RestGetCertificateInfoAction; import org.elasticsearch.xpack.template.TemplateUtils; import org.joda.time.DateTime; @@ -219,13 +221,13 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin, Clus private static final Logger logger = Loggers.getLogger(XPackPlugin.class); - public static final String NAME4 = XPackPlugin.SECURITY + "4"; + public static final String NAME4 = XpackField.SECURITY + "4"; public static final Setting> USER_SETTING = - new Setting<>(setting("user"), (String) null, Optional::ofNullable, Property.NodeScope); + new Setting<>(SecurityField.setting("user"), (String) null, Optional::ofNullable, Property.NodeScope); static final Setting> AUDIT_OUTPUTS_SETTING = - Setting.listSetting(setting("audit.outputs"), - s -> s.keySet().contains(setting("audit.outputs")) ? + Setting.listSetting(SecurityField.setting("audit.outputs"), + s -> s.keySet().contains(SecurityField.setting("audit.outputs")) ? 
Collections.emptyList() : Collections.singletonList(LoggingAuditTrail.NAME), Function.identity(), Property.NodeScope); @@ -531,21 +533,21 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin, Clus settingsList.addAll(SSLConfigurationSettings.getProfileSettings()); // hide settings - settingsList.add(Setting.listSetting(setting("hide_settings"), Collections.emptyList(), Function.identity(), + settingsList.add(Setting.listSetting(SecurityField.setting("hide_settings"), Collections.emptyList(), Function.identity(), Property.NodeScope, Property.Filtered)); return settingsList; } public List getSettingsFilter(@Nullable XPackExtensionsService extensionsService) { - List asArray = settings.getAsList(setting("hide_settings")); + List asArray = settings.getAsList(SecurityField.setting("hide_settings")); ArrayList settingsFilter = new ArrayList<>(asArray); final List extensions = extensionsService == null ? Collections.emptyList() : extensionsService.getExtensions(); settingsFilter.addAll(RealmSettings.getSettingsFilter(extensions)); // hide settings where we don't define them - they are part of a group... - settingsFilter.add("transport.profiles.*." + setting("*")); + settingsFilter.add("transport.profiles.*." 
+ SecurityField.setting("*")); return settingsFilter; } @@ -608,7 +610,7 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin, Clus new ActionHandler<>(DeleteRoleMappingAction.INSTANCE, TransportDeleteRoleMappingAction.class), new ActionHandler<>(CreateTokenAction.INSTANCE, TransportCreateTokenAction.class), new ActionHandler<>(InvalidateTokenAction.INSTANCE, TransportInvalidateTokenAction.class), - new ActionHandler<>(GetCertificateInfoAction.INSTANCE, GetCertificateInfoAction.TransportAction.class) + new ActionHandler<>(GetCertificateInfoAction.INSTANCE, TransportGetCertificateInfoAction.class) ); } @@ -720,7 +722,7 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin, Clus }); } - Map realmsSettings = settings.getGroups(setting("authc.realms"), true); + Map realmsSettings = settings.getGroups(SecurityField.setting("authc.realms"), true); final boolean hasNativeRealm = XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(settings) || realmsSettings.isEmpty() || realmsSettings.entrySet().stream() @@ -734,15 +736,6 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin, Clus } } - public static String settingPrefix() { - return XPackPlugin.featureSettingPrefix(XPackPlugin.SECURITY) + "."; - } - - public static String setting(String setting) { - assert setting != null && setting.startsWith(".") == false; - return settingPrefix() + setting; - } - static boolean indexAuditLoggingEnabled(Settings settings) { if (XPackSettings.AUDIT_ENABLED.get(settings)) { List outputs = AUDIT_OUTPUTS_SETTING.get(settings); @@ -896,7 +889,7 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin, Clus List entries = new ArrayList<>(); entries.add(new NamedWriteableRegistry.Entry(ClusterState.Custom.class, TokenMetaData.TYPE, TokenMetaData::new)); entries.add(new NamedWriteableRegistry.Entry(NamedDiff.class, TokenMetaData.TYPE, TokenMetaData::readDiffFrom)); - entries.add(new 
NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackPlugin.SECURITY, SecurityFeatureSet.Usage::new)); + entries.add(new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XpackField.SECURITY, SecurityFeatureSet.Usage::new)); entries.addAll(Arrays.asList(ExpressionParser.NAMED_WRITEABLES)); return entries; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java b/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java index 562f36a8c41..b685f2dedab 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java @@ -22,8 +22,8 @@ import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.XPackFeatureSet; -import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; @@ -67,7 +67,7 @@ public class SecurityFeatureSet implements XPackFeatureSet { @Override public String name() { - return XPackPlugin.SECURITY; + return XpackField.SECURITY; } @Override @@ -199,7 +199,7 @@ public class SecurityFeatureSet implements XPackFeatureSet { Map rolesStoreUsage, Map roleMappingStoreUsage, Map sslUsage, Map auditUsage, Map ipFilterUsage, Map anonymousUsage) { - super(XPackPlugin.SECURITY, available, enabled); + super(XpackField.SECURITY, available, enabled); this.realmsUsage = realmsUsage; this.rolesStoreUsage = rolesStoreUsage; this.roleMappingStoreUsage = roleMappingStoreUsage; diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java b/plugin/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java index aacd19981c3..a8738c054d2 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java @@ -24,7 +24,7 @@ import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.security.SecurityContext; import org.elasticsearch.xpack.security.action.SecurityActionMapper; import org.elasticsearch.xpack.security.action.interceptor.RequestInterceptor; @@ -81,7 +81,7 @@ public class SecurityActionFilter extends AbstractComponent implements ActionFil logger.error("blocking [{}] operation due to expired license. Cluster health, cluster stats and indices stats \n" + "operations are blocked on license expiration. All data operations (read and write) continue to work. \n" + "If you have a new license, please update it. 
Otherwise, please reach out to your support contact.", action); - throw LicenseUtils.newComplianceException(XPackPlugin.SECURITY); + throw LicenseUtils.newComplianceException(XpackField.SECURITY); } if (licenseState.isAuthAllowed()) { @@ -116,7 +116,7 @@ public class SecurityActionFilter extends AbstractComponent implements ActionFil listener.onFailure(e); } } else if (SECURITY_ACTION_MATCHER.test(action)) { - listener.onFailure(LicenseUtils.newComplianceException(XPackPlugin.SECURITY)); + listener.onFailure(LicenseUtils.newComplianceException(XpackField.SECURITY)); } else { chain.proceed(task, action, request, listener); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java b/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java index b866e00d37d..724a8b901ae 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java @@ -15,8 +15,8 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.security.authz.store.ClientReservedRoles; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; -import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; public class TransportDeleteRoleAction extends HandledTransportAction { @@ -33,7 +33,7 @@ public class TransportDeleteRoleAction extends HandledTransportAction listener) { - if (ReservedRolesStore.isReserved(request.name())) { + if (ClientReservedRoles.isReserved(request.name())) { listener.onFailure(new IllegalArgumentException("role [" + request.name() + "] is reserved and cannot be deleted")); return; } diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java b/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java index 29097391896..b8734c41654 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.security.authz.store.ClientReservedRoles; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; @@ -45,7 +46,7 @@ public class TransportGetRolesAction extends HandledTransportAction listener) { final String name = request.roleDescriptor().getName(); - if (ReservedRolesStore.isReserved(name)) { + if (ClientReservedRoles.isReserved(name)) { listener.onFailure(new IllegalArgumentException("role [" + name + "] is reserved and cannot be modified.")); return; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java b/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java index 736f8301498..46eaf7f07ba 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java @@ -11,8 +11,8 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import 
org.elasticsearch.xpack.security.authc.esnative.ClientReservedRealm; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; -import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.threadpool.ThreadPool; @@ -35,7 +35,7 @@ public class TransportDeleteUserAction extends HandledTransportAction listener) { final String username = request.username(); - if (ReservedRealm.isReserved(username, settings)) { + if (ClientReservedRealm.isReserved(username, settings)) { if (AnonymousUser.isAnonymousUsername(username, settings)) { listener.onFailure(new IllegalArgumentException("user [" + username + "] is anonymous and cannot be deleted")); return; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java b/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java index e20b146396b..aa7a548ddf7 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.security.authc.esnative.ClientReservedRealm; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.user.SystemUser; @@ -51,7 +52,7 @@ public class TransportGetUsersAction extends HandledTransportAction realmLookup = new ArrayList<>(); if (specificUsersRequested) { for (String username : requestedUsers) { - if (ReservedRealm.isReserved(username, settings)) { 
+ if (ClientReservedRealm.isReserved(username, settings)) { realmLookup.add(username); } else if (SystemUser.NAME.equals(username) || XPackUser.NAME.equals(username)) { listener.onFailure(new IllegalArgumentException("user [" + username + "] is internal")); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java b/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java index 45de72a3fad..51a8ab270be 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java @@ -79,11 +79,11 @@ public class TransportHasPrivilegesAction extends HandledTransportAction predicateCache = new HashMap<>(); final Map indices = new LinkedHashMap<>(); - boolean allMatch = true; for (RoleDescriptor.IndicesPrivileges check : request.indexPrivileges()) { for (String index : check.getIndices()) { final Map privileges = new HashMap<>(); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java b/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java index 10db43112a8..ac8e66dc7dc 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java @@ -15,8 +15,8 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.security.authc.esnative.ClientReservedRealm; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; -import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import 
org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.xpack.security.user.XPackUser; @@ -36,7 +36,7 @@ public class TransportPutUserAction extends HandledTransportAction listener) { final String username = request.username(); - if (ReservedRealm.isReserved(username, settings)) { + if (ClientReservedRealm.isReserved(username, settings)) { if (AnonymousUser.isAnonymousUsername(username, settings)) { listener.onFailure(new IllegalArgumentException("user [" + username + "] is anonymous and cannot be modified via the API")); return; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java b/plugin/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java index 5cf92b18116..152132a7ea5 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java @@ -81,7 +81,7 @@ import java.util.function.Function; import static org.elasticsearch.xpack.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.ClientHelper.clientWithOrigin; -import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.SecurityField.setting; import static org.elasticsearch.xpack.security.audit.AuditLevel.ACCESS_DENIED; import static org.elasticsearch.xpack.security.audit.AuditLevel.ACCESS_GRANTED; import static org.elasticsearch.xpack.security.audit.AuditLevel.ANONYMOUS_ACCESS_DENIED; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java b/plugin/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java index af4bbb63bae..203574ccd34 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java +++ 
b/plugin/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java @@ -44,7 +44,7 @@ import java.util.function.Function; import static org.elasticsearch.common.Strings.collectionToCommaDelimitedString; import static org.elasticsearch.common.Strings.arrayToCommaDelimitedString; -import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.SecurityField.setting; import static org.elasticsearch.xpack.security.audit.AuditLevel.ACCESS_DENIED; import static org.elasticsearch.xpack.security.audit.AuditLevel.ACCESS_GRANTED; import static org.elasticsearch.xpack.security.audit.AuditLevel.ANONYMOUS_ACCESS_DENIED; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/Authentication.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/Authentication.java index 18ecfae7dc2..4a8b051d295 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/Authentication.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/Authentication.java @@ -11,16 +11,17 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.xpack.security.user.InternalUserSerializationHelper; import org.elasticsearch.xpack.security.user.User; import java.io.IOException; import java.util.Base64; import java.util.Objects; +// TODO(hub-cap) Clean this up after moving User over - This class can re-inherit its field AUTHENTICATION_KEY in AuthenticationField. 
+// That interface can be removed public class Authentication { - public static final String AUTHENTICATION_KEY = "_xpack_security_authentication"; - private final User user; private final RealmRef authenticatedBy; private final RealmRef lookedUpBy; @@ -38,7 +39,7 @@ public class Authentication { } public Authentication(StreamInput in) throws IOException { - this.user = User.readFrom(in); + this.user = InternalUserSerializationHelper.readFrom(in); this.authenticatedBy = new RealmRef(in); if (in.readBoolean()) { this.lookedUpBy = new RealmRef(in); @@ -66,13 +67,13 @@ public class Authentication { public static Authentication readFromContext(ThreadContext ctx) throws IOException, IllegalArgumentException { - Authentication authentication = ctx.getTransient(AUTHENTICATION_KEY); + Authentication authentication = ctx.getTransient(AuthenticationField.AUTHENTICATION_KEY); if (authentication != null) { - assert ctx.getHeader(AUTHENTICATION_KEY) != null; + assert ctx.getHeader(AuthenticationField.AUTHENTICATION_KEY) != null; return authentication; } - String authenticationHeader = ctx.getHeader(AUTHENTICATION_KEY); + String authenticationHeader = ctx.getHeader(AuthenticationField.AUTHENTICATION_KEY); if (authenticationHeader == null) { return null; } @@ -80,19 +81,19 @@ public class Authentication { } public static Authentication getAuthentication(ThreadContext context) { - return context.getTransient(Authentication.AUTHENTICATION_KEY); + return context.getTransient(AuthenticationField.AUTHENTICATION_KEY); } static Authentication deserializeHeaderAndPutInContext(String header, ThreadContext ctx) throws IOException, IllegalArgumentException { - assert ctx.getTransient(AUTHENTICATION_KEY) == null; + assert ctx.getTransient(AuthenticationField.AUTHENTICATION_KEY) == null; byte[] bytes = Base64.getDecoder().decode(header); StreamInput input = StreamInput.wrap(bytes); Version version = Version.readVersion(input); input.setVersion(version); Authentication authentication = new 
Authentication(input); - ctx.putTransient(AUTHENTICATION_KEY, authentication); + ctx.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); return authentication; } @@ -104,13 +105,13 @@ public class Authentication { throws IOException, IllegalArgumentException { ensureContextDoesNotContainAuthentication(ctx); String header = encode(); - ctx.putTransient(AUTHENTICATION_KEY, this); - ctx.putHeader(AUTHENTICATION_KEY, header); + ctx.putTransient(AuthenticationField.AUTHENTICATION_KEY, this); + ctx.putHeader(AuthenticationField.AUTHENTICATION_KEY, header); } void ensureContextDoesNotContainAuthentication(ThreadContext ctx) { - if (ctx.getTransient(AUTHENTICATION_KEY) != null) { - if (ctx.getHeader(AUTHENTICATION_KEY) == null) { + if (ctx.getTransient(AuthenticationField.AUTHENTICATION_KEY) != null) { + if (ctx.getHeader(AuthenticationField.AUTHENTICATION_KEY) == null) { throw new IllegalStateException("authentication present as a transient but not a header"); } throw new IllegalStateException("authentication is already present in the context"); @@ -126,7 +127,7 @@ public class Authentication { } void writeTo(StreamOutput out) throws IOException { - User.writeTo(user, out); + InternalUserSerializationHelper.writeTo(user, out); authenticatedBy.writeTo(out); if (lookedUpBy != null) { out.writeBoolean(true); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java index e6424147ed8..a21dcb66606 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.security.authc; -import java.net.InetSocketAddress; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -21,9 +20,7 @@ import org.elasticsearch.common.collect.Tuple; 
import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.node.Node; import org.elasticsearch.rest.RestRequest; @@ -39,8 +36,6 @@ import org.elasticsearch.xpack.security.support.Exceptions; import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.User; -import static org.elasticsearch.xpack.security.Security.setting; - /** * An authentication service that delegates the authentication process to its configured {@link Realm realms}. * This service also supports request level caching of authenticated users (i.e. once a user authenticated @@ -48,10 +43,6 @@ import static org.elasticsearch.xpack.security.Security.setting; */ public class AuthenticationService extends AbstractComponent { - public static final Setting RUN_AS_ENABLED = - Setting.boolSetting(setting("authc.run_as.enabled"), true, Property.NodeScope); - public static final String RUN_AS_USER_HEADER = "es-security-runas-user"; - private final Realms realms; private final AuditTrail auditTrail; private final AuthenticationFailureHandler failureHandler; @@ -72,7 +63,7 @@ public class AuthenticationService extends AbstractComponent { this.failureHandler = failureHandler; this.threadContext = threadPool.getThreadContext(); this.anonymousUser = anonymousUser; - this.runAsEnabled = RUN_AS_ENABLED.get(settings); + this.runAsEnabled = AuthenticationServiceField.RUN_AS_ENABLED.get(settings); this.isAnonymousUserEnabled = AnonymousUser.isAnonymousEnabled(settings); this.tokenService = tokenService; } @@ -367,7 +358,7 @@ public class AuthenticationService extends AbstractComponent { 
listener.onFailure(request.authenticationFailed(authenticationToken)); } else { if (runAsEnabled) { - final String runAsUsername = threadContext.getHeader(RUN_AS_USER_HEADER); + final String runAsUsername = threadContext.getHeader(AuthenticationServiceField.RUN_AS_USER_HEADER); if (runAsUsername != null && runAsUsername.isEmpty() == false) { lookupRunAsUser(user, runAsUsername, this::finishAuthentication); } else if (runAsUsername == null) { @@ -605,6 +596,6 @@ public class AuthenticationService extends AbstractComponent { } public static void addSettings(List> settings) { - settings.add(RUN_AS_ENABLED); + settings.add(AuthenticationServiceField.RUN_AS_ENABLED); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/RealmSettings.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/RealmSettings.java index 0b71d4faebd..619beff9f2c 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/RealmSettings.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/RealmSettings.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.extensions.XPackExtension; import static org.elasticsearch.common.Strings.isNullOrEmpty; -import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.SecurityField.setting; /** * Configures the {@link Setting#groupSetting(String, Consumer, Setting.Property...) 
group setting} for security diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 0b59231a6a1..83f87aff1b0 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -46,8 +46,8 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.security.SecurityLifecycleService; import javax.crypto.Cipher; @@ -109,13 +109,13 @@ public final class TokenService extends AbstractComponent { private static final int IV_BYTES = 12; private static final int VERSION_BYTES = 4; private static final String ENCRYPTION_CIPHER = "AES/GCM/NoPadding"; - private static final String EXPIRED_TOKEN_WWW_AUTH_VALUE = "Bearer realm=\"" + XPackPlugin.SECURITY + + private static final String EXPIRED_TOKEN_WWW_AUTH_VALUE = "Bearer realm=\"" + XpackField.SECURITY + "\", error=\"invalid_token\", error_description=\"The access token expired\""; - private static final String MALFORMED_TOKEN_WWW_AUTH_VALUE = "Bearer realm=\"" + XPackPlugin.SECURITY + + private static final String MALFORMED_TOKEN_WWW_AUTH_VALUE = "Bearer realm=\"" + XpackField.SECURITY + "\", error=\"invalid_token\", error_description=\"The access token is malformed\""; private static final String TYPE = "doc"; - public static final String THREAD_POOL_NAME = XPackPlugin.SECURITY + "-token-key"; + public static final String THREAD_POOL_NAME = XpackField.SECURITY + "-token-key"; public static final Setting TOKEN_EXPIRATION = 
Setting.timeSetting("xpack.security.authc.token.timeout", TimeValue.timeValueMinutes(20L), TimeValue.timeValueSeconds(1L), Property.NodeScope); public static final Setting DELETE_INTERVAL = Setting.timeSetting("xpack.security.authc.token.delete.interval", diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java index a03beafb08d..5b47891fa2b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java @@ -20,7 +20,6 @@ import org.apache.logging.log4j.core.config.LoggerConfig; import org.apache.logging.log4j.core.layout.PatternLayout; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cli.LoggingAwareMultiCommand; -import org.elasticsearch.cli.MultiCommand; import org.elasticsearch.cli.EnvironmentAwareCommand; import org.elasticsearch.cli.Terminal; import org.elasticsearch.cli.Terminal.Verbosity; @@ -67,7 +66,7 @@ import java.util.Set; import javax.net.ssl.HttpsURLConnection; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.SecurityField.setting; /** * This is the command-line tool used for migrating users and roles from the file-based realm into the new native realm using the API for diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index f28736efc03..98b7b2964fa 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ 
b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -35,7 +35,7 @@ import org.elasticsearch.index.engine.DocumentMissingException; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XPackClientActionPlugin; import org.elasticsearch.xpack.security.ScrollHelper; import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.action.realm.ClearRealmCacheRequest; @@ -87,7 +87,7 @@ public class NativeUsersStore extends AbstractComponent { public NativeUsersStore(Settings settings, Client client, SecurityLifecycleService securityLifecycleService) { super(settings); this.client = client; - this.isTribeNode = XPackPlugin.isTribeNode(settings); + this.isTribeNode = XPackClientActionPlugin.isTribeNode(settings); this.securityLifecycleService = securityLifecycleService; } @@ -221,7 +221,7 @@ public class NativeUsersStore extends AbstractComponent { } final String docType; - if (ReservedRealm.isReserved(username, settings)) { + if (ClientReservedRealm.isReserved(username, settings)) { docType = RESERVED_USER_TYPE; } else { docType = USER_DOC_TYPE; @@ -425,7 +425,7 @@ public class NativeUsersStore extends AbstractComponent { return; } - if (ReservedRealm.isReserved(username, settings)) { + if (ClientReservedRealm.isReserved(username, settings)) { setReservedUserEnabled(username, enabled, refreshPolicy, true, listener); } else { setRegularUserEnabled(username, enabled, refreshPolicy, listener); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java index ff44d631254..a43ead0ec88 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java +++ 
b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java @@ -17,7 +17,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.XPackSettings; -import org.elasticsearch.xpack.security.Security; +import org.elasticsearch.xpack.security.SecurityField; import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.RealmConfig; @@ -52,7 +52,7 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { static final ReservedUserInfo ENABLED_DEFAULT_USER_INFO = new ReservedUserInfo(EMPTY_PASSWORD_HASH, true, true); public static final Setting ACCEPT_DEFAULT_PASSWORD_SETTING = Setting.boolSetting( - Security.setting("authc.accept_default_password"), true, Setting.Property.NodeScope, Setting.Property.Filtered, + SecurityField.setting("authc.accept_default_password"), true, Setting.Property.NodeScope, Setting.Property.Filtered, Setting.Property.Deprecated); public static final Setting BOOTSTRAP_ELASTIC_PASSWORD = SecureSetting.secureString("bootstrap.password", KeyStoreWrapper.SEED_SETTING); @@ -80,7 +80,7 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) { if (realmEnabled == false) { listener.onResponse(AuthenticationResult.notHandled()); - } else if (isReserved(token.principal(), config.globalSettings()) == false) { + } else if (ClientReservedRealm.isReserved(token.principal(), config.globalSettings()) == false) { listener.onResponse(AuthenticationResult.notHandled()); } else { getUserInfo(token.principal(), ActionListener.wrap((userInfo) -> { @@ -116,7 +116,7 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { listener.onResponse(anonymousUser); } 
listener.onResponse(null); - } else if (isReserved(username, config.globalSettings()) == false) { + } else if (ClientReservedRealm.isReserved(username, config.globalSettings()) == false) { listener.onResponse(null); } else if (AnonymousUser.isAnonymousUsername(username, config.globalSettings())) { listener.onResponse(anonymousEnabled ? anonymousUser : null); @@ -132,18 +132,6 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { } } - public static boolean isReserved(String username, Settings settings) { - assert username != null; - switch (username) { - case ElasticUser.NAME: - case KibanaUser.NAME: - case LogstashSystemUser.NAME: - return XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(settings); - default: - return AnonymousUser.isAnonymousUsername(username, settings); - } - } - private User getUser(String username, ReservedUserInfo userInfo) { assert username != null; switch (username) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClient.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClient.java index 2b7acc14954..5cfb909aa66 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClient.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClient.java @@ -38,7 +38,7 @@ import java.util.List; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_PORT; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_PUBLISH_HOST; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_PUBLISH_PORT; -import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.SecurityField.setting; /** * A simple http client for usage in command line tools. 
This client only uses internal jdk classes and does diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/file/tool/UsersTool.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/file/tool/UsersTool.java index b7b918b8ebd..1f25f403753 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/file/tool/UsersTool.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/file/tool/UsersTool.java @@ -17,8 +17,8 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.security.authc.file.FileUserPasswdStore; import org.elasticsearch.xpack.security.authc.file.FileUserRolesStore; import org.elasticsearch.xpack.security.authc.support.Hasher; @@ -497,7 +497,7 @@ public class UsersTool extends LoggingAwareMultiCommand { * Ensure the X-Pack configuration directory exists as a child of $ES_CONF_DIR or return a helpful error message. */ private void checkConfigurationDir(Environment env) throws Exception { - Path configDir = env.configFile().resolve(XPackPlugin.NAME); + Path configDir = env.configFile().resolve(XpackField.NAME); if (Files.exists(configDir) == false) { throw new UserException(ExitCodes.CONFIG, String.format(Locale.ROOT, "Directory %s does not exist. 
Please ensure " + diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java index d030cd1eb18..dc7470d2f16 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java @@ -38,7 +38,6 @@ import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory; import org.elasticsearch.xpack.security.authc.support.CharArrays; import org.elasticsearch.xpack.ssl.SSLService; -import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -282,9 +281,7 @@ class ActiveDirectorySessionFactory extends PoolingSessionFactory { listener.onResponse(new LdapSession(logger, realm, connection, entry.getDN(), groupsResolver, metaDataResolver, timeout, null)); } - Arrays.fill(passwordBytes, (byte) 0); }, e -> { - Arrays.fill(passwordBytes, (byte) 0); listener.onFailure(e); })); } @@ -315,9 +312,7 @@ class ActiveDirectorySessionFactory extends PoolingSessionFactory { listener.onResponse( new LdapSession(logger, realm, pool, entry.getDN(), groupsResolver, metaDataResolver, timeout, null)); } - Arrays.fill(passwordBytes, (byte) 0); }, e -> { - Arrays.fill(passwordBytes, (byte) 0); listener.onFailure(e); })); } @@ -457,11 +452,9 @@ class ActiveDirectorySessionFactory extends PoolingSessionFactory { ActionListener.wrap( results -> { IOUtils.close(searchConnection); - Arrays.fill(passwordBytes, (byte) 0); handleSearchResults(results, netBiosDomainName, domainNameCache, listener); }, e -> { IOUtils.closeWhileHandlingException(searchConnection); - Arrays.fill(passwordBytes, (byte) 0); listener.onFailure(e); }), "ncname"); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java 
b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java index d489ca6c602..3a3a10dfd5d 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java @@ -86,7 +86,6 @@ public class LdapSessionFactory extends SessionFactory { listener.onResponse( (new LdapSession(logger, config, connection, ((SimpleBindRequest) connection.getLastBindRequest()).getBindDN(), groupResolver, metaDataResolver, timeout, null))); - Arrays.fill(passwordBytes, (byte) 0); } @Override @@ -103,7 +102,6 @@ public class LdapSessionFactory extends SessionFactory { } else if (loopIndex == userDnTemplates.length) { // loop break IOUtils.closeWhileHandlingException(connection); - Arrays.fill(passwordBytes, (byte) 0); listener.onFailure(containerException); } else { loop(); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java index 112244ce944..cb76c08969c 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java @@ -30,8 +30,6 @@ import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory; import org.elasticsearch.xpack.security.authc.support.CharArrays; import org.elasticsearch.xpack.ssl.SSLService; -import java.util.Arrays; - import java.util.HashSet; import java.util.Set; import java.util.function.Function; @@ -109,12 +107,10 @@ class LdapUserSearchSessionFactory extends PoolingSessionFactory { protected void doRun() throws Exception { listener.onResponse(new LdapSession(logger, config, connectionPool, dn, groupResolver, metaDataResolver, timeout, entry.getAttributes())); - Arrays.fill(passwordBytes, 
(byte) 0); } @Override public void onFailure(Exception e) { - Arrays.fill(passwordBytes, (byte) 0); listener.onFailure(e); } }); @@ -155,12 +151,10 @@ class LdapUserSearchSessionFactory extends PoolingSessionFactory { protected void doRun() throws Exception { listener.onResponse(new LdapSession(logger, config, connection, dn, groupResolver, metaDataResolver, timeout, entry.getAttributes())); - Arrays.fill(passwordBytes, (byte) 0); } @Override public void onFailure(Exception e) { - Arrays.fill(passwordBytes, (byte) 0); IOUtils.closeWhileHandlingException(connection); listener.onFailure(e); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapSession.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapSession.java index 490e93874fe..c9d11e1a7a3 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapSession.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapSession.java @@ -25,7 +25,7 @@ public class LdapSession implements Releasable { protected final Logger logger; protected final RealmConfig realm; - protected final LDAPInterface ldap; + protected final LDAPInterface connection; protected final String userDn; protected final GroupsResolver groupsResolver; private LdapMetaDataResolver metaDataResolver; @@ -44,7 +44,7 @@ public class LdapSession implements Releasable { LdapMetaDataResolver metaDataResolver, TimeValue timeout, Collection attributes) { this.logger = logger; this.realm = realm; - this.ldap = connection; + this.connection = connection; this.userDn = userDn; this.groupsResolver = groupsResolver; this.metaDataResolver = metaDataResolver; @@ -59,8 +59,8 @@ public class LdapSession implements Releasable { public void close() { // Only if it is an LDAPConnection do we need to close it, otherwise it is a connection pool and we will close all of the // connections in the pool - if (ldap instanceof LDAPConnection) { - 
((LDAPConnection) ldap).close(); + if (connection instanceof LDAPConnection) { + ((LDAPConnection) connection).close(); } } @@ -78,15 +78,22 @@ public class LdapSession implements Releasable { return realm; } + /** + * @return the connection to the LDAP/AD server of this session + */ + public LDAPInterface getConnection() { + return connection; + } + /** * Asynchronously retrieves a list of group distinguished names */ public void groups(ActionListener> listener) { - groupsResolver.resolve(ldap, userDn, timeout, logger, attributes, listener); + groupsResolver.resolve(connection, userDn, timeout, logger, attributes, listener); } public void metaData(ActionListener> listener) { - metaDataResolver.resolve(ldap, userDn, timeout, logger, attributes, listener); + metaDataResolver.resolve(connection, userDn, timeout, logger, attributes, listener); } public void resolve(ActionListener listener) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java index 23fd13d8225..f2375a7710e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java @@ -25,7 +25,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XPackClientActionPlugin; import org.elasticsearch.xpack.security.ScrollHelper; import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.action.realm.ClearRealmCacheResponse; @@ -85,7 +85,7 @@ public class NativeRoleMappingStore extends AbstractComponent 
implements UserRol public NativeRoleMappingStore(Settings settings, Client client, SecurityLifecycleService securityLifecycleService) { super(settings); this.client = client; - this.isTribeNode = XPackPlugin.isTribeNode(settings); + this.isTribeNode = XPackClientActionPlugin.isTribeNode(settings); this.securityLifecycleService = securityLifecycleService; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 998500fd795..f38d91481bf 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -56,6 +56,7 @@ import org.elasticsearch.xpack.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.security.authz.permission.Role; import org.elasticsearch.xpack.security.authz.privilege.ClusterPrivilege; import org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege; +import org.elasticsearch.xpack.security.authz.store.ClientReservedRoles; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.security.support.Automatons; @@ -77,7 +78,7 @@ import java.util.Map; import java.util.Set; import java.util.function.Predicate; -import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.SecurityField.setting; import static org.elasticsearch.xpack.security.support.Exceptions.authorizationError; public class AuthorizationService extends AbstractComponent { @@ -466,7 +467,7 @@ public class AuthorizationService extends AbstractComponent { if (roleNames.isEmpty()) { roleActionListener.onResponse(Role.EMPTY); - } else if (roleNames.contains(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName())) { + } else if 
(roleNames.contains(ClientReservedRoles.SUPERUSER_ROLE_DESCRIPTOR.getName())) { roleActionListener.onResponse(ReservedRolesStore.SUPERUSER_ROLE); } else { rolesStore.roles(roleNames, fieldPermissionsCache, roleActionListener); @@ -587,7 +588,7 @@ public class AuthorizationService extends AbstractComponent { static boolean isSuperuser(User user) { return Arrays.stream(user.roles()) - .anyMatch(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName()::equals); + .anyMatch(ClientReservedRoles.SUPERUSER_ROLE_DESCRIPTOR.getName()::equals); } public static void addSettings(List> settings) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java index 8a2c4f72a18..2a782c4456b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.xpack.ClientHelper; import org.elasticsearch.xpack.security.SecurityContext; import org.elasticsearch.xpack.security.authc.Authentication; +import org.elasticsearch.xpack.security.authc.AuthenticationField; import org.elasticsearch.xpack.security.authz.permission.Role; import org.elasticsearch.xpack.security.support.Automatons; import org.elasticsearch.xpack.security.user.SystemUser; @@ -58,7 +59,7 @@ public final class AuthorizationUtils { // there is no authentication object AND we are executing in a system context OR an internal action // AND there - Authentication authentication = threadContext.getTransient(Authentication.AUTHENTICATION_KEY); + Authentication authentication = threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY); if (authentication == null && threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME) == null) { return true; } @@ -81,7 +82,7 @@ 
public final class AuthorizationUtils { */ public static boolean shouldSetUserBasedOnActionOrigin(ThreadContext context) { final String actionOrigin = context.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME); - final Authentication authentication = context.getTransient(Authentication.AUTHENTICATION_KEY); + final Authentication authentication = context.getTransient(AuthenticationField.AUTHENTICATION_KEY); return actionOrigin != null && authentication == null; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListener.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListener.java index 0917aad4f27..682abe47e66 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListener.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListener.java @@ -14,6 +14,7 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.authc.Authentication; +import org.elasticsearch.xpack.security.authc.AuthenticationField; import static org.elasticsearch.xpack.security.authz.AuthorizationService.ORIGINATING_ACTION_KEY; import static org.elasticsearch.xpack.security.authz.AuthorizationService.ROLE_NAMES_KEY; @@ -45,7 +46,8 @@ public final class SecuritySearchOperationListener implements SearchOperationLis @Override public void onNewScrollContext(SearchContext searchContext) { if (licenseState.isAuthAllowed()) { - searchContext.scrollContext().putInContext(Authentication.AUTHENTICATION_KEY, Authentication.getAuthentication(threadContext)); + searchContext.scrollContext().putInContext(AuthenticationField.AUTHENTICATION_KEY, + Authentication.getAuthentication(threadContext)); } } @@ -57,7 +59,7 @@ public final class SecuritySearchOperationListener implements 
SearchOperationLis public void validateSearchContext(SearchContext searchContext, TransportRequest request) { if (licenseState.isAuthAllowed()) { if (searchContext.scrollContext() != null) { - final Authentication originalAuth = searchContext.scrollContext().getFromContext(Authentication.AUTHENTICATION_KEY); + final Authentication originalAuth = searchContext.scrollContext().getFromContext(AuthenticationField.AUTHENTICATION_KEY); final Authentication current = Authentication.getAuthentication(threadContext); final String action = threadContext.getTransient(ORIGINATING_ACTION_KEY); ensureAuthenticatedUserIsSame(originalAuth, current, auditTrailService, searchContext.id(), action, request, diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java index a03a29ab138..375d414c694 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java @@ -79,7 +79,7 @@ import static org.apache.lucene.search.BooleanClause.Occur.SHOULD; *

    * Based on the {@link ThreadContext} this class will enable field and/or document level security. *

    - * Field level security is enabled by wrapping the original {@link DirectoryReader} in a {@link FieldSubsetReader} + * ThrottlerField level security is enabled by wrapping the original {@link DirectoryReader} in a {@link FieldSubsetReader} * in the {@link #wrap(DirectoryReader)} method. *

    * Document level security is enabled by wrapping the original {@link DirectoryReader} in a {@link DocumentSubsetReader} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissions.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissions.java index 940dd18a8e6..e1f9d810352 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissions.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissions.java @@ -34,8 +34,8 @@ import static org.elasticsearch.xpack.security.support.Automatons.minusAndMinimi /** * Stores patterns to fields which access is granted or denied to and maintains an automaton that can be used to check if permission is * allowed for a specific field. - * Field permissions are configured via a list of strings that are patterns a field has to match. Two lists determine whether or not a - * field is granted access to: + * ThrottlerField permissions are configured via a list of strings that are patterns a field has to match. Two lists determine whether or + * not a field is granted access to: * 1. It has to match the patterns in grantedFieldsArray * 2. 
it must not match the patterns in deniedFieldsArray */ diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsCache.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsCache.java index ec10201a4a0..4b0143c72eb 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsCache.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsCache.java @@ -23,7 +23,7 @@ import java.util.concurrent.ExecutionException; import java.util.function.Predicate; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.SecurityField.setting; /** * A service for managing the caching of {@link FieldPermissions} as these may often need to be combined or created and internally they diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsDefinition.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsDefinition.java index 6bf1e0e4e5d..74ba204c182 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsDefinition.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsDefinition.java @@ -10,7 +10,7 @@ import java.util.Collections; import java.util.Set; /** - * Represents the definition of a {@link FieldPermissions}. Field permissions are defined as a + * Represents the definition of a {@link FieldPermissions}. ThrottlerField permissions are defined as a * collections of grant and exclude definitions where the exclude definition must be a subset of * the grant definition. 
*/ diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java index c9aab56212f..f238d3bc2be 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java @@ -51,7 +51,7 @@ import org.elasticsearch.xpack.security.authz.privilege.ClusterPrivilege; import org.elasticsearch.xpack.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.security.authz.privilege.Privilege; -import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.SecurityField.setting; /** * A composite roles store that combines built in roles, file-based roles, and index-based roles. Checks the built in roles first, then the diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java index 27dd51c2989..9dd9b97b1fb 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java @@ -143,7 +143,7 @@ public class FileRolesStore extends AbstractComponent { for (String segment : roleSegments) { RoleDescriptor descriptor = parseRoleDescriptor(segment, path, logger, resolvePermission, settings); if (descriptor != null) { - if (ReservedRolesStore.isReserved(descriptor.getName())) { + if (ClientReservedRoles.isReserved(descriptor.getName())) { logger.warn("role [{}] is reserved. 
the relevant role definition in the mapping file will be ignored", descriptor.getName()); } else if (flsDlsLicensed == false && descriptor.isUsingDocumentOrFieldLevelSecurity()) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index f6ac62e4fbb..4b48f071cea 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -37,7 +37,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XPackClientActionPlugin; import org.elasticsearch.xpack.security.ScrollHelper; import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.action.role.ClearRolesCacheRequest; @@ -64,7 +64,7 @@ import static org.elasticsearch.index.query.QueryBuilders.existsQuery; import static org.elasticsearch.xpack.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; -import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.SecurityField.setting; import static org.elasticsearch.xpack.security.authz.RoleDescriptor.ROLE_TYPE; /** @@ -95,7 +95,7 @@ public class NativeRolesStore extends AbstractComponent { SecurityLifecycleService securityLifecycleService) { super(settings); this.client = client; - this.isTribeNode = XPackPlugin.isTribeNode(settings); + this.isTribeNode = XPackClientActionPlugin.isTribeNode(settings); this.securityClient = new SecurityClient(client); this.licenseState = 
licenseState; this.securityLifecycleService = securityLifecycleService; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java index d75f4c72c09..408111c8744 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java @@ -7,60 +7,31 @@ package org.elasticsearch.xpack.security.authz.store; import org.elasticsearch.xpack.security.authz.RoleDescriptor; import org.elasticsearch.xpack.security.authz.permission.Role; -import org.elasticsearch.xpack.security.SecurityExtension; -import org.elasticsearch.xpack.security.support.MetadataUtils; -import org.elasticsearch.xpack.security.user.SystemUser; -import org.elasticsearch.xpack.security.user.XPackUser; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.Map; -import java.util.ServiceLoader; import java.util.Set; public class ReservedRolesStore { - public static final RoleDescriptor SUPERUSER_ROLE_DESCRIPTOR = new RoleDescriptor("superuser", - new String[] { "all" }, - new RoleDescriptor.IndicesPrivileges[] { - RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").build()}, - new String[] { "*" }, - MetadataUtils.DEFAULT_RESERVED_METADATA); - public static final Role SUPERUSER_ROLE = Role.builder(SUPERUSER_ROLE_DESCRIPTOR, null).build(); - private static final Map RESERVED_ROLES = initializeReservedRoles(); - - private static Map initializeReservedRoles() { - Map roles = new HashMap<>(); - - roles.put("superuser", SUPERUSER_ROLE_DESCRIPTOR); - - // Services are loaded through SPI, and are defined in their META-INF/services - for(SecurityExtension ext : ServiceLoader.load(SecurityExtension.class, SecurityExtension.class.getClassLoader())) { - roles.putAll(ext.getReservedRoles()); 
- } - - return Collections.unmodifiableMap(roles); - } + public static final Role SUPERUSER_ROLE = Role.builder(ClientReservedRoles.SUPERUSER_ROLE_DESCRIPTOR, null).build(); public Map usageStats() { return Collections.emptyMap(); } public RoleDescriptor roleDescriptor(String role) { - return RESERVED_ROLES.get(role); + return ClientReservedRoles.RESERVED_ROLES.get(role); } public Collection roleDescriptors() { - return RESERVED_ROLES.values(); + return ClientReservedRoles.RESERVED_ROLES.values(); } public static Set names() { - return RESERVED_ROLES.keySet(); - } - - public static boolean isReserved(String role) { - return RESERVED_ROLES.containsKey(role) || SystemUser.ROLE_NAME.equals(role) || XPackUser.ROLE_NAME.equals(role); + return ClientReservedRoles.RESERVED_ROLES.keySet(); } } + diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/crypto/tool/SystemKeyTool.java b/plugin/src/main/java/org/elasticsearch/xpack/security/crypto/tool/SystemKeyTool.java index 3d41c21624b..23b912fb278 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/crypto/tool/SystemKeyTool.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/crypto/tool/SystemKeyTool.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XpackField; import javax.crypto.KeyGenerator; import javax.crypto.SecretKey; @@ -68,7 +68,7 @@ public class SystemKeyTool extends EnvironmentAwareCommand { } keyPath = parsePath(args.get(0)); } else { - keyPath = env.configFile().resolve(XPackPlugin.NAME).resolve("system_key"); + keyPath = env.configFile().resolve(XpackField.NAME).resolve("system_key"); } // write the key diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java 
b/plugin/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java index ee2670d9935..4a4d8772c09 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java @@ -15,7 +15,7 @@ import org.elasticsearch.rest.RestRequest; import java.io.IOException; -import static org.elasticsearch.xpack.XPackPlugin.SECURITY; +import static org.elasticsearch.xpack.XpackField.SECURITY; /** * Base class for security rest handlers. This handler takes care of ensuring that the license diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestGetRoleMappingsAction.java b/plugin/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestGetRoleMappingsAction.java index 263836cb12d..c49e0109e7f 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestGetRoleMappingsAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestGetRoleMappingsAction.java @@ -25,7 +25,7 @@ import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.GET; /** - * Rest endpoint to retrieve a role-mapping from the {@link org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore} + * Rest endpoint to retrieve a role-mapping from the org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ public class RestGetRoleMappingsAction extends SecurityBaseRestHandler { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java b/plugin/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java index b83dfcb7ed8..2582f5a2cd8 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java +++ 
b/plugin/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java @@ -45,7 +45,7 @@ import java.util.Map; import java.util.concurrent.Executor; import java.util.function.Function; -import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.SecurityField.setting; public class SecurityServerTransportInterceptor extends AbstractComponent implements TransportInterceptor { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java b/plugin/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java index 24ebac9e1d2..719d052889e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java @@ -32,7 +32,7 @@ import java.util.function.Function; import java.util.stream.Collectors; import static java.util.Collections.unmodifiableMap; -import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.SecurityField.setting; public class IPFilter { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4Transport.java b/plugin/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4Transport.java index 55ad3b2fd52..ce31e2f95d9 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4Transport.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4Transport.java @@ -22,7 +22,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TcpChannel; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.netty4.Netty4Transport; -import org.elasticsearch.transport.netty4.NettyTcpChannel; import org.elasticsearch.xpack.XPackSettings; import 
org.elasticsearch.xpack.ssl.SSLConfiguration; import org.elasticsearch.xpack.ssl.SSLService; @@ -36,7 +35,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; -import static org.elasticsearch.xpack.security.Security.setting; +import static org.elasticsearch.xpack.security.SecurityField.setting; import static org.elasticsearch.xpack.security.transport.SSLExceptionHelper.isCloseDuringHandshakeException; import static org.elasticsearch.xpack.security.transport.SSLExceptionHelper.isNotSslRecordException; import static org.elasticsearch.xpack.security.transport.SSLExceptionHelper.isReceivedCertificateUnknownException; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java b/plugin/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java index a5149486d23..85fb6945089 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java @@ -14,11 +14,12 @@ import org.elasticsearch.xpack.security.support.MetadataUtils; */ public class ElasticUser extends User { - public static final String NAME = "elastic"; + public static final String NAME = UsernamesField.ELASTIC_NAME; // used for testing in a different package - public static final String ROLE_NAME = "superuser"; + public static final String ROLE_NAME = UsernamesField.ELASTIC_ROLE; public ElasticUser(boolean enabled) { super(NAME, new String[] { ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled); } } + diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/user/InternalUserSerializationHelper.java b/plugin/src/main/java/org/elasticsearch/xpack/security/user/InternalUserSerializationHelper.java new file mode 100644 index 00000000000..2c3c164eaa4 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/user/InternalUserSerializationHelper.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch 
B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.user; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +public class InternalUserSerializationHelper { + public static User readFrom(StreamInput input) throws IOException { + final boolean isInternalUser = input.readBoolean(); + final String username = input.readString(); + if (isInternalUser) { + if (SystemUser.is(username)) { + return SystemUser.INSTANCE; + } else if (XPackUser.is(username)) { + return XPackUser.INSTANCE; + } else if (XPackSecurityUser.is(username)) { + return XPackSecurityUser.INSTANCE; + } + throw new IllegalStateException("user [" + username + "] is not an internal user"); + } + return User.partialReadFrom(username, input); + } + public static void writeTo(User user, StreamOutput output) throws IOException { + if (SystemUser.is(user)) { + output.writeBoolean(true); + output.writeString(SystemUser.NAME); + } else if (XPackUser.is(user)) { + output.writeBoolean(true); + output.writeString(XPackUser.NAME); + } else if (XPackSecurityUser.is(user)) { + output.writeBoolean(true); + output.writeString(XPackSecurityUser.NAME); + } else { + User.writeTo(user, output); + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/user/KibanaUser.java b/plugin/src/main/java/org/elasticsearch/xpack/security/user/KibanaUser.java index 8edf134b891..861279277a5 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/user/KibanaUser.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/user/KibanaUser.java @@ -12,8 +12,8 @@ import org.elasticsearch.xpack.security.support.MetadataUtils; */ public class KibanaUser extends User { - public static final String NAME = "kibana"; 
- public static final String ROLE_NAME = "kibana_system"; + public static final String NAME = UsernamesField.KIBANA_NAME; + public static final String ROLE_NAME = UsernamesField.KIBANA_ROLE; public KibanaUser(boolean enabled) { super(NAME, new String[]{ ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/user/LogstashSystemUser.java b/plugin/src/main/java/org/elasticsearch/xpack/security/user/LogstashSystemUser.java index be4eca86e2d..9f613955dff 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/user/LogstashSystemUser.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/user/LogstashSystemUser.java @@ -13,8 +13,8 @@ import org.elasticsearch.xpack.security.support.MetadataUtils; */ public class LogstashSystemUser extends User { - public static final String NAME = "logstash_system"; - public static final String ROLE_NAME = "logstash_system"; + public static final String NAME = UsernamesField.LOGSTASH_NAME; + public static final String ROLE_NAME = UsernamesField.LOGSTASH_ROLE; public static final Version DEFINED_SINCE = Version.V_5_2_0; public static final BuiltinUserInfo USER_INFO = new BuiltinUserInfo(NAME, ROLE_NAME, DEFINED_SINCE); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/user/SystemUser.java b/plugin/src/main/java/org/elasticsearch/xpack/security/user/SystemUser.java index c571b8dabd1..c74ab069ded 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/user/SystemUser.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/user/SystemUser.java @@ -14,8 +14,8 @@ import java.util.function.Predicate; */ public class SystemUser extends User { - public static final String NAME = "_system"; - public static final String ROLE_NAME = "_system"; + public static final String NAME = UsernamesField.SYSTEM_NAME; + public static final String ROLE_NAME = UsernamesField.SYSTEM_ROLE; public static final User INSTANCE 
= new SystemUser(); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/user/XPackSecurityUser.java b/plugin/src/main/java/org/elasticsearch/xpack/security/user/XPackSecurityUser.java index b4c0cc790e9..dfac8ea1d41 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/user/XPackSecurityUser.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/user/XPackSecurityUser.java @@ -10,9 +10,9 @@ package org.elasticsearch.xpack.security.user; */ public class XPackSecurityUser extends User { - public static final String NAME = "_xpack_security"; + public static final String NAME = UsernamesField.XPACK_SECURITY_NAME; public static final XPackSecurityUser INSTANCE = new XPackSecurityUser(); - private static final String ROLE_NAME = "superuser"; + private static final String ROLE_NAME = UsernamesField.XPACK_SECURITY_ROLE; private XPackSecurityUser() { super(NAME, ROLE_NAME); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/user/XPackUser.java b/plugin/src/main/java/org/elasticsearch/xpack/security/user/XPackUser.java index 67e71ddf88a..a085f6d8cbb 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/user/XPackUser.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/user/XPackUser.java @@ -15,8 +15,8 @@ import org.elasticsearch.xpack.security.support.MetadataUtils; */ public class XPackUser extends User { - public static final String NAME = "_xpack"; - public static final String ROLE_NAME = NAME; + public static final String NAME = UsernamesField.XPACK_NAME; + public static final String ROLE_NAME = UsernamesField.XPACK_ROLE; public static final Role ROLE = Role.builder(new RoleDescriptor(ROLE_NAME, new String[] { "all" }, new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices("/@&~(\\.security.*)/").privileges("all").build(), diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java 
b/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java index ca8c63905f9..1b75291b9b6 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java @@ -52,7 +52,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.common.socket.SocketAccess; -import org.elasticsearch.xpack.security.Security; +import org.elasticsearch.xpack.security.SecurityField; import org.elasticsearch.xpack.ssl.cert.CertificateInfo; /** @@ -809,7 +809,7 @@ public class SSLService extends AbstractComponent { private static List getRealmsSSLSettings(Settings settings) { List sslSettings = new ArrayList<>(); - Settings realmsSettings = settings.getByPrefix(Security.setting("authc.realms.")); + Settings realmsSettings = settings.getByPrefix(SecurityField.setting("authc.realms.")); for (String name : realmsSettings.names()) { Settings realmSSLSettings = realmsSettings.getAsSettings(name).getByPrefix("ssl."); if (realmSSLSettings.isEmpty() == false) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/action/TransportGetCertificateInfoAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ssl/action/TransportGetCertificateInfoAction.java new file mode 100644 index 00000000000..ca99436bce6 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ssl/action/TransportGetCertificateInfoAction.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ssl.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ssl.SSLService; +import org.elasticsearch.xpack.ssl.cert.CertificateInfo; + +import java.io.IOException; +import java.security.GeneralSecurityException; +import java.util.Collection; + +public class TransportGetCertificateInfoAction extends HandledTransportAction { + + private final SSLService sslService; + + @Inject + public TransportGetCertificateInfoAction(Settings settings, ThreadPool threadPool, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + SSLService sslService) { + super(settings, GetCertificateInfoAction.NAME, threadPool, transportService, actionFilters, + indexNameExpressionResolver, GetCertificateInfoAction.Request::new); + this.sslService = sslService; + } + + @Override + protected void doExecute(GetCertificateInfoAction.Request request, + ActionListener listener) { + try { + Collection certificates = sslService.getLoadedCertificates(); + listener.onResponse(new GetCertificateInfoAction.Response(certificates)); + } catch (GeneralSecurityException | IOException e) { + listener.onFailure(e); + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java index 491fb39214b..658ce4aae63 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java +++ 
b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java @@ -23,7 +23,6 @@ import java.util.List; import java.util.Map; public class IndexUpgradeService extends AbstractComponent { - public static final IndicesOptions UPGRADE_INDEX_OPTIONS = IndicesOptions.strictSingleIndexNoExpandForbidClosed(); private final List upgradeChecks; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java index ca61ba416ea..03ef92a37a9 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java @@ -26,6 +26,8 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.upgrade.actions.IndexUpgradeAction; import org.elasticsearch.xpack.upgrade.actions.IndexUpgradeInfoAction; +import org.elasticsearch.xpack.upgrade.actions.TransportIndexUpgradeAction; +import org.elasticsearch.xpack.upgrade.actions.TransportIndexUpgradeInfoAction; import org.elasticsearch.xpack.upgrade.rest.RestIndexUpgradeAction; import org.elasticsearch.xpack.upgrade.rest.RestIndexUpgradeInfoAction; @@ -66,8 +68,8 @@ public class Upgrade implements ActionPlugin { @Override public List> getActions() { return Arrays.asList( - new ActionHandler<>(IndexUpgradeInfoAction.INSTANCE, IndexUpgradeInfoAction.TransportAction.class), - new ActionHandler<>(IndexUpgradeAction.INSTANCE, IndexUpgradeAction.TransportAction.class) + new ActionHandler<>(IndexUpgradeInfoAction.INSTANCE, TransportIndexUpgradeInfoAction.class), + new ActionHandler<>(IndexUpgradeAction.INSTANCE, TransportIndexUpgradeAction.class) ); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/TransportIndexUpgradeAction.java b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/TransportIndexUpgradeAction.java new file mode 100644 index 
00000000000..1cca3e18537 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/TransportIndexUpgradeAction.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.upgrade.actions; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.upgrade.IndexUpgradeService; + +public class TransportIndexUpgradeAction extends TransportMasterNodeAction { + + private final IndexUpgradeService indexUpgradeService; + + @Inject + public TransportIndexUpgradeAction(Settings settings, TransportService transportService, ClusterService clusterService, + ThreadPool threadPool, ActionFilters actionFilters, + IndexUpgradeService indexUpgradeService, + IndexNameExpressionResolver indexNameExpressionResolver) { + super(settings, IndexUpgradeAction.NAME, transportService, clusterService, threadPool, actionFilters, + IndexUpgradeAction.Request::new, indexNameExpressionResolver); + this.indexUpgradeService = indexUpgradeService; + } + + 
@Override + protected String executor() { + return ThreadPool.Names.GENERIC; + } + + @Override + protected BulkByScrollResponse newResponse() { + return new BulkByScrollResponse(); + } + + @Override + protected ClusterBlockException checkBlock(IndexUpgradeAction.Request request, ClusterState state) { + // Cluster is not affected but we look up repositories in metadata + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); + } + + @Override + protected final void masterOperation(Task task, IndexUpgradeAction.Request request, ClusterState state, + ActionListener listener) { + TaskId taskId = new TaskId(clusterService.localNode().getId(), task.getId()); + indexUpgradeService.upgrade(taskId, request.index(), state, listener); + } + + @Override + protected final void masterOperation(IndexUpgradeAction.Request request, ClusterState state, + ActionListener listener) { + throw new UnsupportedOperationException("the task parameter is required"); + } + +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/TransportIndexUpgradeInfoAction.java b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/TransportIndexUpgradeInfoAction.java new file mode 100644 index 00000000000..25a419fca1e --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/TransportIndexUpgradeInfoAction.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.upgrade.actions; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.XpackField; +import org.elasticsearch.xpack.upgrade.IndexUpgradeService; +import org.elasticsearch.xpack.upgrade.UpgradeActionRequired; + +import java.util.Map; + +public class TransportIndexUpgradeInfoAction extends TransportMasterNodeReadAction { + + private final IndexUpgradeService indexUpgradeService; + private final XPackLicenseState licenseState; + + + @Inject + public TransportIndexUpgradeInfoAction(Settings settings, TransportService transportService, ClusterService clusterService, + ThreadPool threadPool, ActionFilters actionFilters, + IndexUpgradeService indexUpgradeService, + IndexNameExpressionResolver indexNameExpressionResolver, + XPackLicenseState licenseState) { + super(settings, IndexUpgradeInfoAction.NAME, transportService, clusterService, threadPool, actionFilters, + IndexUpgradeInfoAction.Request::new, indexNameExpressionResolver); + this.indexUpgradeService = indexUpgradeService; + this.licenseState = licenseState; + } + + @Override + protected String executor() { + return ThreadPool.Names.GENERIC; + } + + @Override + protected IndexUpgradeInfoAction.Response newResponse() { + return 
new IndexUpgradeInfoAction.Response(); + } + + @Override + protected ClusterBlockException checkBlock(IndexUpgradeInfoAction.Request request, ClusterState state) { + // Cluster is not affected but we look up repositories in metadata + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); + } + + @Override + protected final void masterOperation(final IndexUpgradeInfoAction.Request request, ClusterState state, + final ActionListener listener) { + if (licenseState.isUpgradeAllowed()) { + Map results = + indexUpgradeService.upgradeInfo(request.indices(), request.indicesOptions(), state); + listener.onResponse(new IndexUpgradeInfoAction.Response(results)); + } else { + listener.onFailure(LicenseUtils.newComplianceException(XpackField.UPGRADE)); + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/EncryptSensitiveDataBootstrapCheck.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/EncryptSensitiveDataBootstrapCheck.java index 84fc4978c31..792658072d4 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/EncryptSensitiveDataBootstrapCheck.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/EncryptSensitiveDataBootstrapCheck.java @@ -7,9 +7,8 @@ package org.elasticsearch.xpack.watcher; import org.elasticsearch.bootstrap.BootstrapCheck; import org.elasticsearch.bootstrap.BootstrapContext; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XpackField; import java.nio.file.Files; import java.nio.file.Path; @@ -25,19 +24,19 @@ final class EncryptSensitiveDataBootstrapCheck implements BootstrapCheck { @Override public BootstrapCheckResult check(BootstrapContext context) { if (Watcher.ENCRYPT_SENSITIVE_DATA_SETTING.get(context.settings) - && Watcher.ENCRYPTION_KEY_SETTING.exists(context.settings) == false) { - final Path systemKeyPath = 
environment.configFile().resolve(XPackPlugin.NAME).resolve("system_key").toAbsolutePath(); + && WatcherField.ENCRYPTION_KEY_SETTING.exists(context.settings) == false) { + final Path systemKeyPath = environment.configFile().resolve(XpackField.NAME).resolve("system_key").toAbsolutePath(); final String message; if (Files.exists(systemKeyPath)) { message = "Encryption of sensitive data requires the key to be placed in the secure setting store. Run " + - "'bin/elasticsearch-keystore add-file " + Watcher.ENCRYPTION_KEY_SETTING.getKey() + " " + + "'bin/elasticsearch-keystore add-file " + WatcherField.ENCRYPTION_KEY_SETTING.getKey() + " " + systemKeyPath + "' to import the file.\nAfter importing, the system_key file should be removed from the " + "filesystem.\nRepeat this on every node in the cluster."; } else { message = "Encryption of sensitive data requires a key to be placed in the secure setting store. First run the " + "bin/x-pack/syskeygen tool to generate a key file.\nThen run 'bin/elasticsearch-keystore add-file " + - Watcher.ENCRYPTION_KEY_SETTING.getKey() + " " + + WatcherField.ENCRYPTION_KEY_SETTING.getKey() + " " + systemKeyPath + "' to import the key into" + " the secure setting store. 
Finally, remove the system_key file from the filesystem.\n" + "Repeat this on every node in the cluster"; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index c83825c305f..ebe8676b8b5 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; -import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; @@ -51,6 +50,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.XPackFeatureSet; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.ssl.SSLService; import org.elasticsearch.xpack.watcher.actions.ActionFactory; import org.elasticsearch.xpack.watcher.actions.ActionRegistry; @@ -78,7 +78,7 @@ import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuthRegistry; import org.elasticsearch.xpack.watcher.common.http.auth.basic.BasicAuth; import org.elasticsearch.xpack.watcher.common.http.auth.basic.BasicAuthFactory; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.condition.ArrayCompareCondition; import org.elasticsearch.xpack.watcher.condition.CompareCondition; import org.elasticsearch.xpack.watcher.condition.ConditionFactory; @@ -167,11 +167,11 @@ import 
org.elasticsearch.xpack.watcher.trigger.schedule.WeeklySchedule; import org.elasticsearch.xpack.watcher.trigger.schedule.YearlySchedule; import org.elasticsearch.xpack.watcher.trigger.schedule.engine.TickerScheduleTriggerEngine; import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import java.io.IOException; -import java.io.InputStream; import java.io.UncheckedIOException; import java.time.Clock; import java.util.ArrayList; @@ -202,7 +202,6 @@ public class Watcher implements ActionPlugin { new Setting<>("index.xpack.watcher.template.version", "", Function.identity(), Setting.Property.IndexScope); public static final Setting ENCRYPT_SENSITIVE_DATA_SETTING = Setting.boolSetting("xpack.watcher.encrypt_sensitive_data", false, Setting.Property.NodeScope); - public static final Setting ENCRYPTION_KEY_SETTING = SecureSetting.secureFile("xpack.watcher.encryption_key", null); public static final Setting MAX_STOP_TIMEOUT_SETTING = Setting.timeSetting("xpack.watcher.stop.timeout", TimeValue.timeValueSeconds(30), Setting.Property.NodeScope); @@ -225,7 +224,7 @@ public class Watcher implements ActionPlugin { List entries = new ArrayList<>(); entries.add(new NamedWriteableRegistry.Entry(MetaData.Custom.class, WatcherMetaData.TYPE, WatcherMetaData::new)); entries.add(new NamedWriteableRegistry.Entry(NamedDiff.class, WatcherMetaData.TYPE, WatcherMetaData::readDiffFrom)); - entries.add(new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackPlugin.WATCHER, WatcherFeatureSet.Usage::new)); + entries.add(new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XpackField.WATCHER, WatcherFeatureSet.Usage::new)); return entries; } @@ -293,7 +292,7 @@ public class Watcher implements ActionPlugin { // conditions final Map parsers = new HashMap<>(); - parsers.put(AlwaysCondition.TYPE, (c, id, p) -> AlwaysCondition.parse(id, p)); + 
parsers.put(InternalAlwaysCondition.TYPE, (c, id, p) -> InternalAlwaysCondition.parse(id, p)); parsers.put(NeverCondition.TYPE, (c, id, p) -> NeverCondition.parse(id, p)); parsers.put(ArrayCompareCondition.TYPE, ArrayCompareCondition::parse); parsers.put(CompareCondition.TYPE, CompareCondition::parse); @@ -354,7 +353,7 @@ public class Watcher implements ActionPlugin { final WatcherSearchTemplateService watcherSearchTemplateService = new WatcherSearchTemplateService(settings, scriptService, xContentRegistry); final WatchExecutor watchExecutor = getWatchExecutor(threadPool); - final Watch.Parser watchParser = new Watch.Parser(settings, triggerService, registry, inputRegistry, cryptoService, clock); + final WatchParser watchParser = new WatchParser(settings, triggerService, registry, inputRegistry, cryptoService, clock); final ExecutionService executionService = new ExecutionService(settings, historyStore, triggeredWatchStore, watchExecutor, clock, watchParser, clusterService, client); @@ -413,7 +412,7 @@ public class Watcher implements ActionPlugin { settings.add(Setting.intSetting("xpack.watcher.execution.scroll.size", 0, Setting.Property.NodeScope)); settings.add(Setting.intSetting("xpack.watcher.watch.scroll.size", 0, Setting.Property.NodeScope)); settings.add(ENCRYPT_SENSITIVE_DATA_SETTING); - settings.add(ENCRYPTION_KEY_SETTING); + settings.add(WatcherField.ENCRYPTION_KEY_SETTING); settings.add(Setting.simpleString("xpack.watcher.internal.ops.search.default_timeout", Setting.Property.NodeScope)); settings.add(Setting.simpleString("xpack.watcher.internal.ops.bulk.default_timeout", Setting.Property.NodeScope)); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherFeatureSet.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherFeatureSet.java index d9d4bcf26e0..01483834eac 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherFeatureSet.java +++ 
b/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherFeatureSet.java @@ -18,9 +18,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.XPackFeatureSet; -import org.elasticsearch.xpack.XPackFeatureSet.Usage; -import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; public class WatcherFeatureSet implements XPackFeatureSet { @@ -37,7 +36,7 @@ public class WatcherFeatureSet implements XPackFeatureSet { @Override public String name() { - return XPackPlugin.WATCHER; + return XpackField.WATCHER; } @Override @@ -76,7 +75,7 @@ public class WatcherFeatureSet implements XPackFeatureSet { } public Usage(boolean available, boolean enabled, Map stats) { - super(XPackPlugin.WATCHER, available, enabled); + super(XpackField.WATCHER, available, enabled); this.stats = stats; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java index dd874206d4f..022d5ce93e7 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java @@ -24,6 +24,7 @@ import org.elasticsearch.index.shard.IndexingOperationListener; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.xpack.watcher.trigger.TriggerService; import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.elasticsearch.xpack.watcher.watch.WatchStoreUtils; import org.joda.time.DateTime; @@ -56,12 +57,12 @@ final class WatcherIndexingListener extends AbstractComponent implements Indexin static final Configuration INACTIVE = new Configuration(null, Collections.emptyMap()); - private final 
Watch.Parser parser; + private final WatchParser parser; private final Clock clock; private final TriggerService triggerService; private volatile Configuration configuration = INACTIVE; - WatcherIndexingListener(Settings settings, Watch.Parser parser, Clock clock, TriggerService triggerService) { + WatcherIndexingListener(Settings settings, WatchParser parser, Clock clock, TriggerService triggerService) { super(settings); this.parser = parser; this.clock = clock; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java index 407457a3b36..463a5ea35ee 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java @@ -36,6 +36,7 @@ import org.elasticsearch.xpack.watcher.execution.TriggeredWatch; import org.elasticsearch.xpack.watcher.execution.TriggeredWatchStore; import org.elasticsearch.xpack.watcher.trigger.TriggerService; import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.elasticsearch.xpack.watcher.watch.WatchStoreUtils; import java.util.ArrayList; @@ -64,14 +65,14 @@ public class WatcherService extends AbstractComponent { private final ExecutionService executionService; private final TimeValue scrollTimeout; private final int scrollSize; - private final Watch.Parser parser; + private final WatchParser parser; private final Client client; // package-private for testing final AtomicReference state = new AtomicReference<>(WatcherState.STOPPED); private final TimeValue defaultSearchTimeout; public WatcherService(Settings settings, TriggerService triggerService, TriggeredWatchStore triggeredWatchStore, - ExecutionService executionService, Watch.Parser parser, Client client) { + ExecutionService executionService, WatchParser parser, Client client) { super(settings); this.triggerService = 
triggerService; this.triggeredWatchStore = triggeredWatchStore; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/AbstractCompareCondition.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/AbstractCompareCondition.java index 53375cce1cd..da51cf9313e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/AbstractCompareCondition.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/AbstractCompareCondition.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.watcher.condition; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; import org.elasticsearch.xpack.watcher.support.Variables; import org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils; @@ -12,22 +13,23 @@ import org.elasticsearch.xpack.watcher.support.xcontent.ObjectPath; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; +import java.io.IOException; import java.time.Clock; import java.util.HashMap; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; -abstract class AbstractCompareCondition - extends Condition { +abstract class AbstractCompareCondition implements ExecutableCondition { static final Pattern DATE_MATH_PATTERN = Pattern.compile("<\\{(.+)\\}>"); static final Pattern PATH_PATTERN = Pattern.compile("\\{\\{(.+)\\}\\}"); private final Clock clock; + private final String type; protected AbstractCompareCondition(String type, Clock clock) { - super(type); this.clock = clock; + this.type = type; } @Override @@ -60,4 +62,14 @@ abstract class AbstractCompareCondition } protected abstract Result doExecute(Map model, Map resolvedValues); + + @Override + public String type() { + return type; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject().endObject(); + } } diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/ArrayCompareCondition.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/ArrayCompareCondition.java index 0686e968b2c..50de6f5b558 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/ArrayCompareCondition.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/ArrayCompareCondition.java @@ -60,7 +60,7 @@ public final class ArrayCompareCondition extends AbstractCompareCondition { return quantifier; } - public static Condition parse(Clock clock, String watchId, XContentParser parser) throws IOException { + public static ArrayCompareCondition parse(Clock clock, String watchId, XContentParser parser) throws IOException { if (parser.currentToken() != XContentParser.Token.START_OBJECT) { throw new ElasticsearchParseException("could not parse [{}] condition for watch [{}]. expected an object but found [{}] " + "instead", TYPE, watchId, parser.currentToken()); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/CompareCondition.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/CompareCondition.java index 5d49dbf94db..17d0595ac4d 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/CompareCondition.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/CompareCondition.java @@ -47,7 +47,7 @@ public final class CompareCondition extends AbstractCompareCondition { return value; } - public static Condition parse(Clock clock, String watchId, XContentParser parser) throws IOException { + public static CompareCondition parse(Clock clock, String watchId, XContentParser parser) throws IOException { if (parser.currentToken() != XContentParser.Token.START_OBJECT) { throw new ElasticsearchParseException("could not parse [{}] condition for watch [{}]. 
expected an object but found [{}] " + "instead", TYPE, watchId, parser.currentToken()); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/AlwaysCondition.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/InternalAlwaysCondition.java similarity index 69% rename from plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/AlwaysCondition.java rename to plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/InternalAlwaysCondition.java index a8b4ce9e1b3..0aa6a80494a 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/AlwaysCondition.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/InternalAlwaysCondition.java @@ -11,17 +11,14 @@ import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; import java.io.IOException; -public final class AlwaysCondition extends Condition { +public final class InternalAlwaysCondition extends AlwaysCondition implements ExecutableCondition { - public static final String TYPE = "always"; public static final Result RESULT_INSTANCE = new Result(null, TYPE, true); - public static final Condition INSTANCE = new AlwaysCondition(); + public static final InternalAlwaysCondition INSTANCE = new InternalAlwaysCondition(); - private AlwaysCondition() { - super(TYPE); - } + private InternalAlwaysCondition() { } - public static Condition parse(String watchId, XContentParser parser) throws IOException { + public static InternalAlwaysCondition parse(String watchId, XContentParser parser) throws IOException { if (parser.currentToken() != XContentParser.Token.START_OBJECT) { throw new ElasticsearchParseException("unable to parse [{}] condition for watch [{}]. 
expected an empty object but found [{}]", TYPE, watchId, parser.currentName()); @@ -39,14 +36,4 @@ public final class AlwaysCondition extends Condition { return RESULT_INSTANCE; } - @Override - public boolean equals(Object obj) { - return obj instanceof AlwaysCondition; - } - - @Override - public int hashCode() { - // All instances has to produce the same hashCode because they are all equal - return 0; - } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/NeverCondition.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/NeverCondition.java index 5382d0c364c..c4b661b3322 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/NeverCondition.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/NeverCondition.java @@ -6,22 +6,22 @@ package org.elasticsearch.xpack.watcher.condition; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; import java.io.IOException; -public final class NeverCondition extends Condition { +public final class NeverCondition implements ExecutableCondition { public static final String TYPE = "never"; public static final Result RESULT_INSTANCE = new Result(null, TYPE, false); - public static final Condition INSTANCE = new NeverCondition(); + public static final NeverCondition INSTANCE = new NeverCondition(); - private NeverCondition() { - super(TYPE); - } + private NeverCondition() { } - public static Condition parse(String watchId, XContentParser parser) throws IOException { + public static NeverCondition parse(String watchId, XContentParser parser) throws IOException { if (parser.currentToken() != XContentParser.Token.START_OBJECT) { throw new ElasticsearchParseException("could not parse [{}] condition for watch [{}]. 
expected an empty object but found [{}]", TYPE, watchId, parser.currentName()); @@ -49,4 +49,14 @@ public final class NeverCondition extends Condition { // All instances has to produce the same hashCode because they are all equal return 0; } + + @Override + public String type() { + return TYPE; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject().endObject(); + } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java index 3b7c5be1079..919190297be 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java @@ -23,7 +23,7 @@ import static org.elasticsearch.xpack.watcher.support.Exceptions.illegalState; /** * This class executes a script against the ctx payload and returns a boolean */ -public final class ScriptCondition extends Condition { +public final class ScriptCondition implements ExecutableCondition { public static final String TYPE = "script"; private static final Result MET = new Result(null, TYPE, true); private static final Result UNMET = new Result(null, TYPE, false); @@ -33,14 +33,12 @@ public final class ScriptCondition extends Condition { private final ExecutableScript.Factory scriptFactory; public ScriptCondition(Script script) { - super(TYPE); this.script = script; scriptService = null; scriptFactory = null; } ScriptCondition(Script script, ScriptService scriptService) { - super(TYPE); this.scriptService = scriptService; this.script = script; scriptFactory = scriptService.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT); @@ -98,4 +96,9 @@ public final class ScriptCondition extends Condition { public int hashCode() { return script.hashCode(); } + + @Override + public String type() { + return TYPE; + } } 
diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java index 4d4d601aac7..3d8262c1f56 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.engine.DocumentMissingException; +import org.elasticsearch.xpack.watcher.actions.ActionWrapperResult; import org.elasticsearch.xpack.watcher.common.stats.Counters; import org.elasticsearch.xpack.watcher.Watcher; import org.elasticsearch.xpack.watcher.actions.ActionWrapper; @@ -44,6 +45,8 @@ import org.elasticsearch.xpack.watcher.input.Input; import org.elasticsearch.xpack.watcher.transform.Transform; import org.elasticsearch.xpack.watcher.trigger.TriggerEvent; import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchField; +import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.elasticsearch.xpack.watcher.watch.WatchStatus; import org.joda.time.DateTime; @@ -79,7 +82,7 @@ public class ExecutionService extends AbstractComponent { private final Clock clock; private final TimeValue defaultThrottlePeriod; private final TimeValue maxStopTimeout; - private final Watch.Parser parser; + private final WatchParser parser; private final ClusterService clusterService; private final Client client; private final TimeValue indexDefaultTimeout; @@ -88,7 +91,7 @@ public class ExecutionService extends AbstractComponent { private final AtomicBoolean started = new AtomicBoolean(false); public ExecutionService(Settings settings, HistoryStore historyStore, TriggeredWatchStore triggeredWatchStore, WatchExecutor 
executor, - Clock clock, Watch.Parser parser, ClusterService clusterService, Client client) { + Clock clock, WatchParser parser, ClusterService clusterService, Client client) { super(settings); this.historyStore = historyStore; this.triggeredWatchStore = triggeredWatchStore; @@ -352,7 +355,7 @@ public class ExecutionService extends AbstractComponent { ToXContent.MapParams params = new ToXContent.MapParams(parameters); XContentBuilder source = JsonXContent.contentBuilder(). startObject() - .field(Watch.Field.STATUS.getPreferredName(), watch.status(), params) + .field(WatchField.STATUS.getPreferredName(), watch.status(), params) .endObject(); UpdateRequest updateRequest = new UpdateRequest(Watch.INDEX, Watch.DOC_TYPE, watch.id()); @@ -466,7 +469,7 @@ public class ExecutionService extends AbstractComponent { ctx.beforeActions(); for (ActionWrapper action : watch.actions()) { long start = System.nanoTime(); - ActionWrapper.Result actionResult = action.execute(ctx); + ActionWrapperResult actionResult = action.execute(ctx); long executionTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start); String type = action.action().type(); actionByTypeExecutionTime.putIfAbsent(type, new MeanMetric()); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/InternalWatchExecutor.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/InternalWatchExecutor.java index 7acffe8955a..46377cf8010 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/InternalWatchExecutor.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/InternalWatchExecutor.java @@ -7,14 +7,14 @@ package org.elasticsearch.xpack.watcher.execution; import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XpackField; import java.util.concurrent.BlockingQueue; import java.util.stream.Stream; public 
class InternalWatchExecutor implements WatchExecutor { - public static final String THREAD_POOL_NAME = XPackPlugin.WATCHER; + public static final String THREAD_POOL_NAME = XpackField.WATCHER; private final ThreadPool threadPool; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ManualExecutionContext.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ManualExecutionContext.java index 32749b9d3a3..cccd7f3ca8a 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ManualExecutionContext.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ManualExecutionContext.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.watcher.actions.Action; import org.elasticsearch.xpack.watcher.actions.ActionWrapper; +import org.elasticsearch.xpack.watcher.actions.ActionWrapperResult; import org.elasticsearch.xpack.watcher.condition.Condition; import org.elasticsearch.xpack.watcher.input.Input; import org.elasticsearch.xpack.watcher.trigger.manual.ManualTriggerEvent; @@ -50,12 +51,12 @@ public class ManualExecutionContext extends WatchExecutionContext { boolean throttleAll = allMode == ActionExecutionMode.SKIP; for (ActionWrapper action : watch.actions()) { if (throttleAll) { - onActionResult(new ActionWrapper.Result(action.id(), + onActionResult(new ActionWrapperResult(action.id(), new Action.Result.Throttled(action.action().type(), "manually skipped"))); } else { ActionExecutionMode mode = actionModes.get(action.id()); if (mode == ActionExecutionMode.SKIP) { - onActionResult(new ActionWrapper.Result(action.id(), + onActionResult(new ActionWrapperResult(action.id(), new Action.Result.Throttled(action.action().type(), "manually skipped"))); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraIssue.java 
b/plugin/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraIssue.java index efbfea4e38e..79b10180f73 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraIssue.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraIssue.java @@ -163,7 +163,7 @@ public class JiraIssue implements ToXContentObject { } else if (Field.ERRORS.match(currentFieldName)) { Map fieldErrors = parser.mapOrdered(); for (Map.Entry entry : fieldErrors.entrySet()) { - errors.add("Field [" + entry.getKey() + "] has error [" + String.valueOf(entry.getValue()) + "]"); + errors.add("Field [" + entry.getKey() + "] has error [" + String.valueOf(entry.getValue()) + "]"); } } else if (Field.ERROR_MESSAGES.match(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java index af7366b9a98..93d71fa7fef 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestAckWatchAction.java @@ -18,7 +18,7 @@ import org.elasticsearch.xpack.watcher.rest.WatcherRestHandler; import org.elasticsearch.xpack.watcher.support.xcontent.WatcherParams; import org.elasticsearch.xpack.watcher.transport.actions.ack.AckWatchRequest; import org.elasticsearch.xpack.watcher.transport.actions.ack.AckWatchResponse; -import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchField; import java.io.IOException; @@ -54,7 +54,7 @@ public class RestAckWatchAction extends WatcherRestHandler { @Override public RestResponse buildResponse(AckWatchResponse response, XContentBuilder builder) throws Exception { return new BytesRestResponse(RestStatus.OK, builder.startObject() -
.field(Watch.Field.STATUS.getPreferredName(), response.getStatus(), WatcherParams.HIDE_SECRETS) + .field(WatchField.STATUS.getPreferredName(), response.getStatus(), WatcherParams.HIDE_SECRETS) .endObject()); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java index afb4941dac4..72a775d1a7e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestActivateWatchAction.java @@ -18,7 +18,7 @@ import org.elasticsearch.xpack.watcher.rest.WatcherRestHandler; import org.elasticsearch.xpack.watcher.support.xcontent.WatcherParams; import org.elasticsearch.xpack.watcher.transport.actions.activate.ActivateWatchRequest; import org.elasticsearch.xpack.watcher.transport.actions.activate.ActivateWatchResponse; -import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchField; import java.io.IOException; @@ -51,7 +51,7 @@ public class RestActivateWatchAction extends WatcherRestHandler { @Override public RestResponse buildResponse(ActivateWatchResponse response, XContentBuilder builder) throws Exception { return new BytesRestResponse(RestStatus.OK, builder.startObject() - .field(Watch.Field.STATUS.getPreferredName(), response.getStatus(), WatcherParams.HIDE_SECRETS) + .field(WatchField.STATUS.getPreferredName(), response.getStatus(), WatcherParams.HIDE_SECRETS) .endObject()); } }); @@ -76,7 +76,7 @@ public class RestActivateWatchAction extends WatcherRestHandler { @Override public RestResponse buildResponse(ActivateWatchResponse response, XContentBuilder builder) throws Exception { return new BytesRestResponse(RestStatus.OK, builder.startObject() - .field(Watch.Field.STATUS.getPreferredName(), response.getStatus(), WatcherParams.HIDE_SECRETS) + 
.field(WatchField.STATUS.getPreferredName(), response.getStatus(), WatcherParams.HIDE_SECRETS) .endObject()); } }); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java index cc2f06e9f3b..d9676559348 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java @@ -18,7 +18,7 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XpackField; public abstract class WatcherTransportAction extends HandledTransportAction { @@ -41,7 +41,7 @@ public abstract class WatcherTransportAction { private final Clock clock; - private final Watch.Parser parser; + private final WatchParser parser; private final Client client; @Inject public TransportAckWatchAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Clock clock, XPackLicenseState licenseState, - Watch.Parser parser, Client client) { + WatchParser parser, Client client) { super(settings, AckWatchAction.NAME, transportService, threadPool, actionFilters, indexNameExpressionResolver, licenseState, AckWatchRequest::new); this.clock = clock; @@ -72,7 +74,7 @@ public class TransportAckWatchAction extends WatcherTransportAction actionIdsAsList = Arrays.asList(actionIds); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/TransportActivateWatchAction.java 
b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/TransportActivateWatchAction.java index 1f543643356..61e95e474a1 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/TransportActivateWatchAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/TransportActivateWatchAction.java @@ -25,6 +25,8 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction; import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchField; +import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.elasticsearch.xpack.watcher.watch.WatchStatus; import org.joda.time.DateTime; @@ -43,13 +45,13 @@ import static org.joda.time.DateTimeZone.UTC; public class TransportActivateWatchAction extends WatcherTransportAction { private final Clock clock; - private final Watch.Parser parser; + private final WatchParser parser; private final Client client; @Inject public TransportActivateWatchAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Clock clock, - XPackLicenseState licenseState, Watch.Parser parser, Client client) { + XPackLicenseState licenseState, WatchParser parser, Client client) { super(settings, ActivateWatchAction.NAME, transportService, threadPool, actionFilters, indexNameExpressionResolver, licenseState, ActivateWatchRequest::new); this.clock = clock; @@ -97,7 +99,7 @@ public class TransportActivateWatchAction extends WatcherTransportAction listener, Watch watch, boolean knownWatch) { - threadPool.executor(XPackPlugin.WATCHER).submit(() -> { + threadPool.executor(XpackField.WATCHER).submit(() -> { try { // ensure that the headers from the incoming request are used instead those of 
the stored watch // otherwise the watch would run as the user who stored the watch, but it needs to be run as the user who @@ -131,7 +132,7 @@ public class TransportExecuteWatchAction extends WatcherTransportAction { - private final Watch.Parser parser; + private final WatchParser parser; private final Clock clock; private final Client client; @Inject public TransportGetWatchAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, XPackLicenseState licenseState, - Watch.Parser parser, Clock clock, Client client) { + WatchParser parser, Clock clock, Client client) { super(settings, GetWatchAction.NAME, transportService, threadPool, actionFilters, indexNameExpressionResolver, licenseState, GetWatchRequest::new); this.parser = parser; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java index 04adcee59f2..ac17f729f9e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java @@ -26,6 +26,7 @@ import org.elasticsearch.xpack.watcher.support.xcontent.WatcherParams; import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction; import org.elasticsearch.xpack.watcher.watch.Payload; import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.joda.time.DateTime; import java.time.Clock; @@ -40,13 +41,13 @@ import static org.joda.time.DateTimeZone.UTC; public class TransportPutWatchAction extends WatcherTransportAction { private final Clock clock; - private final Watch.Parser parser; + private final WatchParser parser; private final Client client; @Inject 
public TransportPutWatchAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Clock clock, XPackLicenseState licenseState, - Watch.Parser parser, Client client) { + WatchParser parser, Client client) { super(settings, PutWatchAction.NAME, transportService, threadPool, actionFilters, indexNameExpressionResolver, licenseState, PutWatchRequest::new); this.clock = clock; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/TransportWatcherStatsAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/TransportWatcherStatsAction.java index a65dd88d899..9475db0929a 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/TransportWatcherStatsAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/stats/TransportWatcherStatsAction.java @@ -80,4 +80,4 @@ public class TransportWatcherStatsAction extends TransportNodesAction * Fields in cron expressions * - * Field Name *   * Allowed Values *   diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/watch/Watch.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/watch/Watch.java deleted file mode 100644 index ad0ae04bc64..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/watch/Watch.java +++ /dev/null @@ -1,360 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License.
- */ -package org.elasticsearch.xpack.watcher.watch; - -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.xpack.watcher.common.secret.Secret; -import org.elasticsearch.xpack.watcher.crypto.CryptoService; -import org.elasticsearch.xpack.watcher.watch.clock.HaltedClock; -import org.elasticsearch.xpack.watcher.actions.ActionRegistry; -import org.elasticsearch.xpack.watcher.actions.ActionStatus; -import org.elasticsearch.xpack.watcher.actions.ActionWrapper; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; -import org.elasticsearch.xpack.watcher.condition.Condition; -import org.elasticsearch.xpack.watcher.input.ExecutableInput; -import org.elasticsearch.xpack.watcher.input.InputRegistry; -import org.elasticsearch.xpack.watcher.input.none.ExecutableNoneInput; -import org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils; -import org.elasticsearch.xpack.watcher.support.xcontent.WatcherXContentParser; -import org.elasticsearch.xpack.watcher.transform.ExecutableTransform; -import org.elasticsearch.xpack.watcher.trigger.Trigger; -import org.elasticsearch.xpack.watcher.trigger.TriggerService; -import org.joda.time.DateTime; - -import java.io.IOException; -import java.time.Clock; -import java.util.Collections; -import java.util.HashMap; 
-import java.util.List; -import java.util.Map; -import java.util.regex.Pattern; - -import static java.util.Collections.unmodifiableMap; -import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; -import static org.elasticsearch.xpack.watcher.support.Exceptions.ioException; -import static org.joda.time.DateTimeZone.UTC; - -public class Watch implements ToXContentObject { - - public static final String ALL_ACTIONS_ID = "_all"; - public static final String INCLUDE_STATUS_KEY = "include_status"; - public static final String INDEX = ".watches"; - public static final String DOC_TYPE = "doc"; - - private final String id; - private final Trigger trigger; - private final ExecutableInput input; - private final Condition condition; - @Nullable private final ExecutableTransform transform; - private final List actions; - @Nullable private final TimeValue throttlePeriod; - @Nullable private final Map metadata; - private final WatchStatus status; - - private transient long version = Versions.MATCH_ANY; - - public Watch(String id, Trigger trigger, ExecutableInput input, Condition condition, @Nullable ExecutableTransform transform, - @Nullable TimeValue throttlePeriod, List actions, @Nullable Map metadata, - WatchStatus status) { - this.id = id; - this.trigger = trigger; - this.input = input; - this.condition = condition; - this.transform = transform; - this.actions = actions; - this.throttlePeriod = throttlePeriod; - this.metadata = metadata; - this.status = status; - } - - public String id() { - return id; - } - - public Trigger trigger() { - return trigger; - } - - public ExecutableInput input() { return input;} - - public Condition condition() { - return condition; - } - - public ExecutableTransform transform() { - return transform; - } - - public TimeValue throttlePeriod() { - return throttlePeriod; - } - - public List actions() { - return actions; - } - - public Map metadata() { - return metadata; - } - - public WatchStatus status() { - return status; - } - - 
public long version() { - return version; - } - - public void version(long version) { - this.version = version; - } - - /** - * Sets the state of this watch to in/active - * - * @return {@code true} if the status of this watch changed, {@code false} otherwise. - */ - public boolean setState(boolean active, DateTime now) { - return status.setActive(active, now); - } - - /** - * Acks this watch. - * - * @return {@code true} if the status of this watch changed, {@code false} otherwise. - */ - public boolean ack(DateTime now, String... actions) { - return status.onAck(now, actions); - } - - public boolean acked(String actionId) { - ActionStatus actionStatus = status.actionStatus(actionId); - return actionStatus.ackStatus().state() == ActionStatus.AckStatus.State.ACKED; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Watch watch = (Watch) o; - return watch.id.equals(id); - } - - @Override - public int hashCode() { - return id.hashCode(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Field.TRIGGER.getPreferredName()).startObject().field(trigger.type(), trigger, params).endObject(); - builder.field(Field.INPUT.getPreferredName()).startObject().field(input.type(), input, params).endObject(); - builder.field(Field.CONDITION.getPreferredName()).startObject().field(condition.type(), condition, params).endObject(); - if (transform != null) { - builder.field(Field.TRANSFORM.getPreferredName()).startObject().field(transform.type(), transform, params).endObject(); - } - if (throttlePeriod != null) { - builder.timeValueField(Field.THROTTLE_PERIOD.getPreferredName(), - Field.THROTTLE_PERIOD_HUMAN.getPreferredName(), throttlePeriod); - } - builder.startObject(Field.ACTIONS.getPreferredName()); - for (ActionWrapper action : actions) { - builder.field(action.id(), action, 
params); - } - builder.endObject(); - if (metadata != null) { - builder.field(Field.METADATA.getPreferredName(), metadata); - } - if (params.paramAsBoolean(INCLUDE_STATUS_KEY, false)) { - builder.field(Field.STATUS.getPreferredName(), status, params); - } - builder.endObject(); - return builder; - } - - public static class Parser extends AbstractComponent { - - private final TriggerService triggerService; - private final ActionRegistry actionRegistry; - private final InputRegistry inputRegistry; - private final CryptoService cryptoService; - private final ExecutableInput defaultInput; - private final Condition defaultCondition; - private final List defaultActions; - private final Clock clock; - - public Parser(Settings settings, TriggerService triggerService, ActionRegistry actionRegistry, InputRegistry inputRegistry, - @Nullable CryptoService cryptoService, Clock clock) { - - super(settings); - this.triggerService = triggerService; - this.actionRegistry = actionRegistry; - this.inputRegistry = inputRegistry; - this.cryptoService = cryptoService; - this.defaultInput = new ExecutableNoneInput(logger); - this.defaultCondition = AlwaysCondition.INSTANCE; - this.defaultActions = Collections.emptyList(); - this.clock = clock; - } - - public Watch parse(String name, boolean includeStatus, BytesReference source, XContentType xContentType) throws IOException { - return parse(name, includeStatus, false, source, new DateTime(clock.millis(), UTC), xContentType); - } - - public Watch parse(String name, boolean includeStatus, BytesReference source, DateTime now, - XContentType xContentType) throws IOException { - return parse(name, includeStatus, false, source, now, xContentType); - } - - /** - * Parses the watch represented by the given source. When parsing, any sensitive data that the - * source might contain (e.g. passwords) will be converted to {@link Secret secrets} - * Such that the returned watch will potentially hide this sensitive data behind a "secret". 
A secret - * is an abstraction around sensitive data (text). When security is enabled, the - * {@link CryptoService} is used to encrypt the secrets. - * - * This method is only called once - when the user adds a new watch. From that moment on, all representations - * of the watch in the system will be use secrets for sensitive data. - * - */ - public Watch parseWithSecrets(String id, boolean includeStatus, BytesReference source, DateTime now, XContentType xContentType) - throws IOException { - return parse(id, includeStatus, true, source, now, xContentType); - } - - private Watch parse(String id, boolean includeStatus, boolean withSecrets, BytesReference source, DateTime now, - XContentType xContentType) throws IOException { - if (logger.isTraceEnabled()) { - logger.trace("parsing watch [{}] ", source.utf8ToString()); - } - XContentParser parser = null; - try { - // EMPTY is safe here because we never use namedObject - parser = new WatcherXContentParser(xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, source), - new HaltedClock(now), withSecrets ? cryptoService : null); - parser.nextToken(); - return parse(id, includeStatus, parser); - } catch (IOException ioe) { - throw ioException("could not parse watch [{}]", ioe, id); - } finally { - if (parser != null) { - parser.close(); - } - } - } - - public Watch parse(String id, boolean includeStatus, XContentParser parser) throws IOException { - Trigger trigger = null; - ExecutableInput input = defaultInput; - Condition condition = defaultCondition; - List actions = defaultActions; - ExecutableTransform transform = null; - TimeValue throttlePeriod = null; - Map metatdata = null; - WatchStatus status = null; - - String currentFieldName = null; - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == null ) { - throw new ElasticsearchParseException("could not parse watch [{}]. 
null token", id); - } else if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token == null || currentFieldName == null) { - throw new ElasticsearchParseException("could not parse watch [{}], unexpected token [{}]", id, token); - } else if (Field.TRIGGER.match(currentFieldName)) { - trigger = triggerService.parseTrigger(id, parser); - } else if (Field.INPUT.match(currentFieldName)) { - input = inputRegistry.parse(id, parser); - } else if (Field.CONDITION.match(currentFieldName)) { - condition = actionRegistry.getConditionRegistry().parseExecutable(id, parser); - } else if (Field.TRANSFORM.match(currentFieldName)) { - transform = actionRegistry.getTransformRegistry().parse(id, parser); - } else if (Field.THROTTLE_PERIOD.match(currentFieldName)) { - throttlePeriod = timeValueMillis(parser.longValue()); - } else if (Field.THROTTLE_PERIOD_HUMAN.match(currentFieldName)) { - // Parser for human specified and 2.x backwards compatible throttle period - try { - throttlePeriod = WatcherDateTimeUtils.parseTimeValue(parser, Field.THROTTLE_PERIOD_HUMAN.toString()); - } catch (ElasticsearchParseException pe) { - throw new ElasticsearchParseException("could not parse watch [{}]. failed to parse time value for field [{}]", - pe, id, currentFieldName); - } - } else if (Field.ACTIONS.match(currentFieldName)) { - actions = actionRegistry.parseActions(id, parser); - } else if (Field.METADATA.match(currentFieldName)) { - metatdata = parser.map(); - } else if (Field.STATUS.match(currentFieldName)) { - if (includeStatus) { - status = WatchStatus.parse(id, parser, clock); - } else { - parser.skipChildren(); - } - } else { - throw new ElasticsearchParseException("could not parse watch [{}]. unexpected field [{}]", id, currentFieldName); - } - } - if (trigger == null) { - throw new ElasticsearchParseException("could not parse watch [{}]. 
missing required field [{}]", id, - Field.TRIGGER.getPreferredName()); - } - - if (status != null) { - // verify the status is valid (that every action indeed has a status) - for (ActionWrapper action : actions) { - if (status.actionStatus(action.id()) == null) { - throw new ElasticsearchParseException("could not parse watch [{}]. watch status in invalid state. action [{}] " + - "status is missing", id, action.id()); - } - } - } else { - // we need to create the initial statuses for the actions - Map actionsStatuses = new HashMap<>(); - DateTime now = new DateTime(WatcherXContentParser.clock(parser).millis(), UTC); - for (ActionWrapper action : actions) { - actionsStatuses.put(action.id(), new ActionStatus(now)); - } - status = new WatchStatus(now, unmodifiableMap(actionsStatuses)); - } - - return new Watch(id, trigger, input, condition, transform, throttlePeriod, actions, metatdata, status); - } - } - - public interface Field { - ParseField TRIGGER = new ParseField("trigger"); - ParseField INPUT = new ParseField("input"); - ParseField CONDITION = new ParseField("condition"); - ParseField ACTIONS = new ParseField("actions"); - ParseField TRANSFORM = new ParseField("transform"); - ParseField THROTTLE_PERIOD = new ParseField("throttle_period_in_millis"); - ParseField THROTTLE_PERIOD_HUMAN = new ParseField("throttle_period"); - ParseField METADATA = new ParseField("metadata"); - ParseField STATUS = new ParseField("status"); - } - - private static final Pattern NO_WS_PATTERN = Pattern.compile("\\S+"); - - public static boolean isValidId(String id) { - return Strings.isEmpty(id) == false && NO_WS_PATTERN.matcher(id).matches(); - } - -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchParser.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchParser.java new file mode 100644 index 00000000000..91ebc449828 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchParser.java @@ -0,0 +1,194 @@ +/* + * 
Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.watcher.watch; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.xpack.watcher.actions.ActionRegistry; +import org.elasticsearch.xpack.watcher.actions.ActionStatus; +import org.elasticsearch.xpack.watcher.actions.ActionWrapper; +import org.elasticsearch.xpack.watcher.common.secret.Secret; +import org.elasticsearch.xpack.watcher.condition.ExecutableCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; +import org.elasticsearch.xpack.watcher.crypto.CryptoService; +import org.elasticsearch.xpack.watcher.input.ExecutableInput; +import org.elasticsearch.xpack.watcher.input.InputRegistry; +import org.elasticsearch.xpack.watcher.input.none.ExecutableNoneInput; +import org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils; +import org.elasticsearch.xpack.watcher.support.xcontent.WatcherXContentParser; +import org.elasticsearch.xpack.watcher.transform.ExecutableTransform; +import org.elasticsearch.xpack.watcher.trigger.Trigger; +import org.elasticsearch.xpack.watcher.trigger.TriggerService; +import org.elasticsearch.xpack.watcher.watch.clock.HaltedClock; +import org.joda.time.DateTime; + +import java.io.IOException; +import java.time.Clock; +import java.util.Collections; +import java.util.HashMap; +import 
java.util.List; +import java.util.Map; + +import static java.util.Collections.unmodifiableMap; +import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; +import static org.elasticsearch.xpack.watcher.support.Exceptions.ioException; +import static org.joda.time.DateTimeZone.UTC; + +public class WatchParser extends AbstractComponent { + + private final TriggerService triggerService; + private final ActionRegistry actionRegistry; + private final InputRegistry inputRegistry; + private final CryptoService cryptoService; + private final ExecutableInput defaultInput; + private final ExecutableCondition defaultCondition; + private final List defaultActions; + private final Clock clock; + + public WatchParser(Settings settings, TriggerService triggerService, ActionRegistry actionRegistry, InputRegistry inputRegistry, + @Nullable CryptoService cryptoService, Clock clock) { + + super(settings); + this.triggerService = triggerService; + this.actionRegistry = actionRegistry; + this.inputRegistry = inputRegistry; + this.cryptoService = cryptoService; + this.defaultInput = new ExecutableNoneInput(logger); + this.defaultCondition = InternalAlwaysCondition.INSTANCE; + this.defaultActions = Collections.emptyList(); + this.clock = clock; + } + + public Watch parse(String name, boolean includeStatus, BytesReference source, XContentType xContentType) throws IOException { + return parse(name, includeStatus, false, source, new DateTime(clock.millis(), UTC), xContentType); + } + + public Watch parse(String name, boolean includeStatus, BytesReference source, DateTime now, + XContentType xContentType) throws IOException { + return parse(name, includeStatus, false, source, now, xContentType); + } + + /** + * Parses the watch represented by the given source. When parsing, any sensitive data that the + * source might contain (e.g. passwords) will be converted to {@link Secret secrets} + * Such that the returned watch will potentially hide this sensitive data behind a "secret". 
A secret + * is an abstraction around sensitive data (text). When security is enabled, the + * {@link CryptoService} is used to encrypt the secrets. + * + * This method is only called once - when the user adds a new watch. From that moment on, all representations + * of the watch in the system will be use secrets for sensitive data. + * + */ + public Watch parseWithSecrets(String id, boolean includeStatus, BytesReference source, DateTime now, XContentType xContentType) + throws IOException { + return parse(id, includeStatus, true, source, now, xContentType); + } + + private Watch parse(String id, boolean includeStatus, boolean withSecrets, BytesReference source, DateTime now, + XContentType xContentType) throws IOException { + if (logger.isTraceEnabled()) { + logger.trace("parsing watch [{}] ", source.utf8ToString()); + } + XContentParser parser = null; + try { + // EMPTY is safe here because we never use namedObject + parser = new WatcherXContentParser(xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, source), + new HaltedClock(now), withSecrets ? cryptoService : null); + parser.nextToken(); + return parse(id, includeStatus, parser); + } catch (IOException ioe) { + throw ioException("could not parse watch [{}]", ioe, id); + } finally { + if (parser != null) { + parser.close(); + } + } + } + + public Watch parse(String id, boolean includeStatus, XContentParser parser) throws IOException { + Trigger trigger = null; + ExecutableInput input = defaultInput; + ExecutableCondition condition = defaultCondition; + List actions = defaultActions; + ExecutableTransform transform = null; + TimeValue throttlePeriod = null; + Map metatdata = null; + WatchStatus status = null; + + String currentFieldName = null; + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == null ) { + throw new ElasticsearchParseException("could not parse watch [{}]. 
null token", id); + } else if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token == null || currentFieldName == null) { + throw new ElasticsearchParseException("could not parse watch [{}], unexpected token [{}]", id, token); + } else if (WatchField.TRIGGER.match(currentFieldName)) { + trigger = triggerService.parseTrigger(id, parser); + } else if (WatchField.INPUT.match(currentFieldName)) { + input = inputRegistry.parse(id, parser); + } else if (WatchField.CONDITION.match(currentFieldName)) { + condition = actionRegistry.getConditionRegistry().parseExecutable(id, parser); + } else if (WatchField.TRANSFORM.match(currentFieldName)) { + transform = actionRegistry.getTransformRegistry().parse(id, parser); + } else if (WatchField.THROTTLE_PERIOD.match(currentFieldName)) { + throttlePeriod = timeValueMillis(parser.longValue()); + } else if (WatchField.THROTTLE_PERIOD_HUMAN.match(currentFieldName)) { + // Parser for human specified and 2.x backwards compatible throttle period + try { + throttlePeriod = WatcherDateTimeUtils.parseTimeValue(parser, WatchField.THROTTLE_PERIOD_HUMAN.toString()); + } catch (ElasticsearchParseException pe) { + throw new ElasticsearchParseException("could not parse watch [{}]. failed to parse time value for field [{}]", + pe, id, currentFieldName); + } + } else if (WatchField.ACTIONS.match(currentFieldName)) { + actions = actionRegistry.parseActions(id, parser); + } else if (WatchField.METADATA.match(currentFieldName)) { + metatdata = parser.map(); + } else if (WatchField.STATUS.match(currentFieldName)) { + if (includeStatus) { + status = WatchStatus.parse(id, parser, clock); + } else { + parser.skipChildren(); + } + } else { + throw new ElasticsearchParseException("could not parse watch [{}]. unexpected field [{}]", id, currentFieldName); + } + } + if (trigger == null) { + throw new ElasticsearchParseException("could not parse watch [{}]. 
missing required field [{}]", id, + WatchField.TRIGGER.getPreferredName()); + } + + if (status != null) { + // verify the status is valid (that every action indeed has a status) + for (ActionWrapper action : actions) { + if (status.actionStatus(action.id()) == null) { + throw new ElasticsearchParseException("could not parse watch [{}]. watch status in invalid state. action [{}] " + + "status is missing", id, action.id()); + } + } + } else { + // we need to create the initial statuses for the actions + Map actionsStatuses = new HashMap<>(); + DateTime now = new DateTime(WatcherXContentParser.clock(parser).millis(), UTC); + for (ActionWrapper action : actions) { + actionsStatuses.put(action.id(), new ActionStatus(now)); + } + status = new WatchStatus(now, unmodifiableMap(actionsStatuses)); + } + + return new Watch(id, trigger, input, condition, transform, throttlePeriod, actions, metatdata, status); + } +} diff --git a/plugin/src/test/java/org/elasticsearch/integration/ldap/ActiveDirectoryRunAsTests.java b/plugin/src/test/java/org/elasticsearch/integration/ldap/ActiveDirectoryRunAsTests.java index 96b52814bfd..93bf96498f3 100644 --- a/plugin/src/test/java/org/elasticsearch/integration/ldap/ActiveDirectoryRunAsTests.java +++ b/plugin/src/test/java/org/elasticsearch/integration/ldap/ActiveDirectoryRunAsTests.java @@ -16,15 +16,11 @@ import org.elasticsearch.test.junit.annotations.Network; import org.elasticsearch.xpack.security.action.user.AuthenticateAction; import org.elasticsearch.xpack.security.action.user.AuthenticateRequest; import org.elasticsearch.xpack.security.action.user.AuthenticateResponse; -import org.elasticsearch.xpack.security.authc.AuthenticationService; -import org.elasticsearch.xpack.security.authc.Realm; -import org.elasticsearch.xpack.security.authc.Realms; +import org.elasticsearch.xpack.security.authc.AuthenticationServiceField; import org.elasticsearch.xpack.security.authc.ldap.ActiveDirectorySessionFactoryTests; -import 
org.elasticsearch.xpack.security.authc.ldap.LdapRealm; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.security.user.ElasticUser; import org.hamcrest.Matchers; -import org.junit.AfterClass; import org.junit.BeforeClass; import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER; @@ -67,7 +63,7 @@ public class ActiveDirectoryRunAsTests extends AbstractAdLdapRealmTestCase { protected Client runAsClient(String user) { final Map headers = MapBuilder.newMapBuilder() .put(BASIC_AUTH_HEADER, UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, BOOTSTRAP_PASSWORD)) - .put(AuthenticationService.RUN_AS_USER_HEADER, user) + .put(AuthenticationServiceField.RUN_AS_USER_HEADER, user) .map(); return client().filterWithHeader(headers); } diff --git a/plugin/src/test/java/org/elasticsearch/license/LicensingTests.java b/plugin/src/test/java/org/elasticsearch/license/LicensingTests.java index bc187231ee3..0e89ed7a575 100644 --- a/plugin/src/test/java/org/elasticsearch/license/LicensingTests.java +++ b/plugin/src/test/java/org/elasticsearch/license/LicensingTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.transport.Netty4Plugin; import org.elasticsearch.transport.Transport; import org.elasticsearch.xpack.TestXPackTransportClient; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.action.user.GetUsersResponse; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; @@ -244,7 +244,7 @@ public class LicensingTests extends SecurityIntegTestCase { private static void assertElasticsearchSecurityException(ThrowingRunnable runnable) { ElasticsearchSecurityException ee = expectThrows(ElasticsearchSecurityException.class, runnable); - 
assertThat(ee.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XPackPlugin.SECURITY)); + assertThat(ee.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XpackField.SECURITY)); assertThat(ee.status(), is(RestStatus.FORBIDDEN)); } diff --git a/plugin/src/test/java/org/elasticsearch/license/MachineLearningLicensingTests.java b/plugin/src/test/java/org/elasticsearch/license/MachineLearningLicensingTests.java index f940f741724..df40f6604a6 100644 --- a/plugin/src/test/java/org/elasticsearch/license/MachineLearningLicensingTests.java +++ b/plugin/src/test/java/org/elasticsearch/license/MachineLearningLicensingTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.transport.Transport; import org.elasticsearch.xpack.TestXPackTransportClient; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.ml.action.CloseJobAction; import org.elasticsearch.xpack.ml.action.DeleteDatafeedAction; import org.elasticsearch.xpack.ml.action.DeleteJobAction; @@ -67,7 +67,7 @@ public class MachineLearningLicensingTests extends BaseMlIntegTestCase { } catch (ElasticsearchSecurityException e) { assertThat(e.status(), is(RestStatus.FORBIDDEN)); assertThat(e.getMessage(), containsString("non-compliant")); - assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XPackPlugin.MACHINE_LEARNING)); + assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XpackField.MACHINE_LEARNING)); } // Pick a license that does allow machine learning @@ -110,7 +110,7 @@ public class MachineLearningLicensingTests extends BaseMlIntegTestCase { } catch (ElasticsearchSecurityException e) { assertThat(e.status(), is(RestStatus.FORBIDDEN)); assertThat(e.getMessage(), containsString("non-compliant")); - assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), 
hasItem(XPackPlugin.MACHINE_LEARNING)); + assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XpackField.MACHINE_LEARNING)); } // Pick a license that does allow machine learning @@ -162,7 +162,7 @@ public class MachineLearningLicensingTests extends BaseMlIntegTestCase { } catch (ElasticsearchSecurityException e) { assertThat(e.status(), is(RestStatus.FORBIDDEN)); assertThat(e.getMessage(), containsString("non-compliant")); - assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XPackPlugin.MACHINE_LEARNING)); + assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XpackField.MACHINE_LEARNING)); } // Pick a license that does allow machine learning @@ -331,7 +331,7 @@ public class MachineLearningLicensingTests extends BaseMlIntegTestCase { } catch (ElasticsearchSecurityException e) { assertThat(e.status(), is(RestStatus.FORBIDDEN)); assertThat(e.getMessage(), containsString("non-compliant")); - assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XPackPlugin.MACHINE_LEARNING)); + assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XpackField.MACHINE_LEARNING)); } // Pick a license that does allow machine learning diff --git a/plugin/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java b/plugin/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java index 2a2c60f667f..8f6dc4c0ee4 100644 --- a/plugin/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java +++ b/plugin/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java @@ -7,8 +7,7 @@ package org.elasticsearch.license; import org.elasticsearch.license.License.OperationMode; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.XPackPlugin; -import org.elasticsearch.xpack.logstash.Logstash; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.monitoring.Monitoring; import org.hamcrest.Matchers; @@ -186,25 +185,25 @@ public 
class XPackLicenseStateTests extends ESTestCase { public void testSecurityAckBasicToNotGoldOrStandard() { OperationMode toMode = randomFrom(OperationMode.values(), mode -> mode != GOLD && mode != STANDARD); - assertAckMesssages(XPackPlugin.SECURITY, BASIC, toMode, 0); + assertAckMesssages(XpackField.SECURITY, BASIC, toMode, 0); } public void testSecurityAckAnyToTrialOrPlatinum() { - assertAckMesssages(XPackPlugin.SECURITY, randomMode(), randomTrialOrPlatinumMode(), 0); + assertAckMesssages(XpackField.SECURITY, randomMode(), randomTrialOrPlatinumMode(), 0); } public void testSecurityAckTrialStandardGoldOrPlatinumToBasic() { - assertAckMesssages(XPackPlugin.SECURITY, randomTrialStandardGoldOrPlatinumMode(), BASIC, 3); + assertAckMesssages(XpackField.SECURITY, randomTrialStandardGoldOrPlatinumMode(), BASIC, 3); } public void testSecurityAckAnyToStandard() { OperationMode from = randomFrom(BASIC, GOLD, PLATINUM, TRIAL); - assertAckMesssages(XPackPlugin.SECURITY, from, STANDARD, 4); + assertAckMesssages(XpackField.SECURITY, from, STANDARD, 4); } public void testSecurityAckBasicStandardTrialOrPlatinumToGold() { OperationMode from = randomFrom(BASIC, PLATINUM, TRIAL, STANDARD); - assertAckMesssages(XPackPlugin.SECURITY, from, GOLD, 2); + assertAckMesssages(XpackField.SECURITY, from, GOLD, 2); } public void testMonitoringAckBasicToAny() { @@ -390,11 +389,11 @@ public class XPackLicenseStateTests extends ESTestCase { } public void testSqlAckAnyToTrialOrPlatinum() { - assertAckMesssages(XPackPlugin.SQL, randomMode(), randomTrialOrPlatinumMode(), 0); + assertAckMesssages(XpackField.SQL, randomMode(), randomTrialOrPlatinumMode(), 0); } public void testSqlAckTrialOrPlatinumToNotTrialOrPlatinum() { - assertAckMesssages(XPackPlugin.SQL, randomTrialOrPlatinumMode(), randomBasicStandardOrGold(), 1); + assertAckMesssages(XpackField.SQL, randomTrialOrPlatinumMode(), randomBasicStandardOrGold(), 1); } } diff --git 
a/plugin/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/plugin/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java index e9104ea7e61..19aa1bbbd8a 100644 --- a/plugin/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java +++ b/plugin/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java @@ -22,6 +22,7 @@ import org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration; import org.elasticsearch.transport.Netty4Plugin; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail; @@ -111,7 +112,7 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas public Settings nodeSettings(int nodeOrdinal) { final Path home = nodePath(parentFolder, subfolderPrefix, nodeOrdinal); SecurityTestUtils.createFolder(home); - final Path xpackConf = home.resolve("config").resolve(XPackPlugin.NAME); + final Path xpackConf = home.resolve("config").resolve(XpackField.NAME); SecurityTestUtils.createFolder(xpackConf); writeFile(xpackConf, "users", configUsers()); writeFile(xpackConf, "users_roles", configUsersRoles()); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java b/plugin/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java index 9f6248f4913..11607e8ab42 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java @@ -5,19 +5,13 @@ */ package org.elasticsearch.xpack.deprecation; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.xcontent.XContentBuilder; -import 
org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import java.util.function.Supplier; -import static org.elasticsearch.xpack.deprecation.DeprecationChecks.INDEX_SETTINGS_CHECKS; import static org.hamcrest.core.IsEqual.equalTo; public class DeprecationChecksTests extends ESTestCase { @@ -33,7 +27,7 @@ public class DeprecationChecksTests extends ESTestCase { for (int i = 0; i < numChecksPassed; i++) { checks.add(() -> null); } - List filteredIssues = DeprecationChecks.filterChecks(checks, Supplier::get); + List filteredIssues = DeprecationInfoAction.filterChecks(checks, Supplier::get); assertThat(filteredIssues.size(), equalTo(numChecksFailed)); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java b/plugin/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java index fa8a21b19a7..6e064976693 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java @@ -16,7 +16,6 @@ import java.util.List; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.deprecation.DeprecationChecks.INDEX_SETTINGS_CHECKS; -import static org.hamcrest.core.IsEqual.equalTo; public class IndexDeprecationChecksTests extends ESTestCase { @@ -27,7 +26,7 @@ public class IndexDeprecationChecksTests extends ESTestCase { .numberOfShards(1) .numberOfReplicas(0) .build(); - List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); + List issues = DeprecationInfoAction.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); assertEquals(singletonList(expected), issues); } @@ -78,7 +77,7 @@ public class IndexDeprecationChecksTests extends ESTestCase { 
"breaking_60_mappings_changes.html#_coercion_of_boolean_fields", "[[type: testBooleanCoercion, field: my_boolean], [type: testBooleanCoercion, field: my_inner_boolean]," + " [type: testBooleanCoercion, field: my_text, multifield: raw]]"); - List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); + List issues = DeprecationInfoAction.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); assertEquals(singletonList(expected), issues); } @@ -118,7 +117,7 @@ public class IndexDeprecationChecksTests extends ESTestCase { "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + "breaking_60_mappings_changes.html#_unrecognized_literal_match_mapping_type_literal_options_not_silently_ignored", "[type: test, dynamicFieldDefinitionintegers, unknown match_mapping_type[UNKNOWN_VALUE]]"); - List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); + List issues = DeprecationInfoAction.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); assertEquals(singletonList(expected), issues); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java index 008e71c55a2..35a35d038c2 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.XPackFeatureSet; import org.elasticsearch.xpack.XPackFeatureSet.Usage; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.ml.action.GetDatafeedsStatsAction; import org.elasticsearch.xpack.ml.action.GetJobsStatsAction; import org.elasticsearch.xpack.ml.action.util.QueryPage; @@ -160,7 
+160,7 @@ public class MachineLearningFeatureSetTests extends ESTestCase { for (XPackFeatureSet.Usage usage : Arrays.asList(mlUsage, serializedUsage)) { assertThat(usage, is(notNullValue())); - assertThat(usage.name(), is(XPackPlugin.MACHINE_LEARNING)); + assertThat(usage.name(), is(XpackField.MACHINE_LEARNING)); assertThat(usage.enabled(), is(true)); assertThat(usage.available(), is(true)); XContentSource source; @@ -240,7 +240,7 @@ public class MachineLearningFeatureSetTests extends ESTestCase { } ClusterState clusterState = new ClusterState.Builder(ClusterState.EMPTY_STATE) .metaData(new MetaData.Builder() - .putCustom(MlMetadata.TYPE, mlMetadataBuilder.build())) + .putCustom(MLMetadataField.TYPE, mlMetadataBuilder.build())) .build(); when(clusterService.state()).thenReturn(clusterState); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/MlClientHelperTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/MlClientHelperTests.java index dfbc479bc48..6533ddbd3c8 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/MlClientHelperTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/MlClientHelperTests.java @@ -15,8 +15,8 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.security.authc.Authentication; -import org.elasticsearch.xpack.security.authc.AuthenticationService; +import org.elasticsearch.xpack.security.authc.AuthenticationField; +import org.elasticsearch.xpack.security.authc.AuthenticationServiceField; import org.junit.Before; import java.util.Collections; @@ -61,15 +61,15 @@ public class MlClientHelperTests extends ESTestCase { DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed-foo", "foo"); builder.setIndices(Collections.singletonList("foo-index")); Map headers = MapBuilder.newMapBuilder() - 
.put(Authentication.AUTHENTICATION_KEY, "anything") - .put(AuthenticationService.RUN_AS_USER_HEADER, "anything") + .put(AuthenticationField.AUTHENTICATION_KEY, "anything") + .put(AuthenticationServiceField.RUN_AS_USER_HEADER, "anything") .map(); builder.setHeaders(headers); assertRunAsExecution(builder.build(), h -> { assertThat(h.keySet(), hasSize(2)); - assertThat(h, hasEntry(Authentication.AUTHENTICATION_KEY, "anything")); - assertThat(h, hasEntry(AuthenticationService.RUN_AS_USER_HEADER, "anything")); + assertThat(h, hasEntry(AuthenticationField.AUTHENTICATION_KEY, "anything")); + assertThat(h, hasEntry(AuthenticationServiceField.RUN_AS_USER_HEADER, "anything")); }); } @@ -94,8 +94,8 @@ public class MlClientHelperTests extends ESTestCase { // Check that headers are not set Map headers = client.threadPool().getThreadContext().getHeaders(); - assertThat(headers, not(hasEntry(Authentication.AUTHENTICATION_KEY, "anything"))); - assertThat(headers, not(hasEntry(AuthenticationService.RUN_AS_USER_HEADER, "anything"))); + assertThat(headers, not(hasEntry(AuthenticationField.AUTHENTICATION_KEY, "anything"))); + assertThat(headers, not(hasEntry(AuthenticationServiceField.RUN_AS_USER_HEADER, "anything"))); return client.search(new SearchRequest()).actionGet(); }); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java index e4672af0b8a..aa56f359e23 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java @@ -105,7 +105,7 @@ public class MlInitializationServiceTests extends ESTestCase { .localNodeId("_node_id") .masterNodeId("_node_id")) .metaData(MetaData.builder() - .putCustom(MlMetadata.TYPE, new MlMetadata.Builder().build())) + .putCustom(MLMetadataField.TYPE, new MlMetadata.Builder().build())) .build(); 
MlDailyMaintenanceService initialDailyMaintenanceService = mock(MlDailyMaintenanceService.class); initializationService.setDailyMaintenanceService(initialDailyMaintenanceService); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java index 3ff569f81e1..2c27fc1e5f7 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java @@ -282,7 +282,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); StartDatafeedAction.DatafeedParams params = new StartDatafeedAction.DatafeedParams(datafeedConfig1.getId(), 0L); - tasksBuilder.addTask(MlMetadata.datafeedTaskId("datafeed1"), StartDatafeedAction.TASK_NAME, params, INITIAL_ASSIGNMENT); + tasksBuilder.addTask(MLMetadataField.datafeedTaskId("datafeed1"), StartDatafeedAction.TASK_NAME, params, INITIAL_ASSIGNMENT); PersistentTasksCustomMetaData tasksInProgress = tasksBuilder.build(); DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig1.getId()); @@ -343,7 +343,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); StartDatafeedAction.DatafeedParams params = new StartDatafeedAction.DatafeedParams("datafeed1", 0L); - tasksBuilder.addTask(MlMetadata.datafeedTaskId("datafeed1"), StartDatafeedAction.TASK_NAME, params, INITIAL_ASSIGNMENT); + tasksBuilder.addTask(MLMetadataField.datafeedTaskId("datafeed1"), StartDatafeedAction.TASK_NAME, params, INITIAL_ASSIGNMENT); PersistentTasksCustomMetaData tasksInProgress = tasksBuilder.build(); MlMetadata.Builder builder2 = new MlMetadata.Builder(result); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/CloseJobActionRequestTests.java 
b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/CloseJobActionRequestTests.java index 987116e3101..1334a845b6f 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/CloseJobActionRequestTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/CloseJobActionRequestTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.test.AbstractStreamableXContentTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.action.CloseJobAction.Request; import org.elasticsearch.xpack.ml.datafeed.DatafeedState; @@ -87,7 +88,8 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, - () -> CloseJobAction.validateJobAndTaskState("job_id", mlBuilder.build(), startDataFeedTaskBuilder.build())); + () -> TransportCloseJobAction.validateJobAndTaskState("job_id", mlBuilder.build(), + startDataFeedTaskBuilder.build())); assertEquals(RestStatus.CONFLICT, e.status()); assertEquals("cannot close job [job_id], datafeed hasn't been stopped", e.getMessage()); @@ -97,7 +99,7 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa addTask("datafeed_id", 0L, null, DatafeedState.STOPPED, dataFeedNotStartedTaskBuilder); } - CloseJobAction.validateJobAndTaskState("job_id", mlBuilder.build(), dataFeedNotStartedTaskBuilder.build()); + TransportCloseJobAction.validateJobAndTaskState("job_id", mlBuilder.build(), dataFeedNotStartedTaskBuilder.build()); } public void testValidate_jobIsOpening() { @@ -108,7 +110,7 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("opening-job", null, 
null, tasksBuilder); - CloseJobAction.validateJobAndTaskState("opening-job", mlBuilder.build(), tasksBuilder.build()); + TransportCloseJobAction.validateJobAndTaskState("opening-job", mlBuilder.build(), tasksBuilder.build()); } public void testValidate_jobIsMissing() { @@ -118,7 +120,7 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa addJobTask("missing-job", null, null, tasksBuilder); expectThrows(ResourceNotFoundException.class, () -> - CloseJobAction.validateJobAndTaskState("missing-job", mlBuilder.build(), tasksBuilder.build())); + TransportCloseJobAction.validateJobAndTaskState("missing-job", mlBuilder.build(), tasksBuilder.build())); } public void testResolve_givenAll() { @@ -136,7 +138,7 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa addJobTask("job_id_4", null, JobState.CLOSING, tasksBuilder); ClusterState cs1 = ClusterState.builder(new ClusterName("_name")) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlBuilder.build()) + .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, mlBuilder.build()) .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())) .build(); @@ -145,13 +147,13 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa CloseJobAction.Request request = new CloseJobAction.Request("_all"); request.setForce(true); - CloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs); + TransportCloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs); assertEquals(Arrays.asList("job_id_1", "job_id_2", "job_id_3"), openJobs); assertEquals(Collections.singletonList("job_id_4"), closingJobs); request.setForce(false); expectThrows(ElasticsearchStatusException.class, - () -> CloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs)); + () -> TransportCloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs)); } public void testResolve_givenJobId() { 
@@ -162,7 +164,7 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa addJobTask("job_id_1", null, JobState.OPENED, tasksBuilder); ClusterState cs1 = ClusterState.builder(new ClusterName("_name")) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlBuilder.build()) + .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, mlBuilder.build()) .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())) .build(); @@ -170,18 +172,18 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa List closingJobs = new ArrayList<>(); CloseJobAction.Request request = new CloseJobAction.Request("job_id_1"); - CloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs); + TransportCloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs); assertEquals(Collections.singletonList("job_id_1"), openJobs); assertEquals(Collections.emptyList(), closingJobs); // Job without task is closed cs1 = ClusterState.builder(new ClusterName("_name")) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlBuilder.build())) + .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, mlBuilder.build())) .build(); openJobs.clear(); closingJobs.clear(); - CloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs); + TransportCloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs); assertEquals(Collections.emptyList(), openJobs); assertEquals(Collections.emptyList(), closingJobs); } @@ -191,7 +193,7 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa mlBuilder.putJob(BaseMlIntegTestCase.createFareQuoteJob("job_id_1").build(new Date()), false); ClusterState cs1 = ClusterState.builder(new ClusterName("_name")) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlBuilder.build())) + .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, mlBuilder.build())) .build(); List openJobs = 
new ArrayList<>(); @@ -199,7 +201,7 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa CloseJobAction.Request request = new CloseJobAction.Request("missing-job"); expectThrows(ResourceNotFoundException.class, - () -> CloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs)); + () -> TransportCloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs)); } public void testResolve_givenJobIdFailed() { @@ -210,7 +212,8 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa addJobTask("job_id_failed", null, JobState.FAILED, tasksBuilder); ClusterState cs1 = ClusterState.builder(new ClusterName("_name")).metaData(new MetaData.Builder() - .putCustom(MlMetadata.TYPE, mlBuilder.build()).putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())).build(); + .putCustom(MLMetadataField.TYPE, mlBuilder.build()).putCustom(PersistentTasksCustomMetaData.TYPE, + tasksBuilder.build())).build(); List openJobs = new ArrayList<>(); List closingJobs = new ArrayList<>(); @@ -218,7 +221,7 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa CloseJobAction.Request request = new CloseJobAction.Request("job_id_failed"); request.setForce(true); - CloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs); + TransportCloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs); assertEquals(Collections.singletonList("job_id_failed"), openJobs); assertEquals(Collections.emptyList(), closingJobs); @@ -226,7 +229,8 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa closingJobs.clear(); request.setForce(false); - expectThrows(ElasticsearchStatusException.class, () -> CloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs)); + expectThrows(ElasticsearchStatusException.class, () -> TransportCloseJobAction.resolveAndValidateJobId(request, cs1, + openJobs, closingJobs)); } public 
void testResolve_withSpecificJobIds() { @@ -243,32 +247,32 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa // closed job has no task ClusterState cs1 = ClusterState.builder(new ClusterName("_name")) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlBuilder.build()) + .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, mlBuilder.build()) .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())) .build(); List openJobs = new ArrayList<>(); List closingJobs = new ArrayList<>(); - CloseJobAction.resolveAndValidateJobId(new CloseJobAction.Request("_all"), cs1, openJobs, closingJobs); + TransportCloseJobAction.resolveAndValidateJobId(new CloseJobAction.Request("_all"), cs1, openJobs, closingJobs); assertEquals(Arrays.asList("job_id_open-1", "job_id_open-2"), openJobs); assertEquals(Collections.singletonList("job_id_closing"), closingJobs); openJobs.clear(); closingJobs.clear(); - CloseJobAction.resolveAndValidateJobId(new CloseJobAction.Request("*open*"), cs1, openJobs, closingJobs); + TransportCloseJobAction.resolveAndValidateJobId(new CloseJobAction.Request("*open*"), cs1, openJobs, closingJobs); assertEquals(Arrays.asList("job_id_open-1", "job_id_open-2"), openJobs); assertEquals(Collections.emptyList(), closingJobs); openJobs.clear(); closingJobs.clear(); - CloseJobAction.resolveAndValidateJobId(new CloseJobAction.Request("job_id_closing"), cs1, openJobs, closingJobs); + TransportCloseJobAction.resolveAndValidateJobId(new CloseJobAction.Request("job_id_closing"), cs1, openJobs, closingJobs); assertEquals(Collections.emptyList(), openJobs); assertEquals(Collections.singletonList("job_id_closing"), closingJobs); openJobs.clear(); closingJobs.clear(); - CloseJobAction.resolveAndValidateJobId(new CloseJobAction.Request("job_id_open-1"), cs1, openJobs, closingJobs); + TransportCloseJobAction.resolveAndValidateJobId(new CloseJobAction.Request("job_id_open-1"), cs1, openJobs, closingJobs); 
assertEquals(Collections.singletonList("job_id_open-1"), openJobs); assertEquals(Collections.emptyList(), closingJobs); openJobs.clear(); @@ -283,14 +287,14 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa addJobTask("foo", null, JobState.CLOSED, tasksBuilder); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlBuilder.build()) + .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, mlBuilder.build()) .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())) .build(); ClusterService clusterService = mock(ClusterService.class); when(clusterService.state()).thenReturn(clusterState); - CloseJobAction.TransportAction transportAction = new CloseJobAction.TransportAction(Settings.EMPTY, + TransportCloseJobAction transportAction = new TransportCloseJobAction(Settings.EMPTY, mock(TransportService.class), mock(ThreadPool.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), clusterService, mock(Client.class), mock(Auditor.class), mock(PersistentTasksService.class)); @@ -324,23 +328,23 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa addJobTask("openjob2", null, JobState.OPENED, tasksBuilder); addJobTask("closingjob1", null, JobState.CLOSING, tasksBuilder); - CloseJobAction.TransportAction.WaitForCloseRequest waitForCloseRequest = - CloseJobAction.buildWaitForCloseRequest(openJobIds, closingJobIds, tasksBuilder.build(), mock(Auditor.class)); + TransportCloseJobAction.WaitForCloseRequest waitForCloseRequest = + TransportCloseJobAction.buildWaitForCloseRequest(openJobIds, closingJobIds, tasksBuilder.build(), mock(Auditor.class)); assertEquals(waitForCloseRequest.jobsToFinalize, Arrays.asList("openjob1", "openjob2")); assertEquals(waitForCloseRequest.persistentTaskIds, Arrays.asList("job-openjob1", "job-openjob2", "job-closingjob1")); 
assertTrue(waitForCloseRequest.hasJobsToWaitFor()); - waitForCloseRequest = CloseJobAction.buildWaitForCloseRequest(Collections.emptyList(), Collections.emptyList(), + waitForCloseRequest = TransportCloseJobAction.buildWaitForCloseRequest(Collections.emptyList(), Collections.emptyList(), tasksBuilder.build(), mock(Auditor.class)); assertFalse(waitForCloseRequest.hasJobsToWaitFor()); } public static void addTask(String datafeedId, long startTime, String nodeId, DatafeedState state, PersistentTasksCustomMetaData.Builder tasks) { - tasks.addTask(MlMetadata.datafeedTaskId(datafeedId), StartDatafeedAction.TASK_NAME, + tasks.addTask(MLMetadataField.datafeedTaskId(datafeedId), StartDatafeedAction.TASK_NAME, new StartDatafeedAction.DatafeedParams(datafeedId, startTime), new Assignment(nodeId, "test assignment")); - tasks.updateTaskStatus(MlMetadata.datafeedTaskId(datafeedId), state); + tasks.updateTaskStatus(MLMetadataField.datafeedTaskId(datafeedId), state); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/DeleteJobActionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/DeleteJobActionTests.java index 40652768b3b..a2683d1d9e8 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/DeleteJobActionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/DeleteJobActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; @@ -20,18 +21,18 @@ public class DeleteJobActionTests extends ESTestCase { MlMetadata mlMetadata = MlMetadata.EMPTY_METADATA; ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlMetadata)) 
+ .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, mlMetadata)) .build(); - assertTrue(DeleteJobAction.TransportAction.jobIsDeletedFromState("job_id_1", clusterState)); + assertTrue(TransportDeleteJobAction.jobIsDeletedFromState("job_id_1", clusterState)); MlMetadata.Builder mlBuilder = new MlMetadata.Builder(); mlBuilder.putJob(BaseMlIntegTestCase.createScheduledJob("job_id_1").build(new Date()), false); mlMetadata = mlBuilder.build(); clusterState = ClusterState.builder(new ClusterName("_name")) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlMetadata)) + .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, mlMetadata)) .build(); - assertFalse(DeleteJobAction.TransportAction.jobIsDeletedFromState("job_id_1", clusterState)); + assertFalse(TransportDeleteJobAction.jobIsDeletedFromState("job_id_1", clusterState)); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/DeleteJobRequestTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/DeleteJobRequestTests.java index cb5819ac4ba..60217ccef5b 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/DeleteJobRequestTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/DeleteJobRequestTests.java @@ -6,19 +6,18 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.test.AbstractStreamableTestCase; -import org.elasticsearch.xpack.ml.action.DeleteJobAction.Request; -public class DeleteJobRequestTests extends AbstractStreamableTestCase { +public class DeleteJobRequestTests extends AbstractStreamableTestCase { @Override - protected Request createTestInstance() { - Request request = new Request(randomAlphaOfLengthBetween(1, 20)); + protected DeleteJobAction.Request createTestInstance() { + DeleteJobAction.Request request = new DeleteJobAction.Request(randomAlphaOfLengthBetween(1, 20)); request.setForce(randomBoolean()); return request; } @Override - protected Request createBlankInstance() { - return new 
Request(); + protected DeleteJobAction.Request createBlankInstance() { + return new DeleteJobAction.Request(); } } \ No newline at end of file diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/ForecastJobActionRequestTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/ForecastJobActionRequestTests.java index a59502a76ae..5d96b9515df 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/ForecastJobActionRequestTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/ForecastJobActionRequestTests.java @@ -77,7 +77,7 @@ public class ForecastJobActionRequestTests extends AbstractStreamableXContentTes jobBuilder.setJobVersion(Version.V_6_0_1); ForecastJobAction.Request request = new ForecastJobAction.Request(); Exception e = expectThrows(ElasticsearchStatusException.class, - () -> ForecastJobAction.TransportAction.validate(jobBuilder.build(), request)); + () -> TransportForecastJobAction.validate(jobBuilder.build(), request)); assertEquals( "Cannot run forecast because jobs created prior to version 6.1 are not supported", e.getMessage()); @@ -88,7 +88,7 @@ public class ForecastJobActionRequestTests extends AbstractStreamableXContentTes ForecastJobAction.Request request = new ForecastJobAction.Request(); Exception e = expectThrows(ElasticsearchStatusException.class, - () -> ForecastJobAction.TransportAction.validate(jobBuilder.build(), request)); + () -> TransportForecastJobAction.validate(jobBuilder.build(), request)); assertEquals( "Cannot run forecast because jobs created prior to version 6.1 are not supported", e.getMessage()); @@ -100,7 +100,7 @@ public class ForecastJobActionRequestTests extends AbstractStreamableXContentTes ForecastJobAction.Request request = new ForecastJobAction.Request(); request.setDuration(TimeValue.timeValueMinutes(1)); Exception e = expectThrows(ElasticsearchStatusException.class, - () -> ForecastJobAction.TransportAction.validate(jobBuilder.build(new Date()), request)); + () -> 
TransportForecastJobAction.validate(jobBuilder.build(new Date()), request)); assertEquals("[duration] must be greater or equal to the bucket span: [1m/1h]", e.getMessage()); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/GetJobsStatsActionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/GetJobsStatsActionTests.java index da7e76d6705..7826c94ddf1 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/GetJobsStatsActionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/GetJobsStatsActionTests.java @@ -17,7 +17,7 @@ import java.util.Collections; import java.util.List; import java.util.Optional; -import static org.elasticsearch.xpack.ml.action.GetJobsStatsAction.TransportAction.determineNonDeletedJobIdsWithoutLiveStats; +import static org.elasticsearch.xpack.ml.action.TransportGetJobsStatsAction.determineNonDeletedJobIdsWithoutLiveStats; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -80,10 +80,10 @@ public class GetJobsStatsActionTests extends ESTestCase { } public void testDurationToTimeValue() { - assertNull(GetJobsStatsAction.TransportAction.durationToTimeValue(Optional.empty())); + assertNull(TransportGetJobsStatsAction.durationToTimeValue(Optional.empty())); Duration duration = Duration.ofSeconds(10L); - TimeValue timeValue = GetJobsStatsAction.TransportAction.durationToTimeValue(Optional.of(duration)); + TimeValue timeValue = TransportGetJobsStatsAction.durationToTimeValue(Optional.of(duration)); assertEquals(10L, timeValue.getSeconds()); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/OpenJobActionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/OpenJobActionTests.java index ea5693e4858..05547100e86 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/OpenJobActionTests.java +++ 
b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/OpenJobActionTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MlMetaIndex; import org.elasticsearch.xpack.ml.MlMetadata; @@ -37,6 +38,7 @@ import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.config.JobState; import org.elasticsearch.xpack.ml.job.config.JobTaskStatus; import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.ml.notifications.Auditor; import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; @@ -63,7 +65,7 @@ public class OpenJobActionTests extends ESTestCase { public void testValidate_jobMissing() { MlMetadata.Builder mlBuilder = new MlMetadata.Builder(); mlBuilder.putJob(buildJobBuilder("job_id1").build(), false); - expectThrows(ResourceNotFoundException.class, () -> OpenJobAction.validate("job_id2", mlBuilder.build())); + expectThrows(ResourceNotFoundException.class, () -> TransportOpenJobAction.validate("job_id2", mlBuilder.build())); } public void testValidate_jobMarkedAsDeleted() { @@ -72,7 +74,7 @@ public class OpenJobActionTests extends ESTestCase { jobBuilder.setDeleted(true); mlBuilder.putJob(jobBuilder.build(), false); Exception e = expectThrows(ElasticsearchStatusException.class, - () -> OpenJobAction.validate("job_id", mlBuilder.build())); + () -> TransportOpenJobAction.validate("job_id", mlBuilder.build())); assertEquals("Cannot open job [job_id] because it has been marked as deleted", e.getMessage()); } @@ -81,7 +83,7 @@ public class OpenJobActionTests 
extends ESTestCase { Job.Builder jobBuilder = buildJobBuilder("job_id"); mlBuilder.putJob(jobBuilder.build(), false); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, - () -> OpenJobAction.validate("job_id", mlBuilder.build())); + () -> TransportOpenJobAction.validate("job_id", mlBuilder.build())); assertEquals("Cannot open job [job_id] because jobs created prior to version 5.5 are not supported", e.getMessage()); assertEquals(RestStatus.BAD_REQUEST, e.status()); } @@ -90,7 +92,7 @@ public class OpenJobActionTests extends ESTestCase { MlMetadata.Builder mlBuilder = new MlMetadata.Builder(); Job.Builder jobBuilder = buildJobBuilder("job_id"); mlBuilder.putJob(jobBuilder.build(new Date()), false); - OpenJobAction.validate("job_id", mlBuilder.build()); + TransportOpenJobAction.validate("job_id", mlBuilder.build()); } public void testSelectLeastLoadedMlNode_byCount() { @@ -120,7 +122,7 @@ public class OpenJobActionTests extends ESTestCase { metaData.putCustom(PersistentTasksCustomMetaData.TYPE, tasks); cs.metaData(metaData); cs.routingTable(routingTable.build()); - Assignment result = OpenJobAction.selectLeastLoadedMlNode("job_id4", cs.build(), 2, 10, 30, logger); + Assignment result = TransportOpenJobAction.selectLeastLoadedMlNode("job_id4", cs.build(), 2, 10, 30, logger); assertEquals("", result.getExplanation()); assertEquals("_node_id3", result.getExecutorNode()); } @@ -158,7 +160,7 @@ public class OpenJobActionTests extends ESTestCase { metaData.putCustom(PersistentTasksCustomMetaData.TYPE, tasks); cs.metaData(metaData); cs.routingTable(routingTable.build()); - Assignment result = OpenJobAction.selectLeastLoadedMlNode("job_id5", cs.build(), 2, 10, 30, logger); + Assignment result = TransportOpenJobAction.selectLeastLoadedMlNode("job_id5", cs.build(), 2, 10, 30, logger); assertEquals("", result.getExplanation()); assertEquals("_node_id2", result.getExecutorNode()); } @@ -192,7 +194,7 @@ public class OpenJobActionTests extends 
ESTestCase { metaData.putCustom(PersistentTasksCustomMetaData.TYPE, tasks); cs.metaData(metaData); cs.routingTable(routingTable.build()); - Assignment result = OpenJobAction.selectLeastLoadedMlNode("job_id0", cs.build(), 2, maxRunningJobsPerNode, 30, logger); + Assignment result = TransportOpenJobAction.selectLeastLoadedMlNode("job_id0", cs.build(), 2, maxRunningJobsPerNode, 30, logger); assertNull(result.getExecutorNode()); assertTrue(result.getExplanation().contains("because this node is full. Number of opened jobs [" + maxRunningJobsPerNode + "], xpack.ml.max_open_jobs [" + maxRunningJobsPerNode + "]")); @@ -218,7 +220,7 @@ public class OpenJobActionTests extends ESTestCase { metaData.putCustom(PersistentTasksCustomMetaData.TYPE, tasks); cs.metaData(metaData); cs.routingTable(routingTable.build()); - Assignment result = OpenJobAction.selectLeastLoadedMlNode("job_id2", cs.build(), 2, 10, 30, logger); + Assignment result = TransportOpenJobAction.selectLeastLoadedMlNode("job_id2", cs.build(), 2, 10, 30, logger); assertTrue(result.getExplanation().contains("because this node isn't a ml node")); assertNull(result.getExecutorNode()); } @@ -253,7 +255,7 @@ public class OpenJobActionTests extends ESTestCase { csBuilder.metaData(metaData); ClusterState cs = csBuilder.build(); - Assignment result = OpenJobAction.selectLeastLoadedMlNode("job_id6", cs, 2, 10, 30, logger); + Assignment result = TransportOpenJobAction.selectLeastLoadedMlNode("job_id6", cs, 2, 10, 30, logger); assertEquals("_node_id3", result.getExecutorNode()); tasksBuilder = PersistentTasksCustomMetaData.builder(tasks); @@ -263,7 +265,7 @@ public class OpenJobActionTests extends ESTestCase { csBuilder = ClusterState.builder(cs); csBuilder.metaData(MetaData.builder(cs.metaData()).putCustom(PersistentTasksCustomMetaData.TYPE, tasks)); cs = csBuilder.build(); - result = OpenJobAction.selectLeastLoadedMlNode("job_id7", cs, 2, 10, 30, logger); + result = TransportOpenJobAction.selectLeastLoadedMlNode("job_id7", 
cs, 2, 10, 30, logger); assertNull("no node selected, because OPENING state", result.getExecutorNode()); assertTrue(result.getExplanation().contains("because node exceeds [2] the maximum number of jobs [2] in opening state")); @@ -274,7 +276,7 @@ public class OpenJobActionTests extends ESTestCase { csBuilder = ClusterState.builder(cs); csBuilder.metaData(MetaData.builder(cs.metaData()).putCustom(PersistentTasksCustomMetaData.TYPE, tasks)); cs = csBuilder.build(); - result = OpenJobAction.selectLeastLoadedMlNode("job_id7", cs, 2, 10, 30, logger); + result = TransportOpenJobAction.selectLeastLoadedMlNode("job_id7", cs, 2, 10, 30, logger); assertNull("no node selected, because stale task", result.getExecutorNode()); assertTrue(result.getExplanation().contains("because node exceeds [2] the maximum number of jobs [2] in opening state")); @@ -285,7 +287,7 @@ public class OpenJobActionTests extends ESTestCase { csBuilder = ClusterState.builder(cs); csBuilder.metaData(MetaData.builder(cs.metaData()).putCustom(PersistentTasksCustomMetaData.TYPE, tasks)); cs = csBuilder.build(); - result = OpenJobAction.selectLeastLoadedMlNode("job_id7", cs, 2, 10, 30, logger); + result = TransportOpenJobAction.selectLeastLoadedMlNode("job_id7", cs, 2, 10, 30, logger); assertNull("no node selected, because null state", result.getExecutorNode()); assertTrue(result.getExplanation().contains("because node exceeds [2] the maximum number of jobs [2] in opening state")); } @@ -320,7 +322,7 @@ public class OpenJobActionTests extends ESTestCase { metaData.putCustom(PersistentTasksCustomMetaData.TYPE, tasks); cs.metaData(metaData); cs.routingTable(routingTable.build()); - Assignment result = OpenJobAction.selectLeastLoadedMlNode("incompatible_type_job", cs.build(), 2, 10, 30, logger); + Assignment result = TransportOpenJobAction.selectLeastLoadedMlNode("incompatible_type_job", cs.build(), 2, 10, 30, logger); assertThat(result.getExplanation(), containsString("because this node does not support jobs 
of type [incompatible_type]")); assertNull(result.getExecutorNode()); } @@ -347,7 +349,7 @@ public class OpenJobActionTests extends ESTestCase { metaData.putCustom(PersistentTasksCustomMetaData.TYPE, tasks); cs.metaData(metaData); cs.routingTable(routingTable.build()); - Assignment result = OpenJobAction.selectLeastLoadedMlNode("incompatible_type_job", cs.build(), 2, 10, 30, logger); + Assignment result = TransportOpenJobAction.selectLeastLoadedMlNode("incompatible_type_job", cs.build(), 2, 10, 30, logger); assertThat(result.getExplanation(), containsString("because this node does not support jobs of version [" + Version.CURRENT + "]")); assertNull(result.getExecutorNode()); } @@ -362,12 +364,12 @@ public class OpenJobActionTests extends ESTestCase { csBuilder.metaData(metaData); ClusterState cs = csBuilder.build(); - assertEquals(0, OpenJobAction.verifyIndicesPrimaryShardsAreActive("job_id", cs).size()); + assertEquals(0, TransportOpenJobAction.verifyIndicesPrimaryShardsAreActive("job_id", cs).size()); metaData = new MetaData.Builder(cs.metaData()); routingTable = new RoutingTable.Builder(cs.routingTable()); - String indexToRemove = randomFrom(OpenJobAction.indicesOfInterest(cs, "job_id")); + String indexToRemove = randomFrom(TransportOpenJobAction.indicesOfInterest(cs, "job_id")); if (randomBoolean()) { routingTable.remove(indexToRemove); } else { @@ -382,7 +384,7 @@ public class OpenJobActionTests extends ESTestCase { csBuilder.routingTable(routingTable.build()); csBuilder.metaData(metaData); - List result = OpenJobAction.verifyIndicesPrimaryShardsAreActive("job_id", csBuilder.build()); + List result = TransportOpenJobAction.verifyIndicesPrimaryShardsAreActive("job_id", csBuilder.build()); assertEquals(1, result.size()); assertEquals(indexToRemove, result.get(0)); } @@ -392,57 +394,58 @@ public class OpenJobActionTests extends ESTestCase { ClusterState cs = csBuilder.build(); String[] indices = new String[] { "no_index" }; - assertArrayEquals(new String[] { 
"no_index" }, OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + assertArrayEquals(new String[] { "no_index" }, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); } public void testMappingRequiresUpdateNullMapping() throws IOException { ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("null_mapping", null)); String[] indices = new String[] { "null_index" }; - assertArrayEquals(indices, OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + assertArrayEquals(indices, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); } public void testMappingRequiresUpdateNoVersion() throws IOException { ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("no_version_field", "NO_VERSION_FIELD")); String[] indices = new String[] { "no_version_field" }; - assertArrayEquals(indices, OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + assertArrayEquals(indices, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); } public void testMappingRequiresUpdateRecentMappingVersion() throws IOException { ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_current", Version.CURRENT.toString())); String[] indices = new String[] { "version_current" }; - assertArrayEquals(new String[] {}, OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + assertArrayEquals(new String[] {}, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); } public void testMappingRequiresUpdateMaliciousMappingVersion() throws IOException { ClusterState cs = getClusterStateWithMappingsWithMetaData( Collections.singletonMap("version_current", Collections.singletonMap("nested", "1.0"))); String[] indices = new String[] { "version_nested" }; - assertArrayEquals(indices, 
OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + assertArrayEquals(indices, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); } public void testMappingRequiresUpdateOldMappingVersion() throws IOException { ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_54", Version.V_5_4_0.toString())); String[] indices = new String[] { "version_54" }; - assertArrayEquals(indices, OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + assertArrayEquals(indices, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); } public void testMappingRequiresUpdateBogusMappingVersion() throws IOException { ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_bogus", "0.0")); String[] indices = new String[] { "version_bogus" }; - assertArrayEquals(indices, OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + assertArrayEquals(indices, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); } public void testMappingRequiresUpdateNewerMappingVersion() throws IOException { ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_newer", Version.CURRENT)); String[] indices = new String[] { "version_newer" }; - assertArrayEquals(new String[] {}, OpenJobAction.mappingRequiresUpdate(cs, indices, VersionUtils.getPreviousVersion(), logger)); + assertArrayEquals(new String[] {}, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, VersionUtils.getPreviousVersion(), + logger)); } public void testMappingRequiresUpdateNewerMappingVersionMinor() throws IOException { ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_newer_minor", Version.CURRENT)); String[] indices = new String[] { "version_newer_minor" }; assertArrayEquals(new String[] {}, - 
OpenJobAction.mappingRequiresUpdate(cs, indices, VersionUtils.getPreviousMinorVersion(), logger)); + TransportOpenJobAction.mappingRequiresUpdate(cs, indices, VersionUtils.getPreviousMinorVersion(), logger)); } public void testMappingRequiresUpdateSomeVersionMix() throws IOException { @@ -457,7 +460,7 @@ public class OpenJobActionTests extends ESTestCase { ClusterState cs = getClusterStateWithMappingsWithMetaData(versionMix); String[] indices = new String[] { "version_54", "version_null", "version_bogus", "version_bogus2" }; - assertArrayEquals(indices, OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + assertArrayEquals(indices, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); } public static void addJobTask(String jobId, String nodeId, JobState jobState, PersistentTasksCustomMetaData.Builder builder) { @@ -479,7 +482,7 @@ public class OpenJobActionTests extends ESTestCase { indices.add(AnomalyDetectorsIndex.jobStateIndexName()); indices.add(MlMetaIndex.INDEX_NAME); indices.add(Auditor.NOTIFICATIONS_INDEX); - indices.add(AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndex.RESULTS_INDEX_DEFAULT); + indices.add(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT); for (String indexName : indices) { IndexMetaData.Builder indexMetaData = IndexMetaData.builder(indexName); indexMetaData.settings(Settings.builder() @@ -503,7 +506,7 @@ public class OpenJobActionTests extends ESTestCase { Job job = jobCreator.apply(jobId); mlMetadata.putJob(job, false); } - metaData.putCustom(MlMetadata.TYPE, mlMetadata.build()); + metaData.putCustom(MLMetadataField.TYPE, mlMetadata.build()); } private ClusterState getClusterStateWithMappingsWithMetaData(Map namesAndVersions) throws IOException { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/PreviewDatafeedActionTests.java 
b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/PreviewDatafeedActionTests.java index 92a43ac94ec..5eb05a8e27b 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/PreviewDatafeedActionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/PreviewDatafeedActionTests.java @@ -61,7 +61,7 @@ public class PreviewDatafeedActionTests extends ESTestCase { public void testPreviewDatafed_GivenEmptyStream() throws IOException { when(dataExtractor.next()).thenReturn(Optional.empty()); - PreviewDatafeedAction.TransportAction.previewDatafeed(dataExtractor, actionListener); + TransportPreviewDatafeedAction.previewDatafeed(dataExtractor, actionListener); assertThat(capturedResponse, equalTo("[]")); assertThat(capturedFailure, is(nullValue())); @@ -73,7 +73,7 @@ public class PreviewDatafeedActionTests extends ESTestCase { InputStream stream = new ByteArrayInputStream(streamAsString.getBytes(StandardCharsets.UTF_8)); when(dataExtractor.next()).thenReturn(Optional.of(stream)); - PreviewDatafeedAction.TransportAction.previewDatafeed(dataExtractor, actionListener); + TransportPreviewDatafeedAction.previewDatafeed(dataExtractor, actionListener); assertThat(capturedResponse, equalTo("[{\"a\":1, \"b\":2},{\"c\":3, \"d\":4},{\"e\":5, \"f\":6}]")); assertThat(capturedFailure, is(nullValue())); @@ -83,7 +83,7 @@ public class PreviewDatafeedActionTests extends ESTestCase { public void testPreviewDatafed_GivenFailure() throws IOException { doThrow(new RuntimeException("failed")).when(dataExtractor).next(); - PreviewDatafeedAction.TransportAction.previewDatafeed(dataExtractor, actionListener); + TransportPreviewDatafeedAction.previewDatafeed(dataExtractor, actionListener); assertThat(capturedResponse, is(nullValue())); assertThat(capturedFailure.getMessage(), equalTo("failed")); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StartDatafeedActionTests.java 
b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StartDatafeedActionTests.java index c371e0306d2..2a3a95aee0c 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StartDatafeedActionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StartDatafeedActionTests.java @@ -31,7 +31,7 @@ public class StartDatafeedActionTests extends ESTestCase { .putJob(job, false) .build(); Exception e = expectThrows(ResourceNotFoundException.class, - () -> StartDatafeedAction.validate("some-datafeed", mlMetadata, null)); + () -> TransportStartDatafeedAction.validate("some-datafeed", mlMetadata, null)); assertThat(e.getMessage(), equalTo("No datafeed with id [some-datafeed] exists")); } @@ -46,7 +46,7 @@ public class StartDatafeedActionTests extends ESTestCase { .putDatafeed(datafeedConfig1, null) .build(); Exception e = expectThrows(ElasticsearchStatusException.class, - () -> StartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks)); + () -> TransportStartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks)); assertThat(e.getMessage(), equalTo("cannot start datafeed [foo-datafeed] because job [job_id] is closed")); } @@ -63,7 +63,7 @@ public class StartDatafeedActionTests extends ESTestCase { .putDatafeed(datafeedConfig1, null) .build(); - StartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks); + TransportStartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks); } public void testValidate_jobOpened() { @@ -79,14 +79,15 @@ public class StartDatafeedActionTests extends ESTestCase { .putDatafeed(datafeedConfig1, null) .build(); - StartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks); + TransportStartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks); } - public static StartDatafeedAction.DatafeedTask createDatafeedTask(long id, String type, String action, - TaskId parentTaskId, - StartDatafeedAction.DatafeedParams params, - DatafeedManager datafeedManager) { - StartDatafeedAction.DatafeedTask 
task = new StartDatafeedAction.DatafeedTask(id, type, action, parentTaskId, params); + public static TransportStartDatafeedAction.DatafeedTask createDatafeedTask(long id, String type, String action, + TaskId parentTaskId, + StartDatafeedAction.DatafeedParams params, + DatafeedManager datafeedManager) { + TransportStartDatafeedAction.DatafeedTask task = new TransportStartDatafeedAction.DatafeedTask(id, type, action, parentTaskId, + params); task.datafeedManager = datafeedManager; return task; } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StopDatafeedActionRequestTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StopDatafeedActionRequestTests.java index 18b7714dfe5..665db11a0b3 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StopDatafeedActionRequestTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StopDatafeedActionRequestTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.MlMetadata.Builder; import org.elasticsearch.xpack.ml.action.StopDatafeedAction.Request; @@ -62,22 +63,22 @@ public class StopDatafeedActionRequestTests extends AbstractStreamableXContentTe public void testValidate() { PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); - tasksBuilder.addTask(MlMetadata.datafeedTaskId("foo"), StartDatafeedAction.TASK_NAME, + tasksBuilder.addTask(MLMetadataField.datafeedTaskId("foo"), StartDatafeedAction.TASK_NAME, new StartDatafeedAction.DatafeedParams("foo", 0L), new Assignment("node_id", "")); - tasksBuilder.updateTaskStatus(MlMetadata.datafeedTaskId("foo"), DatafeedState.STARTED); + 
tasksBuilder.updateTaskStatus(MLMetadataField.datafeedTaskId("foo"), DatafeedState.STARTED); tasksBuilder.build(); Job job = createDatafeedJob().build(new Date()); MlMetadata mlMetadata1 = new MlMetadata.Builder().putJob(job, false).build(); Exception e = expectThrows(ResourceNotFoundException.class, - () -> StopDatafeedAction.validateDatafeedTask("foo", mlMetadata1)); + () -> TransportStopDatafeedAction.validateDatafeedTask("foo", mlMetadata1)); assertThat(e.getMessage(), equalTo("No datafeed with id [foo] exists")); DatafeedConfig datafeedConfig = createDatafeedConfig("foo", "job_id").build(); MlMetadata mlMetadata2 = new MlMetadata.Builder().putJob(job, false) .putDatafeed(datafeedConfig, null) .build(); - StopDatafeedAction.validateDatafeedTask("foo", mlMetadata2); + TransportStopDatafeedAction.validateDatafeedTask("foo", mlMetadata2); } public void testResolveDataFeedIds_GivenDatafeedId() { @@ -99,14 +100,14 @@ public class StopDatafeedActionRequestTests extends AbstractStreamableXContentTe List startedDatafeeds = new ArrayList<>(); List stoppingDatafeeds = new ArrayList<>(); - StopDatafeedAction.resolveDataFeedIds(new StopDatafeedAction.Request("datafeed_1"), mlMetadata, tasks, startedDatafeeds, + TransportStopDatafeedAction.resolveDataFeedIds(new StopDatafeedAction.Request("datafeed_1"), mlMetadata, tasks, startedDatafeeds, stoppingDatafeeds); assertEquals(Collections.singletonList("datafeed_1"), startedDatafeeds); assertEquals(Collections.emptyList(), stoppingDatafeeds); startedDatafeeds.clear(); stoppingDatafeeds.clear(); - StopDatafeedAction.resolveDataFeedIds(new StopDatafeedAction.Request("datafeed_2"), mlMetadata, tasks, startedDatafeeds, + TransportStopDatafeedAction.resolveDataFeedIds(new StopDatafeedAction.Request("datafeed_2"), mlMetadata, tasks, startedDatafeeds, stoppingDatafeeds); assertEquals(Collections.emptyList(), startedDatafeeds); assertEquals(Collections.emptyList(), stoppingDatafeeds); @@ -136,14 +137,14 @@ public class 
StopDatafeedActionRequestTests extends AbstractStreamableXContentTe List startedDatafeeds = new ArrayList<>(); List stoppingDatafeeds = new ArrayList<>(); - StopDatafeedAction.resolveDataFeedIds(new StopDatafeedAction.Request("_all"), mlMetadata, tasks, startedDatafeeds, + TransportStopDatafeedAction.resolveDataFeedIds(new StopDatafeedAction.Request("_all"), mlMetadata, tasks, startedDatafeeds, stoppingDatafeeds); assertEquals(Collections.singletonList("datafeed_1"), startedDatafeeds); assertEquals(Collections.singletonList("datafeed_3"), stoppingDatafeeds); startedDatafeeds.clear(); stoppingDatafeeds.clear(); - StopDatafeedAction.resolveDataFeedIds(new StopDatafeedAction.Request("datafeed_2"), mlMetadata, tasks, startedDatafeeds, + TransportStopDatafeedAction.resolveDataFeedIds(new StopDatafeedAction.Request("datafeed_2"), mlMetadata, tasks, startedDatafeeds, stoppingDatafeeds); assertEquals(Collections.emptyList(), startedDatafeeds); assertEquals(Collections.emptyList(), stoppingDatafeeds); @@ -151,9 +152,9 @@ public class StopDatafeedActionRequestTests extends AbstractStreamableXContentTe public static void addTask(String datafeedId, long startTime, String nodeId, DatafeedState state, PersistentTasksCustomMetaData.Builder taskBuilder) { - taskBuilder.addTask(MlMetadata.datafeedTaskId(datafeedId), StartDatafeedAction.TASK_NAME, + taskBuilder.addTask(MLMetadataField.datafeedTaskId(datafeedId), StartDatafeedAction.TASK_NAME, new StartDatafeedAction.DatafeedParams(datafeedId, startTime), new Assignment(nodeId, "test assignment")); - taskBuilder.updateTaskStatus(MlMetadata.datafeedTaskId(datafeedId), state); + taskBuilder.updateTaskStatus(MLMetadataField.datafeedTaskId(datafeedId), state); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java index c17cb8f2dde..2f54bc24fbf 100644 --- 
a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java @@ -23,10 +23,11 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.action.StartDatafeedAction; -import org.elasticsearch.xpack.ml.action.StartDatafeedAction.DatafeedTask; +import org.elasticsearch.xpack.ml.action.TransportStartDatafeedAction.DatafeedTask; import org.elasticsearch.xpack.ml.action.StartDatafeedActionTests; import org.elasticsearch.xpack.ml.action.StopDatafeedAction; import org.elasticsearch.xpack.ml.job.config.AnalysisConfig; @@ -91,7 +92,7 @@ public class DatafeedManagerTests extends ESTestCase { Collections.emptyMap(), Collections.emptySet(), Version.CURRENT)) .build(); ClusterState.Builder cs = ClusterState.builder(new ClusterName("cluster_name")) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlMetadata.build()) + .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, mlMetadata.build()) .putCustom(PersistentTasksCustomMetaData.TYPE, tasks)) .nodes(nodes); @@ -255,8 +256,8 @@ public class DatafeedManagerTests extends ESTestCase { PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENING, tasksBuilder); ClusterState.Builder cs = ClusterState.builder(clusterService.state()) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, clusterService.state().getMetaData().custom(MlMetadata.TYPE)) - .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); + .metaData(new 
MetaData.Builder().putCustom(MLMetadataField.TYPE, clusterService.state().getMetaData() + .custom(MLMetadataField.TYPE)).putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); when(clusterService.state()).thenReturn(cs.build()); Consumer handler = mockConsumer(); @@ -270,8 +271,8 @@ public class DatafeedManagerTests extends ESTestCase { addJobTask("job_id", "node_id", JobState.OPENING, tasksBuilder); addJobTask("another_job", "node_id", JobState.OPENED, tasksBuilder); ClusterState.Builder anotherJobCs = ClusterState.builder(clusterService.state()) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, clusterService.state().getMetaData().custom(MlMetadata.TYPE)) - .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); + .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, clusterService.state().getMetaData() + .custom(MLMetadataField.TYPE)).putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); capturedClusterStateListener.getValue().clusterChanged(new ClusterChangedEvent("_source", anotherJobCs.build(), cs.build())); @@ -281,8 +282,8 @@ public class DatafeedManagerTests extends ESTestCase { tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENED, tasksBuilder); ClusterState.Builder jobOpenedCs = ClusterState.builder(clusterService.state()) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, clusterService.state().getMetaData().custom(MlMetadata.TYPE)) - .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); + .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, clusterService.state().getMetaData() + .custom(MLMetadataField.TYPE)).putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); capturedClusterStateListener.getValue().clusterChanged( new ClusterChangedEvent("_source", jobOpenedCs.build(), anotherJobCs.build())); @@ -295,8 +296,8 @@ public class DatafeedManagerTests extends ESTestCase { 
PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENING, tasksBuilder); ClusterState.Builder cs = ClusterState.builder(clusterService.state()) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, clusterService.state().getMetaData().custom(MlMetadata.TYPE)) - .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); + .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, clusterService.state().getMetaData() + .custom(MLMetadataField.TYPE)).putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); when(clusterService.state()).thenReturn(cs.build()); Consumer handler = mockConsumer(); @@ -309,8 +310,8 @@ public class DatafeedManagerTests extends ESTestCase { tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.FAILED, tasksBuilder); ClusterState.Builder updatedCs = ClusterState.builder(clusterService.state()) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, clusterService.state().getMetaData().custom(MlMetadata.TYPE)) - .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); + .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, clusterService.state().getMetaData() + .custom(MLMetadataField.TYPE)).putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); capturedClusterStateListener.getValue().clusterChanged(new ClusterChangedEvent("_source", updatedCs.build(), cs.build())); @@ -323,8 +324,8 @@ public class DatafeedManagerTests extends ESTestCase { PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENING, tasksBuilder); ClusterState.Builder cs = ClusterState.builder(clusterService.state()) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, clusterService.state().getMetaData().custom(MlMetadata.TYPE)) - .putCustom(PersistentTasksCustomMetaData.TYPE, 
tasksBuilder.build())); + .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, clusterService.state().getMetaData() + .custom(MLMetadataField.TYPE)).putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); when(clusterService.state()).thenReturn(cs.build()); Consumer handler = mockConsumer(); @@ -341,8 +342,8 @@ public class DatafeedManagerTests extends ESTestCase { tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENED, tasksBuilder); ClusterState.Builder updatedCs = ClusterState.builder(clusterService.state()) - .metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, clusterService.state().getMetaData().custom(MlMetadata.TYPE)) - .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); + .metaData(new MetaData.Builder().putCustom(MLMetadataField.TYPE, clusterService.state().getMetaData() + .custom(MLMetadataField.TYPE)).putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); capturedClusterStateListener.getValue().clusterChanged(new ClusterChangedEvent("_source", cs.build(), updatedCs.build())); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java index 672a2987d1d..a88b9fbebfb 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.config.JobState; @@ -315,7 +316,7 @@ public class 
DatafeedNodeSelectorTests extends ESTestCase { clusterState = ClusterState.builder(new ClusterName("cluster_name")) .metaData(new MetaData.Builder() - .putCustom(MlMetadata.TYPE, mlMetadata) + .putCustom(MLMetadataField.TYPE, mlMetadata) .putCustom(PersistentTasksCustomMetaData.TYPE, tasks) .put(indexMetaData, false)) .nodes(nodes) diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java index 847b87fba85..8a9c0dc4e93 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java @@ -10,6 +10,7 @@ import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.action.DeleteJobAction; import org.elasticsearch.xpack.ml.action.PutJobAction; @@ -103,7 +104,7 @@ public class DeleteJobIT extends BaseMlIntegTestCase { } private ClusterState markJobAsDeleted(String jobId, ClusterState currentState) { - MlMetadata mlMetadata = currentState.metaData().custom(MlMetadata.TYPE); + MlMetadata mlMetadata = currentState.metaData().custom(MLMetadataField.TYPE); assertNotNull(mlMetadata); MlMetadata.Builder builder = new MlMetadata.Builder(mlMetadata); @@ -111,14 +112,16 @@ public class DeleteJobIT extends BaseMlIntegTestCase { builder.markJobAsDeleted(jobId, tasks, true); ClusterState.Builder newState = ClusterState.builder(currentState); - return newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, builder.build()).build()).build(); + return newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MLMetadataField.TYPE, 
builder.build()).build()) + .build(); } private ClusterState removeJobFromClusterState(String jobId, ClusterState currentState) { - MlMetadata.Builder builder = new MlMetadata.Builder(currentState.metaData().custom(MlMetadata.TYPE)); + MlMetadata.Builder builder = new MlMetadata.Builder(currentState.metaData().custom(MLMetadataField.TYPE)); builder.deleteJob(jobId, currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE)); ClusterState.Builder newState = ClusterState.builder(currentState); - return newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, builder.build()).build()).build(); + return newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MLMetadataField.TYPE, builder.build()).build()) + .build(); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java index 15cbb6a5fdd..e29d9d1642c 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.ml.job.config.AnalysisConfig; +import org.elasticsearch.xpack.ml.job.config.AnalysisLimits; import org.elasticsearch.xpack.ml.job.config.DataDescription; import org.elasticsearch.xpack.ml.job.config.Detector; import org.elasticsearch.xpack.ml.job.config.Job; @@ -16,11 +17,13 @@ import org.elasticsearch.xpack.ml.job.results.Forecast; import org.elasticsearch.xpack.ml.job.results.ForecastRequestStats; import org.junit.After; +import java.io.IOException; import java.time.Instant; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import 
java.util.stream.Collectors; @@ -156,6 +159,96 @@ public class ForecastIT extends MlNativeAutodetectIntegTestCase { equalTo("[duration] must be greater or equal to the bucket span: [10m/1h]")); } + public void testNoData() throws Exception { + Detector.Builder detector = new Detector.Builder("mean", "value"); + + TimeValue bucketSpan = TimeValue.timeValueMinutes(1); + AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build())); + analysisConfig.setBucketSpan(bucketSpan); + DataDescription.Builder dataDescription = new DataDescription.Builder(); + dataDescription.setTimeFormat("epoch"); + Job.Builder job = new Job.Builder("forecast-it-test-no-data"); + job.setAnalysisConfig(analysisConfig); + job.setDataDescription(dataDescription); + + registerJob(job); + putJob(job); + openJob(job.getId()); + ElasticsearchException e = expectThrows(ElasticsearchException.class, + () -> forecast(job.getId(), TimeValue.timeValueMinutes(120), null)); + assertThat(e.getMessage(), + equalTo("Cannot run forecast: Forecast cannot be executed as job requires data to have been processed and modeled")); + } + + public void testMemoryStatus() throws Exception { + Detector.Builder detector = new Detector.Builder("mean", "value"); + detector.setByFieldName("clientIP"); + + TimeValue bucketSpan = TimeValue.timeValueHours(1); + AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build())); + analysisConfig.setBucketSpan(bucketSpan); + DataDescription.Builder dataDescription = new DataDescription.Builder(); + dataDescription.setTimeFormat("epoch"); + Job.Builder job = new Job.Builder("forecast-it-test-memory-status"); + job.setAnalysisConfig(analysisConfig); + job.setDataDescription(dataDescription); + + // Set the memory limit to 30MB + AnalysisLimits limits = new AnalysisLimits(30L, null); + job.setAnalysisLimits(limits); + + registerJob(job); + putJob(job); + openJob(job.getId()); + 
createDataWithLotsOfClientIps(bucketSpan, job); + ElasticsearchException e = expectThrows(ElasticsearchException.class, + () -> forecast(job.getId(), TimeValue.timeValueMinutes(120), null)); + assertThat(e.getMessage(), equalTo("Cannot run forecast: Forecast cannot be executed as model memory status is not OK")); + } + + public void testMemoryLimit() throws Exception { + Detector.Builder detector = new Detector.Builder("mean", "value"); + detector.setByFieldName("clientIP"); + + TimeValue bucketSpan = TimeValue.timeValueHours(1); + AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build())); + analysisConfig.setBucketSpan(bucketSpan); + DataDescription.Builder dataDescription = new DataDescription.Builder(); + dataDescription.setTimeFormat("epoch"); + Job.Builder job = new Job.Builder("forecast-it-test-memory-limit"); + job.setAnalysisConfig(analysisConfig); + job.setDataDescription(dataDescription); + + registerJob(job); + putJob(job); + openJob(job.getId()); + createDataWithLotsOfClientIps(bucketSpan, job); + ElasticsearchException e = expectThrows(ElasticsearchException.class, + () -> forecast(job.getId(), TimeValue.timeValueMinutes(120), null)); + assertThat(e.getMessage(), + equalTo("Cannot run forecast: Forecast cannot be executed as forecast memory usage is predicted to exceed 20MB")); + } + + private void createDataWithLotsOfClientIps(TimeValue bucketSpan, Job.Builder job) throws IOException { + long now = Instant.now().getEpochSecond(); + long timestamp = now - 50 * bucketSpan.seconds(); + while (timestamp < now) { + for (int i = 1; i < 256; i++) { + List data = new ArrayList<>(); + for (int j = 1; j < 100; j++) { + Map record = new HashMap<>(); + record.put("time", timestamp); + record.put("value", 10.0); + record.put("clientIP", String.format(Locale.ROOT, "192.168.%d.%d", i, j)); + data.add(createJsonRecord(record)); + } + postData(job.getId(), data.stream().collect(Collectors.joining())); + timestamp 
+= bucketSpan.seconds(); + } + } + flushJob(job.getId(), false); + } + private static Map createRecord(long timestamp, double value) { Map record = new HashMap<>(); record.put("time", timestamp); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index 9439254245b..6207fecb04c 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -17,6 +17,7 @@ import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.test.rest.XPackRestTestHelper; import org.junit.After; @@ -199,7 +200,7 @@ public class MlJobIT extends ESRestTestCase { response = client().performRequest("get", "_cat/indices"); assertEquals(200, response.getStatusLine().getStatusCode()); responseAsString = responseEntityToString(response); - assertThat(responseAsString, containsString(AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + "custom-" + indexName)); + assertThat(responseAsString, containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName)); assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId1)))); assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2)))); @@ -244,11 +245,11 @@ public class MlJobIT extends ESRestTestCase { response = client().performRequest("get", "_cat/indices"); assertEquals(200, response.getStatusLine().getStatusCode()); responseAsString = responseEntityToString(response); - assertThat(responseAsString, containsString(AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + "custom-" + 
indexName)); + assertThat(responseAsString, containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName)); client().performRequest("post", "_refresh"); - response = client().performRequest("get", AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + "custom-" + indexName + "/_count"); + response = client().performRequest("get", AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName + "/_count"); assertEquals(200, response.getStatusLine().getStatusCode()); responseAsString = responseEntityToString(response); assertThat(responseAsString, containsString("\"count\":0")); @@ -273,8 +274,8 @@ public class MlJobIT extends ESRestTestCase { assertEquals(200, response.getStatusLine().getStatusCode()); // Check the index mapping contains the first by_field_name - response = client().performRequest("get", AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX - + AnomalyDetectorsIndex.RESULTS_INDEX_DEFAULT + "/_mapping?pretty"); + response = client().performRequest("get", AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT + "/_mapping?pretty"); assertEquals(200, response.getStatusLine().getStatusCode()); String responseAsString = responseEntityToString(response); assertThat(responseAsString, containsString(byFieldName1)); @@ -286,8 +287,8 @@ public class MlJobIT extends ESRestTestCase { assertEquals(200, response.getStatusLine().getStatusCode()); // Check the index mapping now contains both fields - response = client().performRequest("get", AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX - + AnomalyDetectorsIndex.RESULTS_INDEX_DEFAULT + "/_mapping?pretty"); + response = client().performRequest("get", AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT + "/_mapping?pretty"); assertEquals(200, response.getStatusLine().getStatusCode()); responseAsString = responseEntityToString(response); assertThat(responseAsString, containsString(byFieldName1)); @@ -313,7 
+314,8 @@ public class MlJobIT extends ESRestTestCase { assertEquals(200, response.getStatusLine().getStatusCode()); // Check the index mapping contains the first by_field_name - response = client().performRequest("get", AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + "custom-shared-index" + "/_mapping?pretty"); + response = client().performRequest("get", + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-shared-index" + "/_mapping?pretty"); assertEquals(200, response.getStatusLine().getStatusCode()); String responseAsString = responseEntityToString(response); assertThat(responseAsString, containsString(byFieldName1)); @@ -325,7 +327,8 @@ public class MlJobIT extends ESRestTestCase { assertEquals(200, response.getStatusLine().getStatusCode()); // Check the index mapping now contains both fields - response = client().performRequest("get", AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + "custom-shared-index" + "/_mapping?pretty"); + response = client().performRequest("get", + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-shared-index" + "/_mapping?pretty"); assertEquals(200, response.getStatusLine().getStatusCode()); responseAsString = responseEntityToString(response); assertThat(responseAsString, containsString(byFieldName1)); @@ -370,7 +373,7 @@ public class MlJobIT extends ESRestTestCase { public void testDeleteJob() throws Exception { String jobId = "delete-job-job"; - String indexName = AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndex.RESULTS_INDEX_DEFAULT; + String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; createFarequoteJob(jobId); Response response = client().performRequest("get", "_cat/indices"); @@ -407,7 +410,7 @@ public class MlJobIT extends ESRestTestCase { public void testDeleteJobAfterMissingIndex() throws Exception { String jobId = "delete-job-after-missing-index-job"; String aliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); - 
String indexName = AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndex.RESULTS_INDEX_DEFAULT; + String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; createFarequoteJob(jobId); Response response = client().performRequest("get", "_cat/indices"); @@ -438,7 +441,7 @@ public class MlJobIT extends ESRestTestCase { String jobId = "delete-job-after-missing-alias-job"; String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); String writeAliasName = AnomalyDetectorsIndex.resultsWriteAlias(jobId); - String indexName = AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndex.RESULTS_INDEX_DEFAULT; + String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; createFarequoteJob(jobId); Response response = client().performRequest("get", "_cat/aliases"); @@ -464,7 +467,7 @@ public class MlJobIT extends ESRestTestCase { public void testMultiIndexDelete() throws Exception { String jobId = "multi-index-delete-job"; - String indexName = AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndex.RESULTS_INDEX_DEFAULT; + String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; createFarequoteJob(jobId); Response response = client().performRequest("put", indexName + "-001"); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java index 2b6dbefc849..9d459c5876c 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java @@ -18,7 +18,8 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; -import 
org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MLMetadataField; +import org.elasticsearch.xpack.ml.MachineLearningClientActionPlugin; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.action.PutJobAction; import org.elasticsearch.xpack.ml.action.util.QueryPage; @@ -68,7 +69,7 @@ public class JobManagerTests extends ESTestCase { Job job = buildJobBuilder("foo").build(); MlMetadata mlMetadata = new MlMetadata.Builder().putJob(job, false).build(); ClusterState cs = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().putCustom(MlMetadata.TYPE, mlMetadata)).build(); + .metaData(MetaData.builder().putCustom(MLMetadataField.TYPE, mlMetadata)).build(); assertEquals(job, JobManager.getJobOrThrowIfUnknown("foo", cs)); } @@ -79,7 +80,7 @@ public class JobManagerTests extends ESTestCase { mlMetadata.putJob(buildJobBuilder(Integer.toString(i)).build(), false); } ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().putCustom(MlMetadata.TYPE, mlMetadata.build())).build(); + .metaData(MetaData.builder().putCustom(MLMetadataField.TYPE, mlMetadata.build())).build(); JobManager jobManager = createJobManager(); QueryPage result = jobManager.expandJobs("_all", true, clusterState); @@ -135,7 +136,7 @@ public class JobManagerTests extends ESTestCase { MlMetadata.Builder mlMetadata = new MlMetadata.Builder(); mlMetadata.putJob(buildJobBuilder("foo").build(), false); ClusterState clusterState = ClusterState.builder(new ClusterName("name")) - .metaData(MetaData.builder().putCustom(MlMetadata.TYPE, mlMetadata.build())).build(); + .metaData(MetaData.builder().putCustom(MLMetadataField.TYPE, mlMetadata.build())).build(); jobManager.putJob(putJobRequest, clusterState, new ActionListener() { @Override @@ -164,7 +165,8 @@ public class JobManagerTests extends ESTestCase { private JobManager createJobManager() { Settings settings = 
Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); - ClusterSettings clusterSettings = new ClusterSettings(settings, Collections.singleton(MachineLearning.MAX_MODEL_MEMORY_LIMIT)); + ClusterSettings clusterSettings = new ClusterSettings(settings, + Collections.singleton(MachineLearningClientActionPlugin.MAX_MODEL_MEMORY_LIMIT)); when(clusterService.getClusterSettings()).thenReturn(clusterSettings); UpdateJobProcessNotifier notifier = mock(UpdateJobProcessNotifier.class); return new JobManager(settings, jobProvider, clusterService, auditor, client, notifier); @@ -172,7 +174,7 @@ public class JobManagerTests extends ESTestCase { private ClusterState createClusterState() { ClusterState.Builder builder = ClusterState.builder(new ClusterName("_name")); - builder.metaData(MetaData.builder().putCustom(MlMetadata.TYPE, MlMetadata.EMPTY_METADATA)); + builder.metaData(MetaData.builder().putCustom(MLMetadataField.TYPE, MlMetadata.EMPTY_METADATA)); return builder.build(); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTests.java index f28cee0e80e..47ceb24f2cb 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTests.java @@ -19,9 +19,9 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MachineLearningClientActionPlugin; import org.elasticsearch.xpack.ml.job.messages.Messages; -import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndexFields; import java.io.IOException; import 
java.util.ArrayList; @@ -145,7 +145,7 @@ public class JobTests extends AbstractSerializingTestCase { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.validateModelMemoryLimit(new ByteSizeValue(1000L, ByteSizeUnit.MB))); assertEquals("model_memory_limit [4gb] must be less than the value of the " + - MachineLearning.MAX_MODEL_MEMORY_LIMIT.getKey() + " setting [1000mb]", e.getMessage()); + MachineLearningClientActionPlugin.MAX_MODEL_MEMORY_LIMIT.getKey() + " setting [1000mb]", e.getMessage()); builder.validateModelMemoryLimit(new ByteSizeValue(8192L, ByteSizeUnit.MB)); } @@ -411,14 +411,15 @@ public class JobTests extends AbstractSerializingTestCase { public void testBuilder_setsDefaultIndexName() { Job.Builder builder = buildJobBuilder("foo"); Job job = builder.build(); - assertEquals(AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndex.RESULTS_INDEX_DEFAULT, job.getResultsIndexName()); + assertEquals(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT, + job.getResultsIndexName()); } public void testBuilder_setsIndexName() { Job.Builder builder = buildJobBuilder("foo"); builder.setResultsIndexName("carol"); Job job = builder.build(); - assertEquals(AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + "custom-carol", job.getResultsIndexName()); + assertEquals(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-carol", job.getResultsIndexName()); } public void testBuilder_withInvalidIndexNameThrows() { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java index 0e781b4dbf1..91e544aeb51 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.search.SearchHit; 
import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.action.util.QueryPage; import org.elasticsearch.xpack.ml.job.config.Job; @@ -76,7 +77,7 @@ public class JobProviderTests extends ESTestCase { @SuppressWarnings("unchecked") public void testCreateJobResultsIndex() { - String resultsIndexName = AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndex.RESULTS_INDEX_DEFAULT; + String resultsIndexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; QueryBuilder jobFilter = QueryBuilders.termQuery("job_id", "foo"); MockClientBuilder clientBuilder = new MockClientBuilder(CLUSTER_NAME); @@ -90,7 +91,8 @@ public class JobProviderTests extends ESTestCase { AtomicReference resultHolder = new AtomicReference<>(); ClusterState cs = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().putCustom(MlMetadata.TYPE, MlMetadata.EMPTY_METADATA).indices(ImmutableOpenMap.of())).build(); + .metaData(MetaData.builder().putCustom(MLMetadataField.TYPE, MlMetadata.EMPTY_METADATA).indices(ImmutableOpenMap.of())) + .build(); ClusterService clusterService = mock(ClusterService.class); @@ -153,7 +155,7 @@ public class JobProviderTests extends ESTestCase { .fPut(AnomalyDetectorsIndex.jobResultsAliasedName("foo"), indexMetaData).build(); ClusterState cs2 = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().putCustom(MlMetadata.TYPE, MlMetadata.EMPTY_METADATA).indices(indexMap)).build(); + .metaData(MetaData.builder().putCustom(MLMetadataField.TYPE, MlMetadata.EMPTY_METADATA).indices(indexMap)).build(); ClusterService clusterService = mock(ClusterService.class); @@ -185,7 +187,7 @@ public class JobProviderTests extends ESTestCase { @SuppressWarnings("unchecked") public 
void testCreateJobRelatedIndicies_createsAliasBecauseIndexNameIsSet() { - String indexName = AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + "custom-bar"; + String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-bar"; String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName("foo"); String writeAliasName = AnomalyDetectorsIndex.resultsWriteAlias("foo"); QueryBuilder jobFilter = QueryBuilders.termQuery("job_id", "foo"); @@ -205,7 +207,7 @@ public class JobProviderTests extends ESTestCase { ImmutableOpenMap indexMap = ImmutableOpenMap.builder().build(); ClusterState cs = ClusterState.builder(new ClusterName("_name")) - .metaData(MetaData.builder().putCustom(MlMetadata.TYPE, MlMetadata.EMPTY_METADATA).indices(indexMap)).build(); + .metaData(MetaData.builder().putCustom(MLMetadataField.TYPE, MlMetadata.EMPTY_METADATA).indices(indexMap)).build(); ClusterService clusterService = mock(ClusterService.class); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java index 4e89bbeccf6..23a11d10eb0 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java @@ -18,7 +18,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.ml.action.OpenJobAction.JobTask; +import org.elasticsearch.xpack.ml.action.TransportOpenJobAction.JobTask; import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.ml.job.config.DataDescription; diff --git 
a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java index bef60071a61..832ca7028ca 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.mock.orig.Mockito; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.action.DeleteModelSnapshotAction; import org.elasticsearch.xpack.ml.job.config.Job; @@ -185,7 +186,7 @@ public class ExpiredModelSnapshotsRemoverTests extends ESTestCase { MlMetadata mlMetadata = mock(MlMetadata.class); when(mlMetadata.getJobs()).thenReturn(jobsMap); MetaData metadata = mock(MetaData.class); - when(metadata.custom(MlMetadata.TYPE)).thenReturn(mlMetadata); + when(metadata.custom(MLMetadataField.TYPE)).thenReturn(mlMetadata); when(clusterState.getMetaData()).thenReturn(metadata); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java index 18128f6d6d0..d9ae8560b97 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.mock.orig.Mockito; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.ml.MLMetadataField; import 
org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.config.JobTests; @@ -164,7 +165,7 @@ public class ExpiredResultsRemoverTests extends ESTestCase { MlMetadata mlMetadata = mock(MlMetadata.class); when(mlMetadata.getJobs()).thenReturn(jobsMap); MetaData metadata = mock(MetaData.class); - when(metadata.custom(MlMetadata.TYPE)).thenReturn(mlMetadata); + when(metadata.custom(MLMetadataField.TYPE)).thenReturn(mlMetadata); when(clusterState.getMetaData()).thenReturn(metadata); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/modelsnapshots/GetModelSnapshotsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/modelsnapshots/GetModelSnapshotsTests.java index 4a28de2b225..817dee6fd03 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/modelsnapshots/GetModelSnapshotsTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/modelsnapshots/GetModelSnapshotsTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.modelsnapshots; import org.elasticsearch.common.ParseField; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ml.action.GetModelSnapshotsAction; +import org.elasticsearch.xpack.ml.action.TransportGetModelSnapshotsAction; import org.elasticsearch.xpack.ml.action.util.PageParams; import org.elasticsearch.xpack.ml.action.util.QueryPage; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; @@ -36,7 +37,7 @@ public class GetModelSnapshotsTests extends ESTestCase { ModelSnapshot m2 = new ModelSnapshot.Builder("jobId").build(); QueryPage page = new QueryPage<>(Arrays.asList(m1, m2), 2, new ParseField("field")); - page = GetModelSnapshotsAction.TransportAction.clearQuantiles(page); + page = TransportGetModelSnapshotsAction.clearQuantiles(page); assertEquals(2, page.results().size()); for (ModelSnapshot modelSnapshot : page.results()) { assertNull(modelSnapshot.getQuantiles()); diff --git 
a/plugin/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java index bff8b4e6836..d1e7949d469 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java @@ -26,6 +26,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.ml.MLMetadataField; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.action.CloseJobAction; @@ -268,7 +269,7 @@ public abstract class BaseMlIntegTestCase extends ESIntegTestCase { public static void deleteAllDatafeeds(Logger logger, Client client) throws Exception { MetaData metaData = client.admin().cluster().prepareState().get().getState().getMetaData(); - MlMetadata mlMetadata = metaData.custom(MlMetadata.TYPE); + MlMetadata mlMetadata = metaData.custom(MLMetadataField.TYPE); try { logger.info("Closing all datafeeds (using _all)"); StopDatafeedAction.Response stopResponse = client @@ -308,7 +309,7 @@ public abstract class BaseMlIntegTestCase extends ESIntegTestCase { public static void deleteAllJobs(Logger logger, Client client) throws Exception { MetaData metaData = client.admin().cluster().prepareState().get().getState().getMetaData(); - MlMetadata mlMetadata = metaData.custom(MlMetadata.TYPE); + MlMetadata mlMetadata = metaData.custom(MLMetadataField.TYPE); try { CloseJobAction.Request closeRequest = new CloseJobAction.Request(MetaData.ALL); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringHistoryDurationSettingsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringHistoryDurationSettingsTests.java index 
e89ae42de1d..7770b670805 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringHistoryDurationSettingsTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringHistoryDurationSettingsTests.java @@ -15,22 +15,22 @@ public class MonitoringHistoryDurationSettingsTests extends ESTestCase { TimeValue sevenDays = TimeValue.timeValueHours(7 * 24); // 7 days - assertEquals(sevenDays, Monitoring.HISTORY_DURATION.get(Settings.EMPTY)); + assertEquals(sevenDays, MonitoringField.HISTORY_DURATION.get(Settings.EMPTY)); // Note: this verifies the semantics because this is taken for granted that it never returns null! - assertEquals(sevenDays, Monitoring.HISTORY_DURATION.get(buildSettings(Monitoring.HISTORY_DURATION.getKey(), null))); + assertEquals(sevenDays, MonitoringField.HISTORY_DURATION.get(buildSettings(MonitoringField.HISTORY_DURATION.getKey(), null))); } public void testHistoryDurationMinimum24Hours() { // hit the minimum - assertEquals(Monitoring.HISTORY_DURATION_MINIMUM, - Monitoring.HISTORY_DURATION.get(buildSettings(Monitoring.HISTORY_DURATION.getKey(), "24h"))); + assertEquals(MonitoringField.HISTORY_DURATION_MINIMUM, + MonitoringField.HISTORY_DURATION.get(buildSettings(MonitoringField.HISTORY_DURATION.getKey(), "24h"))); } public void testHistoryDurationMinimum24HoursBlocksLower() { // 1 ms early! 
- final String oneSecondEarly = (Monitoring.HISTORY_DURATION_MINIMUM.millis() - 1) + "ms"; + final String oneSecondEarly = (MonitoringField.HISTORY_DURATION_MINIMUM.millis() - 1) + "ms"; expectThrows(IllegalArgumentException.class, - () -> Monitoring.HISTORY_DURATION.get(buildSettings(Monitoring.HISTORY_DURATION.getKey(), oneSecondEarly))); + () -> MonitoringField.HISTORY_DURATION.get(buildSettings(MonitoringField.HISTORY_DURATION.getKey(), oneSecondEarly))); } private Settings buildSettings(String key, String value) { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java index 85bf8d8855c..21405c225f4 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.monitoring.cleaner; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.xpack.monitoring.Monitoring; +import org.elasticsearch.xpack.monitoring.MonitoringField; import org.elasticsearch.xpack.monitoring.MonitoringService; import org.elasticsearch.xpack.monitoring.exporter.Exporter; import org.elasticsearch.xpack.monitoring.exporter.Exporters; @@ -154,7 +154,7 @@ public abstract class AbstractIndicesCleanerTestCase extends MonitoringIntegTest public void testRetentionAsGlobalSetting() throws Exception { final int max = 10; final int retention = randomIntBetween(1, max); - internalCluster().startNode(Settings.builder().put(Monitoring.HISTORY_DURATION.getKey(), + internalCluster().startNode(Settings.builder().put(MonitoringField.HISTORY_DURATION.getKey(), String.format(Locale.ROOT, "%dd", retention))); final DateTime 
now = now(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerServiceTests.java index 96585f29753..3f3f9d5ba81 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/CleanerServiceTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.monitoring.Monitoring; +import org.elasticsearch.xpack.monitoring.MonitoringField; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.junit.After; @@ -40,7 +40,7 @@ public class CleanerServiceTests extends ESTestCase { @Before public void start() { - clusterSettings = new ClusterSettings(Settings.EMPTY, Collections.singleton(Monitoring.HISTORY_DURATION)); + clusterSettings = new ClusterSettings(Settings.EMPTY, Collections.singleton(MonitoringField.HISTORY_DURATION)); threadPool = new TestThreadPool("CleanerServiceTests"); } @@ -54,14 +54,14 @@ public class CleanerServiceTests extends ESTestCase { expectedException.expect(IllegalArgumentException.class); TimeValue expected = TimeValue.timeValueHours(1); - Settings settings = Settings.builder().put(Monitoring.HISTORY_DURATION.getKey(), expected.getStringRep()).build(); + Settings settings = Settings.builder().put(MonitoringField.HISTORY_DURATION.getKey(), expected.getStringRep()).build(); new CleanerService(settings, clusterSettings, threadPool, licenseState); } public void testGetRetentionWithSettingWithUpdatesAllowed() { TimeValue expected = TimeValue.timeValueHours(25); - Settings settings = Settings.builder().put(Monitoring.HISTORY_DURATION.getKey(), expected.getStringRep()).build(); + Settings settings = 
Settings.builder().put(MonitoringField.HISTORY_DURATION.getKey(), expected.getStringRep()).build(); when(licenseState.isUpdateRetentionAllowed()).thenReturn(true); @@ -73,7 +73,7 @@ public class CleanerServiceTests extends ESTestCase { public void testGetRetentionDefaultValueWithNoSettings() { when(licenseState.isUpdateRetentionAllowed()).thenReturn(true); - assertEquals(Monitoring.HISTORY_DURATION.get(Settings.EMPTY), + assertEquals(MonitoringField.HISTORY_DURATION.get(Settings.EMPTY), new CleanerService(Settings.EMPTY, clusterSettings, threadPool, licenseState).getRetention()); verify(licenseState).isUpdateRetentionAllowed(); @@ -81,11 +81,11 @@ public class CleanerServiceTests extends ESTestCase { public void testGetRetentionDefaultValueWithSettingsButUpdatesNotAllowed() { TimeValue notExpected = TimeValue.timeValueHours(25); - Settings settings = Settings.builder().put(Monitoring.HISTORY_DURATION.getKey(), notExpected.getStringRep()).build(); + Settings settings = Settings.builder().put(MonitoringField.HISTORY_DURATION.getKey(), notExpected.getStringRep()).build(); when(licenseState.isUpdateRetentionAllowed()).thenReturn(false); - assertEquals(Monitoring.HISTORY_DURATION.get(Settings.EMPTY), + assertEquals(MonitoringField.HISTORY_DURATION.get(Settings.EMPTY), new CleanerService(settings, clusterSettings, threadPool, licenseState).getRetention()); verify(licenseState).isUpdateRetentionAllowed(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityFeatureSetTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityFeatureSetTests.java index fef581803cb..7f12001fc1c 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityFeatureSetTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityFeatureSetTests.java @@ -16,8 +16,8 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.xpack.XPackFeatureSet; -import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; @@ -166,7 +166,7 @@ public class SecurityFeatureSetTests extends ESTestCase { XPackFeatureSet.Usage serializedUsage = new SecurityFeatureSet.Usage(out.bytes().streamInput()); for (XPackFeatureSet.Usage usage : Arrays.asList(securityUsage, serializedUsage)) { assertThat(usage, is(notNullValue())); - assertThat(usage.name(), is(XPackPlugin.SECURITY)); + assertThat(usage.name(), is(XpackField.SECURITY)); assertThat(usage.enabled(), is(enabled)); assertThat(usage.available(), is(authcAuthzAvailable)); XContentSource source; diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/SecuritySettingsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/SecuritySettingsTests.java index 1f2287d6480..923b4954100 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/SecuritySettingsTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/SecuritySettingsTests.java @@ -7,8 +7,8 @@ package org.elasticsearch.xpack.security; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.security.audit.index.IndexAuditTrail; import java.util.Collections; @@ -55,7 +55,7 @@ public class SecuritySettingsTests extends ESTestCase { Settings settings = Settings.builder().put("tribe.t1.cluster.name", "non_existing") .put(TRIBE_T1_SECURITY_ENABLED, false) .put("tribe.t2.cluster.name", "non_existing") - .putList("tribe.t1.plugin.mandatory", "test_plugin", 
XPackPlugin.NAME).build(); + .putList("tribe.t1.plugin.mandatory", "test_plugin", XpackField.NAME).build(); try { Security.additionalSettings(settings, false); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 37837619a72..e8fe71aa653 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -229,12 +229,12 @@ public class SecurityTests extends ESTestCase { public void testSettingFilter() throws Exception { createComponents(Settings.EMPTY); final List filter = security.getSettingsFilter(null); - assertThat(filter, hasItem(Security.setting("authc.realms.*.bind_dn"))); - assertThat(filter, hasItem(Security.setting("authc.realms.*.bind_password"))); - assertThat(filter, hasItem(Security.setting("authc.realms.*." + SessionFactory.HOSTNAME_VERIFICATION_SETTING))); - assertThat(filter, hasItem(Security.setting("authc.realms.*.ssl.truststore.password"))); - assertThat(filter, hasItem(Security.setting("authc.realms.*.ssl.truststore.path"))); - assertThat(filter, hasItem(Security.setting("authc.realms.*.ssl.truststore.algorithm"))); + assertThat(filter, hasItem(SecurityField.setting("authc.realms.*.bind_dn"))); + assertThat(filter, hasItem(SecurityField.setting("authc.realms.*.bind_password"))); + assertThat(filter, hasItem(SecurityField.setting("authc.realms.*." 
+ SessionFactory.HOSTNAME_VERIFICATION_SETTING))); + assertThat(filter, hasItem(SecurityField.setting("authc.realms.*.ssl.truststore.password"))); + assertThat(filter, hasItem(SecurityField.setting("authc.realms.*.ssl.truststore.path"))); + assertThat(filter, hasItem(SecurityField.setting("authc.realms.*.ssl.truststore.algorithm"))); } public void testJoinValidatorOnDisabledSecurity() throws Exception { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java index 9a79c171ce2..390f0cc062d 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.security.SecurityContext; import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authc.Authentication.RealmRef; +import org.elasticsearch.xpack.security.authc.AuthenticationField; import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.permission.Role; @@ -121,8 +122,8 @@ public class SecurityActionFilterTests extends ESTestCase { doAnswer((i) -> { ActionListener callback = (ActionListener) i.getArguments()[3]; - assertNull(threadContext.getTransient(Authentication.AUTHENTICATION_KEY)); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + assertNull(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY)); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); callback.onResponse(authentication); return Void.TYPE; }).when(authcService).authenticate(eq("_action"), eq(request), 
eq(SystemUser.INSTANCE), any(ActionListener.class)); @@ -130,15 +131,15 @@ public class SecurityActionFilterTests extends ESTestCase { doAnswer((i) -> { ActionListener callback = (ActionListener) i.getArguments()[1]; - assertEquals(authentication, threadContext.getTransient(Authentication.AUTHENTICATION_KEY)); + assertEquals(authentication, threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY)); callback.onResponse(empty); return Void.TYPE; }).when(authzService).roles(any(User.class), any(ActionListener.class)); - assertNull(threadContext.getTransient(Authentication.AUTHENTICATION_KEY)); + assertNull(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY)); filter.apply(task, "_action", request, listener, chain); - assertNull(threadContext.getTransient(Authentication.AUTHENTICATION_KEY)); + assertNull(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY)); verify(authzService).authorize(authentication, "_action", request, empty, null); verify(chain).proceed(eq(task), eq("_action"), eq(request), isA(ContextPreservingActionListener.class)); } @@ -150,31 +151,31 @@ public class SecurityActionFilterTests extends ESTestCase { Authentication authentication = new Authentication(user, new RealmRef("test", "test", "foo"), null); SetOnce authenticationSetOnce = new SetOnce<>(); ActionFilterChain chain = (task, action, request1, listener1) -> { - authenticationSetOnce.set(threadContext.getTransient(Authentication.AUTHENTICATION_KEY)); + authenticationSetOnce.set(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY)); }; Task task = mock(Task.class); final boolean hasExistingAuthentication = randomBoolean(); final String action = "internal:foo"; if (hasExistingAuthentication) { - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); - threadContext.putHeader(Authentication.AUTHENTICATION_KEY, "foo"); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); + 
threadContext.putHeader(AuthenticationField.AUTHENTICATION_KEY, "foo"); threadContext.putTransient(AuthorizationService.ORIGINATING_ACTION_KEY, "indices:foo"); } else { - assertNull(threadContext.getTransient(Authentication.AUTHENTICATION_KEY)); + assertNull(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY)); } doAnswer((i) -> { ActionListener callback = (ActionListener) i.getArguments()[3]; - callback.onResponse(threadContext.getTransient(Authentication.AUTHENTICATION_KEY)); + callback.onResponse(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY)); return Void.TYPE; }).when(authcService).authenticate(eq(action), eq(request), eq(SystemUser.INSTANCE), any(ActionListener.class)); filter.apply(task, action, request, listener, chain); if (hasExistingAuthentication) { - assertEquals(authentication, threadContext.getTransient(Authentication.AUTHENTICATION_KEY)); + assertEquals(authentication, threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY)); } else { - assertNull(threadContext.getTransient(Authentication.AUTHENTICATION_KEY)); + assertNull(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY)); } assertNotNull(authenticationSetOnce.get()); assertNotEquals(authentication, authenticationSetOnce.get()); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java index e906e455a16..afa12b9ce14 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.security.action.user.HasPrivilegesResponse.IndexPrivileges; import 
org.elasticsearch.xpack.security.authc.Authentication; +import org.elasticsearch.xpack.security.authc.AuthenticationField; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.RoleDescriptor; import org.elasticsearch.xpack.security.authz.permission.Role; @@ -60,7 +61,7 @@ public class TransportHasPrivilegesActionTests extends ESTestCase { x -> null, null); final Authentication authentication = mock(Authentication.class); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); when(threadPool.getThreadContext()).thenReturn(threadContext); when(authentication.getUser()).thenReturn(user); @@ -169,18 +170,10 @@ public class TransportHasPrivilegesActionTests extends ESTestCase { .cluster(ClusterPrivilege.MONITOR) .build(); - final HasPrivilegesRequest request = new HasPrivilegesRequest(); - request.username(user.principal()); - request.clusterPrivileges(Strings.EMPTY_ARRAY); - request.indexPrivileges(RoleDescriptor.IndicesPrivileges.builder() + final HasPrivilegesResponse response = hasPrivileges(RoleDescriptor.IndicesPrivileges.builder() .indices("academy") .privileges("read", "write") - .build()); - final PlainActionFuture future = new PlainActionFuture(); - action.doExecute(request, future); - - final HasPrivilegesResponse response = future.get(); - assertThat(response, notNullValue()); + .build(), Strings.EMPTY_ARRAY); assertThat(response.isCompleteMatch(), is(false)); assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(1)); final IndexPrivileges result = response.getIndexPrivileges().get(0); @@ -261,20 +254,10 @@ public class TransportHasPrivilegesActionTests extends ESTestCase { .add(IndexPrivilege.DELETE, "apache-2016-*") .build(); - final HasPrivilegesRequest request = new HasPrivilegesRequest(); - request.username(user.principal()); - 
request.clusterPrivileges(Strings.EMPTY_ARRAY); - request.indexPrivileges( - RoleDescriptor.IndicesPrivileges.builder() - .indices("apache-2016-12", "apache-2017-01") - .privileges("index", "delete") - .build() - ); - final PlainActionFuture future = new PlainActionFuture(); - action.doExecute(request, future); - - final HasPrivilegesResponse response = future.get(); - assertThat(response, notNullValue()); + final HasPrivilegesResponse response = hasPrivileges(RoleDescriptor.IndicesPrivileges.builder() + .indices("apache-2016-12", "apache-2017-01") + .privileges("index", "delete") + .build(), Strings.EMPTY_ARRAY); assertThat(response.isCompleteMatch(), is(false)); assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(2)); assertThat(response.getIndexPrivileges(), containsInAnyOrder( @@ -288,6 +271,39 @@ public class TransportHasPrivilegesActionTests extends ESTestCase { )); } + public void testIsCompleteMatch() throws Exception { + role = Role.builder("test-write") + .cluster(ClusterPrivilege.MONITOR) + .add(IndexPrivilege.READ, "read-*") + .add(IndexPrivilege.ALL, "all-*") + .build(); + + assertThat(hasPrivileges(indexPrivileges("read", "read-123", "read-456", "all-999"), "monitor").isCompleteMatch(), is(true)); + assertThat(hasPrivileges(indexPrivileges("read", "read-123", "read-456", "all-999"), "manage").isCompleteMatch(), is(false)); + assertThat(hasPrivileges(indexPrivileges("write", "read-123", "read-456", "all-999"), "monitor").isCompleteMatch(), is(false)); + assertThat(hasPrivileges(indexPrivileges("write", "read-123", "read-456", "all-999"), "manage").isCompleteMatch(), is(false)); + } + + private RoleDescriptor.IndicesPrivileges indexPrivileges(String priv, String... indices) { + return RoleDescriptor.IndicesPrivileges.builder() + .indices(indices) + .privileges(priv) + .build(); + } + + private HasPrivilegesResponse hasPrivileges(RoleDescriptor.IndicesPrivileges indicesPrivileges, String... 
clusterPrivileges) + throws Exception { + final HasPrivilegesRequest request = new HasPrivilegesRequest(); + request.username(user.principal()); + request.clusterPrivileges(clusterPrivileges); + request.indexPrivileges(indicesPrivileges); + final PlainActionFuture future = new PlainActionFuture(); + action.doExecute(request, future); + final HasPrivilegesResponse response = future.get(); + assertThat(response, notNullValue()); + return response; + } + private static MapBuilder mapBuilder() { return MapBuilder.newMapBuilder(); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java index e0e2e0ec869..2739542f56e 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.security.authc.Authentication; +import org.elasticsearch.xpack.security.authc.AuthenticationField; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.ElasticUser; @@ -55,7 +56,7 @@ public class TransportSetEnabledActionTests extends ESTestCase { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); Authentication authentication = mock(Authentication.class); when(threadPool.getThreadContext()).thenReturn(threadContext); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); when(authentication.getUser()).thenReturn(user); NativeUsersStore 
usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, @@ -93,7 +94,7 @@ public class TransportSetEnabledActionTests extends ESTestCase { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); Authentication authentication = mock(Authentication.class); when(threadPool.getThreadContext()).thenReturn(threadContext); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); when(authentication.getUser()).thenReturn(user); NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, @@ -130,7 +131,7 @@ public class TransportSetEnabledActionTests extends ESTestCase { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); Authentication authentication = mock(Authentication.class); when(threadPool.getThreadContext()).thenReturn(threadContext); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); when(authentication.getUser()).thenReturn(new User("the runner")); final User user = randomFrom(new ElasticUser(true), new KibanaUser(true), new User("joe")); @@ -181,7 +182,7 @@ public class TransportSetEnabledActionTests extends ESTestCase { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); Authentication authentication = mock(Authentication.class); when(threadPool.getThreadContext()).thenReturn(threadContext); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); when(authentication.getUser()).thenReturn(new User("the runner")); final User user = randomFrom(new 
ElasticUser(true), new KibanaUser(true), new User("joe")); @@ -234,7 +235,7 @@ public class TransportSetEnabledActionTests extends ESTestCase { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); Authentication authentication = mock(Authentication.class); when(threadPool.getThreadContext()).thenReturn(threadContext); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); when(authentication.getUser()).thenReturn(user); NativeUsersStore usersStore = mock(NativeUsersStore.class); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java index dac7264aacb..f01638251ea 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.xpack.security.ScrollHelper; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.audit.AuditTrailService; -import org.elasticsearch.xpack.security.authc.AuthenticationService; +import org.elasticsearch.xpack.security.authc.AuthenticationServiceField; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -87,7 +87,7 @@ public class AuditTrailTests extends SecurityIntegTestCase { getRestClient().performRequest("GET", "/.security/_search", new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, UsernamePasswordToken.basicAuthHeaderValue(AUTHENTICATE_USER, TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationService.RUN_AS_USER_HEADER, EXECUTE_USER)); + new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, 
EXECUTE_USER)); fail("request should have failed"); } catch (ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); @@ -109,7 +109,7 @@ public class AuditTrailTests extends SecurityIntegTestCase { getRestClient().performRequest("GET", "/.security/_search", new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, UsernamePasswordToken.basicAuthHeaderValue(AUTHENTICATE_USER, TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationService.RUN_AS_USER_HEADER, "")); + new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "")); fail("request should have failed"); } catch (ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401)); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index 57cb3cd7e82..0e3494206a9 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -274,11 +274,11 @@ public class AuthenticationServiceTests extends ESTestCase { assertThat(result, notNullValue()); assertThat(result.getUser(), is(user)); - String userStr = threadContext.getHeader(Authentication.AUTHENTICATION_KEY); + String userStr = threadContext.getHeader(AuthenticationField.AUTHENTICATION_KEY); assertThat(userStr, notNullValue()); assertThat(userStr, equalTo("_signed_auth")); - Authentication ctxAuth = threadContext.getTransient(Authentication.AUTHENTICATION_KEY); + Authentication ctxAuth = threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY); assertThat(ctxAuth, is(result)); } @@ -399,7 +399,7 @@ public class AuthenticationServiceTests extends ESTestCase { assertThat(authentication.getUser(), sameInstance(user1)); assertThreadContextContainsAuthentication(authentication); 
authRef.set(authentication); - authHeaderRef.set(threadContext.getHeader(Authentication.AUTHENTICATION_KEY)); + authHeaderRef.set(threadContext.getHeader(AuthenticationField.AUTHENTICATION_KEY)); setCompletedToTrue(completed); }, this::logAndFail)); } @@ -415,11 +415,11 @@ public class AuthenticationServiceTests extends ESTestCase { new DefaultAuthenticationFailureHandler(), threadPool1, new AnonymousUser(Settings.EMPTY), tokenService); - threadContext1.putTransient(Authentication.AUTHENTICATION_KEY, authRef.get()); - threadContext1.putHeader(Authentication.AUTHENTICATION_KEY, authHeaderRef.get()); + threadContext1.putTransient(AuthenticationField.AUTHENTICATION_KEY, authRef.get()); + threadContext1.putHeader(AuthenticationField.AUTHENTICATION_KEY, authHeaderRef.get()); service.authenticate("_action", message1, SystemUser.INSTANCE, ActionListener.wrap(ctxAuth -> { assertThat(ctxAuth, sameInstance(authRef.get())); - assertThat(threadContext1.getHeader(Authentication.AUTHENTICATION_KEY), sameInstance(authHeaderRef.get())); + assertThat(threadContext1.getHeader(AuthenticationField.AUTHENTICATION_KEY), sameInstance(authHeaderRef.get())); setCompletedToTrue(completed); }, this::logAndFail)); assertTrue(completed.compareAndSet(true, false)); @@ -437,17 +437,17 @@ public class AuthenticationServiceTests extends ESTestCase { try (ThreadContext.StoredContext ignore = threadContext2.stashContext()) { service = new AuthenticationService(Settings.EMPTY, realms, auditTrail, new DefaultAuthenticationFailureHandler(), threadPool2, new AnonymousUser(Settings.EMPTY), tokenService); - threadContext2.putHeader(Authentication.AUTHENTICATION_KEY, authHeaderRef.get()); + threadContext2.putHeader(AuthenticationField.AUTHENTICATION_KEY, authHeaderRef.get()); BytesStreamOutput output = new BytesStreamOutput(); threadContext2.writeTo(output); StreamInput input = output.bytes().streamInput(); threadContext2 = new ThreadContext(Settings.EMPTY); threadContext2.readHeaders(input); - header = 
threadContext2.getHeader(Authentication.AUTHENTICATION_KEY); + header = threadContext2.getHeader(AuthenticationField.AUTHENTICATION_KEY); } - threadPool2.getThreadContext().putHeader(Authentication.AUTHENTICATION_KEY, header); + threadPool2.getThreadContext().putHeader(AuthenticationField.AUTHENTICATION_KEY, header); service = new AuthenticationService(Settings.EMPTY, realms, auditTrail, new DefaultAuthenticationFailureHandler(), threadPool2, new AnonymousUser(Settings.EMPTY), tokenService); service.authenticate("_action", new InternalMessage(), SystemUser.INSTANCE, ActionListener.wrap(result -> { @@ -464,7 +464,7 @@ public class AuthenticationServiceTests extends ESTestCase { public void testAuthenticateTamperedUser() throws Exception { InternalMessage message = new InternalMessage(); - threadContext.putHeader(Authentication.AUTHENTICATION_KEY, "_signed_auth"); + threadContext.putHeader(AuthenticationField.AUTHENTICATION_KEY, "_signed_auth"); try { authenticateBlocking("_action", message, randomBoolean() ? 
SystemUser.INSTANCE : null); @@ -608,7 +608,7 @@ public class AuthenticationServiceTests extends ESTestCase { public void testRealmLookupThrowingException() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); - threadContext.putHeader(AuthenticationService.RUN_AS_USER_HEADER, "run_as"); + threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"})); @@ -627,7 +627,7 @@ public class AuthenticationServiceTests extends ESTestCase { public void testRealmLookupThrowingExceptionRest() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); - threadContext.putHeader(AuthenticationService.RUN_AS_USER_HEADER, "run_as"); + threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"})); @@ -646,7 +646,7 @@ public class AuthenticationServiceTests extends ESTestCase { public void testRunAsLookupSameRealm() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); - threadContext.putHeader(AuthenticationService.RUN_AS_USER_HEADER, "run_as"); + threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); final User user = new User("lookup user", new String[]{"user"}, "lookup user", "lookup@foo.foo", @@ -693,7 +693,7 @@ public class AuthenticationServiceTests extends ESTestCase { public void testRunAsLookupDifferentRealm() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); - 
threadContext.putHeader(AuthenticationService.RUN_AS_USER_HEADER, "run_as"); + threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"})); @@ -730,7 +730,7 @@ public class AuthenticationServiceTests extends ESTestCase { public void testRunAsWithEmptyRunAsUsernameRest() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); User user = new User("lookup user", new String[]{"user"}); - threadContext.putHeader(AuthenticationService.RUN_AS_USER_HEADER, ""); + threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, ""); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); mockAuthenticate(secondRealm, token, user); @@ -747,7 +747,7 @@ public class AuthenticationServiceTests extends ESTestCase { public void testRunAsWithEmptyRunAsUsername() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); User user = new User("lookup user", new String[]{"user"}); - threadContext.putHeader(AuthenticationService.RUN_AS_USER_HEADER, ""); + threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, ""); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); mockAuthenticate(secondRealm, token, user); @@ -763,7 +763,7 @@ public class AuthenticationServiceTests extends ESTestCase { public void testAuthenticateTransportDisabledRunAsUser() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); - threadContext.putHeader(AuthenticationService.RUN_AS_USER_HEADER, "run_as"); + threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); 
mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"})); @@ -783,7 +783,7 @@ public class AuthenticationServiceTests extends ESTestCase { public void testAuthenticateRestDisabledRunAsUser() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); - threadContext.putHeader(AuthenticationService.RUN_AS_USER_HEADER, "run_as"); + threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"})); @@ -890,10 +890,10 @@ public class AuthenticationServiceTests extends ESTestCase { } void assertThreadContextContainsAuthentication(Authentication authentication) throws IOException { - Authentication contextAuth = threadContext.getTransient(Authentication.AUTHENTICATION_KEY); + Authentication contextAuth = threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY); assertThat(contextAuth, notNullValue()); assertThat(contextAuth, is(authentication)); - assertThat(threadContext.getHeader(Authentication.AUTHENTICATION_KEY), equalTo((Object) authentication.encode())); + assertThat(threadContext.getHeader(AuthenticationField.AUTHENTICATION_KEY), equalTo((Object) authentication.encode())); } private void mockAuthenticate(Realm realm, AuthenticationToken token, User user) { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java index 37d9b794571..fb076d8ab1a 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java @@ -103,7 +103,7 @@ public class RunAsIntegTests extends SecurityIntegTestCase { // let's run as without authorization try { - Map headers = 
Collections.singletonMap(AuthenticationService.RUN_AS_USER_HEADER, + Map headers = Collections.singletonMap(AuthenticationServiceField.RUN_AS_USER_HEADER, SecuritySettingsSource.TEST_USER_NAME); client.filterWithHeader(headers) .admin().cluster().prepareHealth().get(); @@ -116,7 +116,7 @@ public class RunAsIntegTests extends SecurityIntegTestCase { Map headers = new HashMap<>(); headers.put("Authorization", UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, new SecureString(SecuritySettingsSource.TEST_PASSWORD.toCharArray()))); - headers.put(AuthenticationService.RUN_AS_USER_HEADER, SecuritySettingsSource.TEST_USER_NAME); + headers.put(AuthenticationServiceField.RUN_AS_USER_HEADER, SecuritySettingsSource.TEST_USER_NAME); // lets set the user ClusterHealthResponse response = client.filterWithHeader(headers).admin().cluster().prepareHealth().get(); assertThat(response.isTimedOut(), is(false)); @@ -130,7 +130,7 @@ public class RunAsIntegTests extends SecurityIntegTestCase { new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, UsernamePasswordToken.basicAuthHeaderValue(TRANSPORT_CLIENT_USER, TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationService.RUN_AS_USER_HEADER, SecuritySettingsSource.TEST_USER_NAME)); + new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, SecuritySettingsSource.TEST_USER_NAME)); fail("request should have failed"); } catch(ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); @@ -154,7 +154,7 @@ public class RunAsIntegTests extends SecurityIntegTestCase { new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationService.RUN_AS_USER_HEADER, SecuritySettingsSource.TEST_USER_NAME)); + new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, SecuritySettingsSource.TEST_USER_NAME)); assertThat(response.getStatusLine().getStatusCode(), is(200)); } @@ -170,7 
+170,7 @@ public class RunAsIntegTests extends SecurityIntegTestCase { Map headers = new HashMap<>(); headers.put("Authorization", UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, new SecureString(SecuritySettingsSource.TEST_PASSWORD.toCharArray()))); - headers.put(AuthenticationService.RUN_AS_USER_HEADER, ""); + headers.put(AuthenticationServiceField.RUN_AS_USER_HEADER, ""); client.filterWithHeader(headers).admin().cluster().prepareHealth().get(); fail("run as header should not be allowed to be empty"); @@ -186,7 +186,7 @@ public class RunAsIntegTests extends SecurityIntegTestCase { new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationService.RUN_AS_USER_HEADER, "")); + new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "")); fail("request should have failed"); } catch(ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(401)); @@ -205,7 +205,7 @@ public class RunAsIntegTests extends SecurityIntegTestCase { Map headers = new HashMap<>(); headers.put("Authorization", UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, new SecureString(SecuritySettingsSource.TEST_PASSWORD.toCharArray()))); - headers.put(AuthenticationService.RUN_AS_USER_HEADER, "idontexist"); + headers.put(AuthenticationServiceField.RUN_AS_USER_HEADER, "idontexist"); client.filterWithHeader(headers).admin().cluster().prepareHealth().get(); fail("run as header should not accept non-existent users"); @@ -221,7 +221,7 @@ public class RunAsIntegTests extends SecurityIntegTestCase { new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, TEST_PASSWORD_SECURE_STRING)), - new BasicHeader(AuthenticationService.RUN_AS_USER_HEADER, "idontexist")); + new BasicHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "idontexist")); fail("request should have failed"); } catch 
(ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java index d1a11ebe828..894a71b5cbd 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.security.SecurityLifecycleService; -import org.elasticsearch.xpack.security.action.user.ChangePasswordRequest; import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore.ReservedUserInfo; import org.elasticsearch.xpack.security.authc.support.Hasher; @@ -241,20 +240,20 @@ public class ReservedRealmTests extends ESTestCase { public void testIsReserved() { final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true)); final String principal = expectedUser.principal(); - assertThat(ReservedRealm.isReserved(principal, Settings.EMPTY), is(true)); + assertThat(ClientReservedRealm.isReserved(principal, Settings.EMPTY), is(true)); final String notExpected = randomFrom("foobar", "", randomAlphaOfLengthBetween(1, 30)); - assertThat(ReservedRealm.isReserved(notExpected, Settings.EMPTY), is(false)); + assertThat(ClientReservedRealm.isReserved(notExpected, Settings.EMPTY), is(false)); } public void testIsReservedDisabled() { Settings settings = Settings.builder().put(XPackSettings.RESERVED_REALM_ENABLED_SETTING.getKey(), false).build(); final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true)); final 
String principal = expectedUser.principal(); - assertThat(ReservedRealm.isReserved(principal, settings), is(false)); + assertThat(ClientReservedRealm.isReserved(principal, settings), is(false)); final String notExpected = randomFrom("foobar", "", randomAlphaOfLengthBetween(1, 30)); - assertThat(ReservedRealm.isReserved(notExpected, settings), is(false)); + assertThat(ClientReservedRealm.isReserved(notExpected, settings), is(false)); } public void testGetUsers() { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java index be32419f85c..409278f3f64 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java @@ -28,7 +28,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.security.audit.logfile.CapturingLogger; import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.RealmConfig; @@ -66,7 +66,7 @@ public class FileUserPasswdStoreTests extends ESTestCase { } public void testStore_ConfiguredWithUnreadableFile() throws Exception { - Path xpackConf = env.configFile().resolve(XPackPlugin.NAME); + Path xpackConf = env.configFile().resolve(XpackField.NAME); Files.createDirectories(xpackConf); Path file = xpackConf.resolve("users"); @@ -82,7 +82,7 @@ public class FileUserPasswdStoreTests extends ESTestCase { public void testStore_AutoReload() throws Exception { Path users = getDataPath("users"); - Path xpackConf = env.configFile().resolve(XPackPlugin.NAME); + 
Path xpackConf = env.configFile().resolve(XpackField.NAME); Files.createDirectories(xpackConf); Path file = xpackConf.resolve("users"); Files.copy(users, file, StandardCopyOption.REPLACE_EXISTING); @@ -119,7 +119,7 @@ public class FileUserPasswdStoreTests extends ESTestCase { public void testStore_AutoReload_WithParseFailures() throws Exception { Path users = getDataPath("users"); - Path xpackConf = env.configFile().resolve(XPackPlugin.NAME); + Path xpackConf = env.configFile().resolve(XpackField.NAME); Files.createDirectories(xpackConf); Path testUsers = xpackConf.resolve("users"); Files.copy(users, testUsers, StandardCopyOption.REPLACE_EXISTING); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java index eb9994c99e6..493b2c031e0 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStoreTests.java @@ -16,8 +16,8 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.security.audit.logfile.CapturingLogger; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.junit.After; @@ -270,7 +270,7 @@ public class FileUserRolesStoreTests extends ESTestCase { } private Path getUsersRolesPath() throws IOException { - Path xpackConf = env.configFile().resolve(XPackPlugin.NAME); + Path xpackConf = env.configFile().resolve(XpackField.NAME); Files.createDirectories(xpackConf); return xpackConf.resolve("users_roles"); } diff --git 
a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/tool/UsersToolTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/tool/UsersToolTests.java index 28c29ac7d31..65711f43575 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/tool/UsersToolTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/tool/UsersToolTests.java @@ -20,9 +20,9 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.security.authc.support.Hasher; import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; -import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.security.user.ElasticUser; import org.elasticsearch.xpack.security.user.KibanaUser; import org.junit.AfterClass; @@ -67,7 +67,7 @@ public class UsersToolTests extends CommandTestCase { public void setupHome() throws IOException { Path homeDir = jimfs.getPath("eshome"); IOUtils.rm(homeDir); - confDir = homeDir.resolve("config").resolve(XPackPlugin.NAME); + confDir = homeDir.resolve("config").resolve(XpackField.NAME); Files.createDirectories(confDir); String defaultPassword = SecuritySettingsSource.TEST_PASSWORD; Files.write(confDir.resolve("users"), Arrays.asList( @@ -492,7 +492,7 @@ public class UsersToolTests extends CommandTestCase { public void testUserAddNoConfig() throws Exception { Path homeDir = jimfs.getPath("eshome"); - Path xpackConfDir = homeDir.resolve("config").resolve(XPackPlugin.NAME); + Path xpackConfDir = homeDir.resolve("config").resolve(XpackField.NAME); IOUtils.rm(confDir); pathHomeParameter = "-Epath.home=" + homeDir; fileTypeParameter = "-Expack.security.authc.realms.file.type=file"; @@ -505,7 +505,7 @@ public class UsersToolTests extends CommandTestCase { public void 
testUserListNoConfig() throws Exception { Path homeDir = jimfs.getPath("eshome"); - Path xpackConfDir = homeDir.resolve("config").resolve(XPackPlugin.NAME); + Path xpackConfDir = homeDir.resolve("config").resolve(XpackField.NAME); IOUtils.rm(confDir); pathHomeParameter = "-Epath.home=" + homeDir; fileTypeParameter = "-Expack.security.authc.realms.file.type=file"; @@ -518,7 +518,7 @@ public class UsersToolTests extends CommandTestCase { public void testUserDelNoConfig() throws Exception { Path homeDir = jimfs.getPath("eshome"); - Path xpackConfDir = homeDir.resolve("config").resolve(XPackPlugin.NAME); + Path xpackConfDir = homeDir.resolve("config").resolve(XpackField.NAME); IOUtils.rm(confDir); pathHomeParameter = "-Epath.home=" + homeDir; fileTypeParameter = "-Expack.security.authc.realms.file.type=file"; @@ -531,7 +531,7 @@ public class UsersToolTests extends CommandTestCase { public void testListUserRolesNoConfig() throws Exception { Path homeDir = jimfs.getPath("eshome"); - Path xpackConfDir = homeDir.resolve("config").resolve(XPackPlugin.NAME); + Path xpackConfDir = homeDir.resolve("config").resolve(XpackField.NAME); IOUtils.rm(confDir); pathHomeParameter = "-Epath.home=" + homeDir; fileTypeParameter = "-Expack.security.authc.realms.file.type=file"; diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryIntegTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryIntegTests.java index ae2a97c735f..ce346bdfab2 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryIntegTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryIntegTests.java @@ -5,6 +5,11 @@ */ package org.elasticsearch.xpack.security.authc.ldap; +import com.unboundid.ldap.sdk.LDAPConnection; +import com.unboundid.ldap.sdk.LDAPConnectionPool; +import com.unboundid.ldap.sdk.LDAPException; +import 
com.unboundid.ldap.sdk.LDAPInterface; + import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; @@ -16,6 +21,8 @@ import org.elasticsearch.xpack.ssl.VerificationMode; import org.junit.Before; import java.nio.file.Path; +import java.security.AccessController; +import java.security.PrivilegedAction; @Network public class AbstractActiveDirectoryIntegTests extends ESTestCase { @@ -78,4 +85,24 @@ public class AbstractActiveDirectoryIntegTests extends ESTestCase { } return builder.build(); } + + protected static void assertConnectionCanReconnect(LDAPInterface conn) { + AccessController.doPrivileged(new PrivilegedAction() { + @Override + public Void run() { + try { + if (conn instanceof LDAPConnection) { + ((LDAPConnection) conn).reconnect(); + } else if (conn instanceof LDAPConnectionPool) { + try (LDAPConnection c = ((LDAPConnectionPool) conn).getConnection()) { + c.reconnect(); + } + } + } catch (LDAPException e) { + fail("Connection is not valid. 
It will not work on follow referral flow."); + } + return null; + } + }); + } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java index fec760b08ae..a51660a6695 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java @@ -67,6 +67,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI String userName = "ironman"; try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { + assertConnectionCanReconnect(ldap.getConnection()); List groups = groups(ldap); assertThat(groups, containsInAnyOrder( containsString("Geniuses"), @@ -90,6 +91,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI String userName = "ades\\ironman"; try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { + assertConnectionCanReconnect(ldap.getConnection()); List groups = groups(ldap); assertThat(groups, containsInAnyOrder( containsString("Geniuses"), @@ -132,6 +134,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI String[] users = new String[]{"cap", "hawkeye", "hulk", "ironman", "thor", "blackwidow"}; for (String user : users) { try (LdapSession ldap = session(sessionFactory, user, SECURED_PASSWORD)) { + assertConnectionCanReconnect(ldap.getConnection()); assertThat("group avenger test for user " + user, groups(ldap), hasItem(containsString("Avengers"))); } } @@ -148,6 +151,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI String userName = "hulk"; try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { + assertConnectionCanReconnect(ldap.getConnection()); 
List groups = groups(ldap); assertThat(groups, containsInAnyOrder( @@ -172,6 +176,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI String userName = "hulk"; try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { + assertConnectionCanReconnect(ldap.getConnection()); List groups = groups(ldap); assertThat(groups, containsInAnyOrder( @@ -200,6 +205,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI String userName = "hulk"; try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { + assertConnectionCanReconnect(ldap.getConnection()); List groups = groups(ldap); assertThat(groups, hasItem(containsString("Avengers"))); @@ -218,6 +224,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI //Login with the UserPrincipalName String userDN = "CN=Erik Selvig,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"; try (LdapSession ldap = session(sessionFactory, "erik.selvig", SECURED_PASSWORD)) { + assertConnectionCanReconnect(ldap.getConnection()); List groups = groups(ldap); assertThat(ldap.userDn(), is(userDN)); assertThat(groups, containsInAnyOrder( @@ -238,6 +245,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI //login with sAMAccountName String userDN = "CN=Erik Selvig,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"; try (LdapSession ldap = session(sessionFactory, "selvig", SECURED_PASSWORD)) { + assertConnectionCanReconnect(ldap.getConnection()); assertThat(ldap.userDn(), is(userDN)); List groups = groups(ldap); @@ -263,6 +271,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI //Login with the UserPrincipalName try (LdapSession ldap = session(sessionFactory, "erik.selvig", SECURED_PASSWORD)) { + assertConnectionCanReconnect(ldap.getConnection()); List groups = groups(ldap); assertThat(groups, containsInAnyOrder( containsString("CN=Geniuses"), @@ -297,6 +306,7 
@@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI String user = "Bruce Banner"; try (LdapSession ldap = session(sessionFactory, user, SECURED_PASSWORD)) { + assertConnectionCanReconnect(ldap.getConnection()); List groups = groups(ldap); assertThat(groups, containsInAnyOrder( @@ -308,6 +318,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI } @SuppressWarnings("unchecked") + @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/3369") public void testHandlingLdapReferralErrors() throws Exception { String groupSearchBase = "DC=ad,DC=test,DC=elasticsearch,DC=com"; String userTemplate = "CN={0},CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"; @@ -361,6 +372,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI String user = "Bruce Banner"; try (LdapSession ldap = session(sessionFactory, user, SECURED_PASSWORD)) { + assertConnectionCanReconnect(ldap.getConnection()); List groups = groups(ldap); assertThat(groups, containsInAnyOrder( @@ -419,6 +431,7 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI "ADES\\cap", "ADES\\hawkeye", "ADES\\hulk", "ADES\\ironman", "ADES\\thor", "ADES\\blackwidow")); for (String user : users) { try (LdapSession ldap = unauthenticatedSession(sessionFactory, user)) { + assertConnectionCanReconnect(ldap.getConnection()); assertNotNull("ldap session was null for user " + user, ldap); assertThat("group avenger test for user " + user, groups(ldap), hasItem(containsString("Avengers"))); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java index 2a1d49738ad..7a97b23866a 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java +++ 
b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.authc.ldap; import com.unboundid.ldap.listener.InMemoryDirectoryServer; import com.unboundid.ldap.sdk.LDAPException; +import com.unboundid.ldap.sdk.LDAPInterface; import com.unboundid.ldap.sdk.LDAPURL; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -124,6 +125,7 @@ public class LdapSessionFactoryTests extends LdapTestCase { SecureString userPass = new SecureString("pass"); try (LdapSession ldap = session(sessionFactory, user, userPass)) { + assertConnectionCanReconnect(ldap.getConnection()); String dn = ldap.userDn(); assertThat(dn, containsString(user)); } @@ -163,6 +165,7 @@ public class LdapSessionFactoryTests extends LdapTestCase { SecureString userPass = new SecureString("pass"); try (LdapSession ldap = session(ldapFac, user, userPass)) { + assertConnectionCanReconnect(ldap.getConnection()); List groups = groups(ldap); assertThat(groups, contains("cn=HMS Lydia,ou=crews,ou=groups,o=sevenSeas")); } @@ -178,6 +181,7 @@ public class LdapSessionFactoryTests extends LdapTestCase { String user = "Horatio Hornblower"; try (LdapSession ldap = session(ldapFac, user, new SecureString("pass"))) { + assertConnectionCanReconnect(ldap.getConnection()); List groups = groups(ldap); assertThat(groups, contains("cn=HMS Lydia,ou=crews,ou=groups,o=sevenSeas")); } @@ -195,6 +199,7 @@ public class LdapSessionFactoryTests extends LdapTestCase { SecureString userPass = new SecureString("pass"); try (LdapSession ldap = session(ldapFac, user, userPass)) { + assertConnectionCanReconnect(ldap.getConnection()); List groups = groups(ldap); assertThat(groups.size(), is(1)); assertThat(groups, contains("cn=HMS Lydia,ou=crews,ou=groups,o=sevenSeas")); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java 
b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java index b08873f0063..dbfda854a58 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java @@ -138,12 +138,14 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { try { // auth try (LdapSession ldap = session(sessionFactory, user, userPass)) { + assertConnectionCanReconnect(ldap.getConnection()); String dn = ldap.userDn(); assertThat(dn, containsString(user)); } //lookup try (LdapSession ldap = unauthenticatedSession(sessionFactory, user)) { + assertConnectionCanReconnect(ldap.getConnection()); String dn = ldap.userDn(); assertThat(dn, containsString(user)); } @@ -221,12 +223,14 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { try { // auth try (LdapSession ldap = session(sessionFactory, user, userPass)) { + assertConnectionCanReconnect(ldap.getConnection()); String dn = ldap.userDn(); assertThat(dn, containsString(user)); } //lookup try (LdapSession ldap = unauthenticatedSession(sessionFactory, user)) { + assertConnectionCanReconnect(ldap.getConnection()); String dn = ldap.userDn(); assertThat(dn, containsString(user)); } @@ -304,12 +308,14 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { try { //auth try (LdapSession ldap = session(sessionFactory, user, userPass)) { + assertConnectionCanReconnect(ldap.getConnection()); String dn = ldap.userDn(); assertThat(dn, containsString(user)); } //lookup try (LdapSession ldap = unauthenticatedSession(sessionFactory, user)) { + assertConnectionCanReconnect(ldap.getConnection()); String dn = ldap.userDn(); assertThat(dn, containsString(user)); } @@ -378,12 +384,14 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { try { //auth try (LdapSession ldap = 
session(sessionFactory, user, userPass)) { + assertConnectionCanReconnect(ldap.getConnection()); String dn = ldap.userDn(); assertThat(dn, containsString("William Bush")); } //lookup try (LdapSession ldap = unauthenticatedSession(sessionFactory, user)) { + assertConnectionCanReconnect(ldap.getConnection()); String dn = ldap.userDn(); assertThat(dn, containsString("William Bush")); } @@ -422,6 +430,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { try { //auth try (LdapSession ldap = session(sessionFactory, user, new SecureString(ActiveDirectorySessionFactoryTests.PASSWORD))) { + assertConnectionCanReconnect(ldap.getConnection()); List groups = groups(ldap); assertThat(groups, containsInAnyOrder( @@ -433,6 +442,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { //lookup try (LdapSession ldap = unauthenticatedSession(sessionFactory, user)) { + assertConnectionCanReconnect(ldap.getConnection()); List groups = groups(ldap); assertThat(groups, containsInAnyOrder( diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java index 8bdfb7ff833..c913ab91506 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java @@ -7,8 +7,13 @@ package org.elasticsearch.xpack.security.authc.ldap.support; import com.unboundid.ldap.listener.InMemoryDirectoryServer; import com.unboundid.ldap.sdk.Attribute; +import com.unboundid.ldap.sdk.LDAPConnection; +import com.unboundid.ldap.sdk.LDAPConnectionPool; import com.unboundid.ldap.sdk.LDAPException; +import com.unboundid.ldap.sdk.LDAPInterface; import com.unboundid.ldap.sdk.LDAPURL; + +import org.elasticsearch.SpecialPermission; import org.elasticsearch.action.support.PlainActionFuture; import 
org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; @@ -27,6 +32,8 @@ import org.junit.Before; import org.junit.BeforeClass; import java.security.AccessController; +import java.security.PrivilegedAction; +import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.List; @@ -160,4 +167,24 @@ public abstract class LdapTestCase extends ESTestCase { factory.unauthenticatedSession(username, future); return future.actionGet(); } + + protected static void assertConnectionCanReconnect(LDAPInterface conn) { + AccessController.doPrivileged(new PrivilegedAction() { + @Override + public Void run() { + try { + if (conn instanceof LDAPConnection) { + ((LDAPConnection) conn).reconnect(); + } else if (conn instanceof LDAPConnectionPool) { + try (LDAPConnection c = ((LDAPConnectionPool) conn).getConnection()) { + c.reconnect(); + } + } + } catch (LDAPException e) { + fail("Connection is not valid. 
It will not work on follow referral flow."); + } + return null; + } + }); + } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 5b5e217e454..220defd506b 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -113,8 +113,8 @@ import org.elasticsearch.xpack.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.security.authz.accesscontrol.IndicesAccessControl; import org.elasticsearch.xpack.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.security.authz.permission.Role; +import org.elasticsearch.xpack.security.authz.store.ClientReservedRoles; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; -import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.ElasticUser; import org.elasticsearch.xpack.security.user.SystemUser; @@ -597,7 +597,7 @@ public class AuthorizationServiceTests extends ESTestCase { public void testRunAsRequestWithoutLookedUpBy() { AuthenticateRequest request = new AuthenticateRequest("run as me"); - roleMap.put("can run as", ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); + roleMap.put("can run as", ClientReservedRoles.SUPERUSER_ROLE_DESCRIPTOR); User user = new User("run as me", Strings.EMPTY_ARRAY, new User("test user", new String[] { "can run as" })); Authentication authentication = new Authentication(user, new RealmRef("foo", "bar", "baz"), null); assertNotEquals(user.authenticatedUser(), user); @@ -605,7 +605,7 @@ public class AuthorizationServiceTests extends ESTestCase { () -> authorize(authentication, AuthenticateAction.NAME, request), 
AuthenticateAction.NAME, "test user", "run as me"); // run as [run as me] verify(auditTrail).runAsDenied(user, AuthenticateAction.NAME, request, - new String[] { ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName() }); + new String[] { ClientReservedRoles.SUPERUSER_ROLE_DESCRIPTOR.getName() }); verifyNoMoreInteractions(auditTrail); } @@ -777,8 +777,8 @@ public class AuthorizationServiceTests extends ESTestCase { } public void testSuperusersCanExecuteOperationAgainstSecurityIndex() { - final User superuser = new User("custom_admin", ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName()); - roleMap.put(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName(), ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); + final User superuser = new User("custom_admin", ClientReservedRoles.SUPERUSER_ROLE_DESCRIPTOR.getName()); + roleMap.put(ClientReservedRoles.SUPERUSER_ROLE_DESCRIPTOR.getName(), ClientReservedRoles.SUPERUSER_ROLE_DESCRIPTOR); ClusterState state = mock(ClusterState.class); when(clusterService.state()).thenReturn(state); when(state.metaData()).thenReturn(MetaData.builder() @@ -816,8 +816,8 @@ public class AuthorizationServiceTests extends ESTestCase { } public void testSuperusersCanExecuteOperationAgainstSecurityIndexWithWildcard() { - final User superuser = new User("custom_admin", ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName()); - roleMap.put(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName(), ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); + final User superuser = new User("custom_admin", ClientReservedRoles.SUPERUSER_ROLE_DESCRIPTOR.getName()); + roleMap.put(ClientReservedRoles.SUPERUSER_ROLE_DESCRIPTOR.getName(), ClientReservedRoles.SUPERUSER_ROLE_DESCRIPTOR); ClusterState state = mock(ClusterState.class); when(clusterService.state()).thenReturn(state); when(state.metaData()).thenReturn(MetaData.builder() diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java 
b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java index 32694f22fe8..2909b73d176 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.ClientHelper; import org.elasticsearch.xpack.security.SecurityContext; import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authc.Authentication.RealmRef; +import org.elasticsearch.xpack.security.authc.AuthenticationField; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.test.ESTestCase; @@ -41,7 +42,7 @@ public class AuthorizationUtilsTests extends ESTestCase { } public void testSystemUserSwitchWithSystemUser() { - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, new Authentication(SystemUser.INSTANCE, new RealmRef("test", "test", "foo"), null)); assertThat(AuthorizationUtils.shouldReplaceUserWithSystem(threadContext, "internal:something"), is(false)); } @@ -53,7 +54,7 @@ public class AuthorizationUtilsTests extends ESTestCase { public void testSystemUserSwitchWithNonSystemUser() { User user = new User(randomAlphaOfLength(6), new String[] {}); Authentication authentication = new Authentication(user, new RealmRef("test", "test", "foo"), null); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); threadContext.putTransient(AuthorizationService.ORIGINATING_ACTION_KEY, randomFrom("indices:foo", "cluster:bar")); assertThat(AuthorizationUtils.shouldReplaceUserWithSystem(threadContext, "internal:something"), is(true)); } @@ -61,7 +62,7 @@ public class AuthorizationUtilsTests extends 
ESTestCase { public void testSystemUserSwitchWithNonSystemUserAndInternalAction() { User user = new User(randomAlphaOfLength(6), new String[] {}); Authentication authentication = new Authentication(user, new RealmRef("test", "test", "foo"), null); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); threadContext.putTransient(AuthorizationService.ORIGINATING_ACTION_KEY, randomFrom("internal:foo/bar")); assertThat(AuthorizationUtils.shouldReplaceUserWithSystem(threadContext, "internal:something"), is(false)); } @@ -76,11 +77,11 @@ public class AuthorizationUtilsTests extends ESTestCase { // set authentication User user = new User(randomAlphaOfLength(6), new String[] {}); Authentication authentication = new Authentication(user, new RealmRef("test", "test", "foo"), null); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); assertFalse(AuthorizationUtils.shouldSetUserBasedOnActionOrigin(threadContext)); threadContext = new ThreadContext(Settings.EMPTY); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); assertFalse(AuthorizationUtils.shouldSetUserBasedOnActionOrigin(threadContext)); threadContext = new ThreadContext(Settings.EMPTY); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index 8ad435db957..4b741ed724a 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -64,8 +64,8 @@ import 
org.elasticsearch.xpack.security.authz.IndicesAndAliasesResolver.Resolved import org.elasticsearch.xpack.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.security.authz.permission.Role; +import org.elasticsearch.xpack.security.authz.store.ClientReservedRoles; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; -import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.security.test.SecurityTestUtils; import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.User; @@ -148,7 +148,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { roleMap.put("dash", new RoleDescriptor("dash", null, new IndicesPrivileges[] { IndicesPrivileges.builder().indices(dashIndices).privileges("all").build() }, null)); roleMap.put("test", new RoleDescriptor("role", new String[] { "monitor" }, null, null)); - roleMap.put(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName(), ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); + roleMap.put(ClientReservedRoles.SUPERUSER_ROLE_DESCRIPTOR.getName(), ClientReservedRoles.SUPERUSER_ROLE_DESCRIPTOR); final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); doAnswer((i) -> { ActionListener callback = diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListenerTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListenerTests.java index 10d13da16fb..5a80640b0ab 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListenerTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListenerTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.transport.TransportRequest.Empty; import 
org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authc.Authentication.RealmRef; +import org.elasticsearch.xpack.security.authc.AuthenticationField; import org.elasticsearch.xpack.security.user.User; import static org.elasticsearch.mock.orig.Mockito.verifyNoMoreInteractions; @@ -66,7 +67,7 @@ public class SecuritySearchOperationListenerTests extends ESTestCase { SecuritySearchOperationListener listener = new SecuritySearchOperationListener(threadContext, licenseState, auditTrailService); listener.onNewScrollContext(testSearchContext); - Authentication contextAuth = testSearchContext.scrollContext().getFromContext(Authentication.AUTHENTICATION_KEY); + Authentication contextAuth = testSearchContext.scrollContext().getFromContext(AuthenticationField.AUTHENTICATION_KEY); assertEquals(authentication, contextAuth); assertEquals(scroll, testSearchContext.scrollContext().scroll); @@ -77,7 +78,7 @@ public class SecuritySearchOperationListenerTests extends ESTestCase { public void testValidateSearchContext() throws Exception { TestScrollSearchContext testSearchContext = new TestScrollSearchContext(); testSearchContext.scrollContext(new ScrollContext()); - testSearchContext.scrollContext().putInContext(Authentication.AUTHENTICATION_KEY, + testSearchContext.scrollContext().putInContext(AuthenticationField.AUTHENTICATION_KEY, new Authentication(new User("test", "role"), new RealmRef("realm", "file", "node"), null)); testSearchContext.scrollContext().scroll = new Scroll(TimeValue.timeValueSeconds(2L)); XPackLicenseState licenseState = mock(XPackLicenseState.class); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SetSecurityUserProcessorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SetSecurityUserProcessorTests.java index eb821597b0d..3d35adc83a9 100644 --- 
a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SetSecurityUserProcessorTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/SetSecurityUserProcessorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.security.authc.Authentication; +import org.elasticsearch.xpack.security.authc.AuthenticationField; import org.elasticsearch.xpack.security.authz.accesscontrol.SetSecurityUserProcessor.Property; import org.elasticsearch.xpack.security.user.User; @@ -28,7 +29,7 @@ public class SetSecurityUserProcessorTests extends ESTestCase { Collections.singletonMap("key", "value"), true); Authentication.RealmRef realmRef = new Authentication.RealmRef("_name", "_type", "_node_name"); ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); SetSecurityUserProcessor processor = new SetSecurityUserProcessor("_tag", threadContext, "_field", EnumSet.allOf(Property.class)); @@ -48,7 +49,7 @@ public class SetSecurityUserProcessorTests extends ESTestCase { // test when user holds no data: threadContext = new ThreadContext(Settings.EMPTY); user = new User(null, null, null); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); processor = new SetSecurityUserProcessor("_tag", threadContext, "_field", EnumSet.allOf(Property.class)); 
processor.execute(ingestDocument); @@ -68,7 +69,7 @@ public class SetSecurityUserProcessorTests extends ESTestCase { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); User user = new User("_username", null, null); Authentication.RealmRef realmRef = new Authentication.RealmRef("_name", "_type", "_node_name"); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); SetSecurityUserProcessor processor = new SetSecurityUserProcessor("_tag", threadContext, "_field", EnumSet.of(Property.USERNAME)); @@ -84,7 +85,7 @@ public class SetSecurityUserProcessorTests extends ESTestCase { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); User user = new User(null, "role1", "role2"); Authentication.RealmRef realmRef = new Authentication.RealmRef("_name", "_type", "_node_name"); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); SetSecurityUserProcessor processor = new SetSecurityUserProcessor("_tag", threadContext, "_field", EnumSet.of(Property.ROLES)); @@ -102,7 +103,7 @@ public class SetSecurityUserProcessorTests extends ESTestCase { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); User user = new User(null, null, "_full_name", null, null, true); Authentication.RealmRef realmRef = new Authentication.RealmRef("_name", "_type", "_node_name"); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, new Authentication(user, 
realmRef, null)); IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); SetSecurityUserProcessor processor = new SetSecurityUserProcessor("_tag", threadContext, "_field", EnumSet.of(Property.FULL_NAME)); @@ -118,7 +119,7 @@ public class SetSecurityUserProcessorTests extends ESTestCase { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); User user = new User(null, null, null, "_email", null, true); Authentication.RealmRef realmRef = new Authentication.RealmRef("_name", "_type", "_node_name"); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); SetSecurityUserProcessor processor = new SetSecurityUserProcessor("_tag", threadContext, "_field", EnumSet.of(Property.EMAIL)); @@ -134,7 +135,7 @@ public class SetSecurityUserProcessorTests extends ESTestCase { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); User user = new User(null, null, null, null, Collections.singletonMap("key", "value"), true); Authentication.RealmRef realmRef = new Authentication.RealmRef("_name", "_type", "_node_name"); - threadContext.putTransient(Authentication.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, new Authentication(user, realmRef, null)); IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); SetSecurityUserProcessor processor = new SetSecurityUserProcessor("_tag", threadContext, "_field", EnumSet.of(Property.METADATA)); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index e9136bfaaad..be967c75477 100644 --- 
a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -206,8 +206,8 @@ public class CompositeRolesStoreTests extends ESTestCase { final int numberOfTimesToCall = scaledRandomIntBetween(0, 32); final boolean getSuperuserRole = randomBoolean() - && roleName.equals(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName()) == false; - final Set names = getSuperuserRole ? Sets.newHashSet(roleName, ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName()) + && roleName.equals(ClientReservedRoles.SUPERUSER_ROLE_DESCRIPTOR.getName()) == false; + final Set names = getSuperuserRole ? Sets.newHashSet(roleName, ClientReservedRoles.SUPERUSER_ROLE_DESCRIPTOR.getName()) : Collections.singleton(roleName); for (int i = 0; i < numberOfTimesToCall; i++) { future = new PlainActionFuture<>(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index b57c5ab6ead..3102852fee2 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -17,8 +17,8 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.security.audit.logfile.CapturingLogger; import org.elasticsearch.xpack.security.authz.RoleDescriptor; import org.elasticsearch.xpack.security.authz.permission.ClusterPermission; @@ -302,7 +302,7 @@ public class FileRolesStoreTests extends ESTestCase { try { 
Path roles = getDataPath("roles.yml"); Path home = createTempDir(); - Path xpackConf = home.resolve("config").resolve(XPackPlugin.NAME); + Path xpackConf = home.resolve("config").resolve(XpackField.NAME); Files.createDirectories(xpackConf); Path tmp = xpackConf.resolve("roles.yml"); try (OutputStream stream = Files.newOutputStream(tmp)) { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java index ec9de7a24b9..3ac056c8ab4 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java @@ -71,6 +71,7 @@ import org.elasticsearch.xpack.ml.action.UpdateProcessAction; import org.elasticsearch.xpack.ml.action.ValidateDetectorAction; import org.elasticsearch.xpack.ml.action.ValidateJobConfigAction; import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.ml.notifications.Auditor; import org.elasticsearch.xpack.monitoring.action.MonitoringBulkAction; import org.elasticsearch.xpack.security.action.role.PutRoleAction; @@ -109,22 +110,22 @@ public class ReservedRolesStoreTests extends ESTestCase { private static final String READ_CROSS_CLUSTER_NAME = "internal:transport/proxy/indices:data/read/query"; public void testIsReserved() { - assertThat(ReservedRolesStore.isReserved("kibana_system"), is(true)); - assertThat(ReservedRolesStore.isReserved("superuser"), is(true)); - assertThat(ReservedRolesStore.isReserved("foobar"), is(false)); - assertThat(ReservedRolesStore.isReserved(SystemUser.ROLE_NAME), is(true)); - assertThat(ReservedRolesStore.isReserved("transport_client"), is(true)); - assertThat(ReservedRolesStore.isReserved("kibana_user"), is(true)); - 
assertThat(ReservedRolesStore.isReserved("ingest_admin"), is(true)); - assertThat(ReservedRolesStore.isReserved("remote_monitoring_agent"), is(true)); - assertThat(ReservedRolesStore.isReserved("monitoring_user"), is(true)); - assertThat(ReservedRolesStore.isReserved("reporting_user"), is(true)); - assertThat(ReservedRolesStore.isReserved("machine_learning_user"), is(true)); - assertThat(ReservedRolesStore.isReserved("machine_learning_admin"), is(true)); - assertThat(ReservedRolesStore.isReserved("watcher_user"), is(true)); - assertThat(ReservedRolesStore.isReserved("watcher_admin"), is(true)); - assertThat(ReservedRolesStore.isReserved("kibana_dashboard_only_user"), is(true)); - assertThat(ReservedRolesStore.isReserved(XPackUser.ROLE_NAME), is(true)); + assertThat(ClientReservedRoles.isReserved("kibana_system"), is(true)); + assertThat(ClientReservedRoles.isReserved("superuser"), is(true)); + assertThat(ClientReservedRoles.isReserved("foobar"), is(false)); + assertThat(ClientReservedRoles.isReserved(SystemUser.ROLE_NAME), is(true)); + assertThat(ClientReservedRoles.isReserved("transport_client"), is(true)); + assertThat(ClientReservedRoles.isReserved("kibana_user"), is(true)); + assertThat(ClientReservedRoles.isReserved("ingest_admin"), is(true)); + assertThat(ClientReservedRoles.isReserved("remote_monitoring_agent"), is(true)); + assertThat(ClientReservedRoles.isReserved("monitoring_user"), is(true)); + assertThat(ClientReservedRoles.isReserved("reporting_user"), is(true)); + assertThat(ClientReservedRoles.isReserved("machine_learning_user"), is(true)); + assertThat(ClientReservedRoles.isReserved("machine_learning_admin"), is(true)); + assertThat(ClientReservedRoles.isReserved("watcher_user"), is(true)); + assertThat(ClientReservedRoles.isReserved("watcher_admin"), is(true)); + assertThat(ClientReservedRoles.isReserved("kibana_dashboard_only_user"), is(true)); + assertThat(ClientReservedRoles.isReserved(XPackUser.ROLE_NAME), is(true)); } public void 
testIngestAdminRole() { @@ -506,7 +507,7 @@ public class ReservedRolesStoreTests extends ESTestCase { assertNoAccessAllowed(role, "foo"); assertOnlyReadAllowed(role, MlMetaIndex.INDEX_NAME); assertOnlyReadAllowed(role, AnomalyDetectorsIndex.jobStateIndexName()); - assertOnlyReadAllowed(role, AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndex.RESULTS_INDEX_DEFAULT); + assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT); assertOnlyReadAllowed(role, Auditor.NOTIFICATIONS_INDEX); } @@ -556,7 +557,7 @@ public class ReservedRolesStoreTests extends ESTestCase { assertNoAccessAllowed(role, "foo"); assertNoAccessAllowed(role, MlMetaIndex.INDEX_NAME); assertNoAccessAllowed(role, AnomalyDetectorsIndex.jobStateIndexName()); - assertOnlyReadAllowed(role, AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndex.RESULTS_INDEX_DEFAULT); + assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT); assertOnlyReadAllowed(role, Auditor.NOTIFICATIONS_INDEX); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/crypto/tool/SystemKeyToolTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/crypto/tool/SystemKeyToolTests.java index e0694ee53c9..d7fa84a6656 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/crypto/tool/SystemKeyToolTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/crypto/tool/SystemKeyToolTests.java @@ -10,13 +10,12 @@ import com.google.common.jimfs.Jimfs; import org.apache.lucene.util.IOUtils; import org.elasticsearch.cli.Command; import org.elasticsearch.cli.CommandTestCase; -import org.elasticsearch.cli.Terminal; import org.elasticsearch.cli.UserException; import org.elasticsearch.common.io.PathUtilsForTesting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import 
org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XpackField; import org.junit.After; import java.nio.file.FileSystem; @@ -78,7 +77,7 @@ public class SystemKeyToolTests extends CommandTestCase { public void testGeneratePathInSettings() throws Exception { final Path homeDir = initFileSystem(false); - Path xpackConf = homeDir.resolve("config").resolve(XPackPlugin.NAME); + Path xpackConf = homeDir.resolve("config").resolve(XpackField.NAME); Files.createDirectories(xpackConf); execute("-Epath.home=" + homeDir.toString()); byte[] bytes = Files.readAllBytes(xpackConf.resolve("system_key")); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/test/SecurityAssertions.java b/plugin/src/test/java/org/elasticsearch/xpack/security/test/SecurityAssertions.java index 8bbe49b3d2d..156376ac755 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/test/SecurityAssertions.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/test/SecurityAssertions.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.security.test; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.XpackField; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; @@ -21,6 +21,6 @@ public class SecurityAssertions { assertThat(e.status(), is(RestStatus.UNAUTHORIZED)); assertThat(e.getHeaderKeys(), hasSize(1)); assertThat(e.getHeader("WWW-Authenticate"), notNullValue()); - assertThat(e.getHeader("WWW-Authenticate"), contains("Basic realm=\"" + XPackPlugin.SECURITY + "\" charset=\"UTF-8\"")); + assertThat(e.getHeader("WWW-Authenticate"), contains("Basic realm=\"" + XpackField.SECURITY + "\" charset=\"UTF-8\"")); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java 
b/plugin/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java index f8dde4c7806..5c0490a01fb 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.authc.file.FileRealm; import org.elasticsearch.xpack.ssl.SSLClientAuth; @@ -93,7 +94,7 @@ public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase public void testThatConnectionToServerTypeConnectionWorks() throws IOException, NodeValidationException { Path home = createTempDir(); - Path xpackConf = home.resolve("config").resolve(XPackPlugin.NAME); + Path xpackConf = home.resolve("config").resolve(XpackField.NAME); Files.createDirectories(xpackConf); Transport transport = internalCluster().getDataNodeInstance(Transport.class); @@ -124,7 +125,7 @@ public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase public void testThatConnectionToClientTypeConnectionIsRejected() throws IOException, NodeValidationException, InterruptedException { Path home = createTempDir(); - Path xpackConf = home.resolve("config").resolve(XPackPlugin.NAME); + Path xpackConf = home.resolve("config").resolve(XpackField.NAME); Files.createDirectories(xpackConf); writeFile(xpackConf, "users", configUsers()); writeFile(xpackConf, "users_roles", configUsersRoles()); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/user/UserTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/user/UserTests.java index 
e1a6c32c456..aa12ad643d1 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/user/UserTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/user/UserTests.java @@ -99,18 +99,27 @@ public class UserTests extends ESTestCase { public void testSystemUserReadAndWrite() throws Exception { BytesStreamOutput output = new BytesStreamOutput(); - User.writeTo(SystemUser.INSTANCE, output); - User readFrom = User.readFrom(output.bytes().streamInput()); + InternalUserSerializationHelper.writeTo(SystemUser.INSTANCE, output); + User readFrom = InternalUserSerializationHelper.readFrom(output.bytes().streamInput()); assertThat(readFrom, is(sameInstance(SystemUser.INSTANCE))); assertThat(readFrom.authenticatedUser(), is(SystemUser.INSTANCE)); } + public void testSystemUserFailsRead() throws Exception { + BytesStreamOutput output = new BytesStreamOutput(); + + InternalUserSerializationHelper.writeTo(SystemUser.INSTANCE, output); + AssertionError e = expectThrows(AssertionError.class, () -> User.readFrom(output.bytes().streamInput())); + + assertThat(e.getMessage(), is("should always return false. 
Internal users should use the InternalUserSerializationHelper")); + } + public void testXPackUserReadAndWrite() throws Exception { BytesStreamOutput output = new BytesStreamOutput(); - User.writeTo(XPackUser.INSTANCE, output); - User readFrom = User.readFrom(output.bytes().streamInput()); + InternalUserSerializationHelper.writeTo(XPackUser.INSTANCE, output); + User readFrom = InternalUserSerializationHelper.readFrom(output.bytes().streamInput()); assertThat(readFrom, is(sameInstance(XPackUser.INSTANCE))); assertThat(readFrom.authenticatedUser(), is(XPackUser.INSTANCE)); @@ -121,7 +130,7 @@ public class UserTests extends ESTestCase { output.writeBoolean(true); output.writeString(randomAlphaOfLengthBetween(4, 30)); try { - User.readFrom(output.bytes().streamInput()); + InternalUserSerializationHelper.readFrom(output.bytes().streamInput()); fail("system user had wrong name"); } catch (IllegalStateException e) { // expected diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/EncryptSensitiveDataBootstrapCheckTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/EncryptSensitiveDataBootstrapCheckTests.java index 5226291ec87..add4591d1ad 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/EncryptSensitiveDataBootstrapCheckTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/EncryptSensitiveDataBootstrapCheckTests.java @@ -35,7 +35,7 @@ public class EncryptSensitiveDataBootstrapCheckTests extends ESTestCase { public void testKeyInKeystore() { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setFile(Watcher.ENCRYPTION_KEY_SETTING.getKey(), CryptoServiceTests.generateKey()); + secureSettings.setFile(WatcherField.ENCRYPTION_KEY_SETTING.getKey(), CryptoServiceTests.generateKey()); Settings settings = Settings.builder() .put("path.home", createTempDir()) .put(Watcher.ENCRYPT_SENSITIVE_DATA_SETTING.getKey(), true) diff --git 
a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherIndexingListenerTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherIndexingListenerTests.java index 1ceeed71571..f07ee31470b 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherIndexingListenerTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherIndexingListenerTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.elasticsearch.xpack.watcher.watch.clock.ClockMock; import org.elasticsearch.xpack.watcher.WatcherIndexingListener.Configuration; import org.elasticsearch.xpack.watcher.WatcherIndexingListener.ShardAllocationConfiguration; @@ -72,7 +73,7 @@ import static org.mockito.Mockito.when; public class WatcherIndexingListenerTests extends ESTestCase { private WatcherIndexingListener listener; - private Watch.Parser parser = mock(Watch.Parser.class); + private WatchParser parser = mock(WatchParser.class); private ClockMock clock = new ClockMock(); private TriggerService triggerService = mock(TriggerService.class); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java index 274bd9b6f10..82237c927a1 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java @@ -5,21 +5,14 @@ */ package org.elasticsearch.xpack.watcher; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import 
org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; -import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.ClearScrollResponse; -import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponseSections; -import org.elasticsearch.action.search.SearchScrollAction; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.PlainActionFuture; @@ -39,7 +32,6 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.Index; @@ -54,6 +46,7 @@ import org.elasticsearch.xpack.watcher.execution.ExecutionService; import org.elasticsearch.xpack.watcher.execution.TriggeredWatchStore; import org.elasticsearch.xpack.watcher.trigger.TriggerService; import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.elasticsearch.xpack.watcher.watch.WatchStatus; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -64,13 +57,11 @@ import java.util.HashSet; import java.util.List; import static java.util.Arrays.asList; -import static org.elasticsearch.xpack.watcher.watch.Watch.INDEX; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.mockito.Matchers.any; import static 
org.mockito.Matchers.anyObject; import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -82,7 +73,7 @@ public class WatcherServiceTests extends ESTestCase { TriggeredWatchStore triggeredWatchStore = mock(TriggeredWatchStore.class); ExecutionService executionService = mock(ExecutionService.class); when(executionService.validate(anyObject())).thenReturn(true); - Watch.Parser parser = mock(Watch.Parser.class); + WatchParser parser = mock(WatchParser.class); WatcherService service = new WatcherService(Settings.EMPTY, triggerService, triggeredWatchStore, executionService, parser, mock(Client.class)); @@ -107,7 +98,7 @@ public class WatcherServiceTests extends ESTestCase { TriggeredWatchStore triggeredWatchStore = mock(TriggeredWatchStore.class); ExecutionService executionService = mock(ExecutionService.class); when(executionService.validate(anyObject())).thenReturn(true); - Watch.Parser parser = mock(Watch.Parser.class); + WatchParser parser = mock(WatchParser.class); Client client = mock(Client.class); ThreadPool threadPool = mock(ThreadPool.class); when(client.threadPool()).thenReturn(threadPool); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java index f5b55329803..afba65dae54 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java @@ -34,7 +34,7 @@ public class ActionWrapperTests extends ESTestCase { WatchStatus watchStatus = new WatchStatus(now, Collections.singletonMap("_action", createActionStatus(State.ACKED))); when(watch.status()).thenReturn(watchStatus); - ActionWrapper.Result result = actionWrapper.execute(mockExecutionContent(watch)); + 
ActionWrapperResult result = actionWrapper.execute(mockExecutionContent(watch)); assertThat(result.condition().met(), is(false)); assertThat(result.action().status(), is(Action.Result.Status.CONDITION_FAILED)); assertThat(watch.status().actionStatus("_action").ackStatus().state(), is(State.AWAITS_SUCCESSFUL_EXECUTION)); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailAttachmentTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailAttachmentTests.java index f23a218ee19..e4d6046d7f9 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailAttachmentTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/email/EmailAttachmentTests.java @@ -18,7 +18,7 @@ import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder; import org.elasticsearch.xpack.watcher.client.WatcherClient; import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate; import org.elasticsearch.xpack.watcher.common.http.Scheme; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.history.HistoryStore; import org.elasticsearch.xpack.watcher.notification.email.EmailTemplate; import org.elasticsearch.xpack.watcher.notification.email.attachment.DataAttachment; @@ -173,7 +173,7 @@ public class EmailAttachmentTests extends AbstractWatcherIntegrationTestCase { WatchSourceBuilder watchSourceBuilder = watchBuilder() .trigger(schedule(interval(5, IntervalSchedule.Interval.Unit.SECONDS))) .input(noneInput()) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_email", emailAction(emailBuilder).setAuthentication(EmailServer.USERNAME, EmailServer.PASSWORD.toCharArray()) .setAttachments(emailAttachments)); diff --git 
a/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java index 6ef5c04773c..168ec114b06 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java @@ -18,7 +18,7 @@ import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate; import org.elasticsearch.xpack.watcher.common.http.Scheme; import org.elasticsearch.xpack.watcher.common.http.auth.basic.BasicAuth; import org.elasticsearch.xpack.watcher.common.text.TextTemplate; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.history.WatchRecord; import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; @@ -77,7 +77,7 @@ public class WebhookHttpsIntegrationTests extends AbstractWatcherIntegrationTest .setSource(watchBuilder() .trigger(schedule(interval("5s"))) .input(simpleInput("key", "value")) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_id", ActionBuilders.webhookAction(builder))) .get(); @@ -116,7 +116,7 @@ public class WebhookHttpsIntegrationTests extends AbstractWatcherIntegrationTest .setSource(watchBuilder() .trigger(schedule(interval("5s"))) .input(simpleInput("key", "value")) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_id", ActionBuilders.webhookAction(builder))) .get(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookIntegrationTests.java 
b/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookIntegrationTests.java index 5ffa1c40524..cc9eb683edd 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookIntegrationTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookIntegrationTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.xpack.watcher.common.http.HttpMethod; import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate; import org.elasticsearch.xpack.watcher.common.http.auth.basic.BasicAuth; import org.elasticsearch.xpack.watcher.common.text.TextTemplate; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.history.WatchRecord; import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; @@ -84,7 +84,7 @@ public class WebhookIntegrationTests extends AbstractWatcherIntegrationTestCase .setSource(watchBuilder() .trigger(schedule(interval("5s"))) .input(simpleInput("key", "value")) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_id", ActionBuilders.webhookAction(builder))) .get(); @@ -124,7 +124,7 @@ public class WebhookIntegrationTests extends AbstractWatcherIntegrationTestCase .setSource(watchBuilder() .trigger(schedule(interval("5s"))) .input(simpleInput("key", "value")) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_id", ActionBuilders.webhookAction(builder))) .get(); @@ -157,7 +157,7 @@ public class WebhookIntegrationTests extends AbstractWatcherIntegrationTestCase .setSource(watchBuilder() .trigger(schedule(interval("5s"))) .input(simpleInput("key", "value")) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_id", 
ActionBuilders.webhookAction(builder))) .get(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/AlwaysConditionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/AlwaysConditionTests.java index 1ae8b03e1d9..5048e3da5e1 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/AlwaysConditionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/AlwaysConditionTests.java @@ -18,7 +18,7 @@ import static org.hamcrest.Matchers.containsString; public class AlwaysConditionTests extends ESTestCase { public void testExecute() throws Exception { - Condition alwaysTrue = AlwaysCondition.INSTANCE; + ExecutableCondition alwaysTrue = InternalAlwaysCondition.INSTANCE; assertTrue(alwaysTrue.execute(null).met()); } @@ -28,7 +28,7 @@ public class AlwaysConditionTests extends ESTestCase { builder.endObject(); XContentParser parser = createParser(builder); parser.nextToken(); - Condition executable = AlwaysCondition.parse("_id", parser); + ExecutableCondition executable = InternalAlwaysCondition.parse("_id", parser); assertTrue(executable.execute(null).met()); } @@ -40,16 +40,16 @@ public class AlwaysConditionTests extends ESTestCase { XContentParser parser = createParser(builder); parser.nextToken(); try { - AlwaysCondition.parse( "_id", parser); + InternalAlwaysCondition.parse( "_id", parser); fail("expected a condition exception trying to parse an invalid condition XContent, [" - + AlwaysCondition.TYPE + "] condition should not parse with a body"); + + InternalAlwaysCondition.TYPE + "] condition should not parse with a body"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), containsString("expected an empty object but found [foo]")); } } - public static Condition randomCondition(ScriptService scriptService) { - String type = randomFrom(ScriptCondition.TYPE, AlwaysCondition.TYPE, CompareCondition.TYPE, ArrayCompareCondition.TYPE); + public static ExecutableCondition 
randomCondition(ScriptService scriptService) { + String type = randomFrom(ScriptCondition.TYPE, InternalAlwaysCondition.TYPE, CompareCondition.TYPE, ArrayCompareCondition.TYPE); switch (type) { case ScriptCondition.TYPE: return new ScriptCondition(mockScript("_script"), scriptService); @@ -61,7 +61,7 @@ public class AlwaysConditionTests extends ESTestCase { randomFrom(ArrayCompareCondition.Op.values()), randomFrom(5, "3"), ArrayCompareCondition.Quantifier.SOME, Clock.systemUTC()); default: - return AlwaysCondition.INSTANCE; + return InternalAlwaysCondition.INSTANCE; } } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ArrayCompareConditionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ArrayCompareConditionTests.java index 59eaad0daf8..db161f171cc 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ArrayCompareConditionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ArrayCompareConditionTests.java @@ -184,7 +184,7 @@ public class ArrayCompareConditionTests extends ESTestCase { XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); parser.nextToken(); - ArrayCompareCondition condition = (ArrayCompareCondition) ArrayCompareCondition.parse(ClockMock.frozen(), "_id", parser); + ArrayCompareCondition condition = ArrayCompareCondition.parse(ClockMock.frozen(), "_id", parser); assertThat(condition, notNullValue()); assertThat(condition.getArrayPath(), is("key1.key2")); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionTests.java index 6302bb71b5c..5b1224ec219 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionTests.java @@ -171,7 +171,7 @@ public class 
CompareConditionTests extends ESTestCase { XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); parser.nextToken(); - CompareCondition condition = (CompareCondition) CompareCondition.parse(ClockMock.frozen(), "_id", parser); + CompareCondition condition = CompareCondition.parse(ClockMock.frozen(), "_id", parser); assertThat(condition, notNullValue()); assertThat(condition.getPath(), is("key1.key2")); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/NeverConditionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/NeverConditionTests.java index 827375cb94d..5c1561d085c 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/NeverConditionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/NeverConditionTests.java @@ -15,7 +15,7 @@ import static org.hamcrest.Matchers.containsString; public class NeverConditionTests extends ESTestCase { public void testExecute() throws Exception { - Condition executable = NeverCondition.INSTANCE; + ExecutableCondition executable = NeverCondition.INSTANCE; assertFalse(executable.execute(null).met()); } @@ -26,7 +26,7 @@ public class NeverConditionTests extends ESTestCase { XContentParser parser = createParser(builder); parser.nextToken(); - Condition executable = NeverCondition.parse("_id", parser); + ExecutableCondition executable = NeverCondition.parse("_id", parser); assertFalse(executable.execute(null).met()); } @@ -40,7 +40,7 @@ public class NeverConditionTests extends ESTestCase { try { NeverCondition.parse("_id", parser); fail("expected a condition exception trying to parse an invalid condition XContent, [" - + AlwaysCondition.TYPE + "] condition should not parse with a body"); + + InternalAlwaysCondition.TYPE + "] condition should not parse with a body"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), containsString("expected an empty object but found [foo]")); } diff --git 
a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java index 64df52ca37e..6812d58ee0a 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java @@ -115,7 +115,7 @@ public class ScriptConditionTests extends ESTestCase { XContentParser parser = createParser(builder); parser.nextToken(); - ScriptCondition executable = ScriptCondition.parse(scriptService, "_watch", parser); + ExecutableCondition executable = ScriptCondition.parse(scriptService, "_watch", parser); SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/crypto/CryptoServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/crypto/CryptoServiceTests.java index c7db484bc34..42451cf23ac 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/crypto/CryptoServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/crypto/CryptoServiceTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.watcher.Watcher; +import org.elasticsearch.xpack.watcher.WatcherField; import org.junit.Before; import javax.crypto.KeyGenerator; @@ -25,7 +25,7 @@ public class CryptoServiceTests extends ESTestCase { @Before public void init() throws Exception { MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setFile(Watcher.ENCRYPTION_KEY_SETTING.getKey(), generateKey()); + 
mockSecureSettings.setFile(WatcherField.ENCRYPTION_KEY_SETTING.getKey(), generateKey()); settings = Settings.builder() .setSecureSettings(mockSecureSettings) .build(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java index 3971f816bba..e237618927d 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java @@ -29,6 +29,11 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.watcher.actions.ActionWrapperResult; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.Condition; +import org.elasticsearch.xpack.watcher.condition.ExecutableCondition; +import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.elasticsearch.xpack.watcher.watch.clock.ClockMock; import org.elasticsearch.xpack.watcher.actions.Action; import org.elasticsearch.xpack.watcher.actions.ActionStatus; @@ -36,8 +41,6 @@ import org.elasticsearch.xpack.watcher.actions.ActionWrapper; import org.elasticsearch.xpack.watcher.actions.ExecutableAction; import org.elasticsearch.xpack.watcher.actions.throttler.ActionThrottler; import org.elasticsearch.xpack.watcher.actions.throttler.Throttler; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; -import org.elasticsearch.xpack.watcher.condition.Condition; import org.elasticsearch.xpack.watcher.condition.NeverCondition; import org.elasticsearch.xpack.watcher.history.HistoryStore; import org.elasticsearch.xpack.watcher.history.WatchRecord; @@ -108,7 +111,7 @@ public class ExecutionServiceTests extends ESTestCase { 
private ExecutionService executionService; private Clock clock; private Client client; - private Watch.Parser parser; + private WatchParser parser; @Before public void init() throws Exception { @@ -130,7 +133,7 @@ public class ExecutionServiceTests extends ESTestCase { ThreadPool threadPool = mock(ThreadPool.class); when(client.threadPool()).thenReturn(threadPool); when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); - parser = mock(Watch.Parser.class); + parser = mock(WatchParser.class); DiscoveryNode discoveryNode = new DiscoveryNode("node_1", ESTestCase.buildNewFakeTransportAddress(), Collections.emptyMap(), new HashSet<>(asList(DiscoveryNode.Role.values())), Version.CURRENT); @@ -155,8 +158,8 @@ public class ExecutionServiceTests extends ESTestCase { TriggeredExecutionContext context = new TriggeredExecutionContext(watch.id(), now, event, timeValueSeconds(5)); when(parser.parseWithSecrets(eq(watch.id()), eq(true), any(), any(), any())).thenReturn(watch); - Condition.Result conditionResult = AlwaysCondition.RESULT_INSTANCE; - Condition condition = mock(Condition.class); + Condition.Result conditionResult = InternalAlwaysCondition.RESULT_INSTANCE; + ExecutableCondition condition = mock(ExecutableCondition.class); // introduce a very short sleep time which we can use to check if the duration in milliseconds is correctly created long randomConditionDurationMs = randomIntBetween(5, 10); when(condition.execute(any(WatchExecutionContext.class))).then(invocationOnMock -> { @@ -178,11 +181,11 @@ public class ExecutionServiceTests extends ESTestCase { when(throttler.throttle("_action", context)).thenReturn(throttleResult); // action level conditional - Condition actionCondition = null; + ExecutableCondition actionCondition = null; Condition.Result actionConditionResult = null; if (randomBoolean()) { - Tuple pair = whenCondition(context); + Tuple pair = whenCondition(context); actionCondition = pair.v1(); actionConditionResult = pair.v2(); @@ 
-221,7 +224,7 @@ public class ExecutionServiceTests extends ESTestCase { assertThat(watchRecord.result().conditionResult(), sameInstance(conditionResult)); assertThat(watchRecord.result().transformResult(), sameInstance(watchTransformResult)); assertThat(watchRecord.getNodeId(), is("node_1")); - ActionWrapper.Result result = watchRecord.result().actionsResults().get("_action"); + ActionWrapperResult result = watchRecord.result().actionsResults().get("_action"); assertThat(result, notNullValue()); assertThat(result.id(), is("_action")); assertThat(result.condition(), sameInstance(actionConditionResult)); @@ -265,8 +268,8 @@ public class ExecutionServiceTests extends ESTestCase { when(inputResult.getException()).thenReturn(new IOException()); when(input.execute(eq(context), any(Payload.class))).thenReturn(inputResult); - Condition.Result conditionResult = AlwaysCondition.RESULT_INSTANCE; - Condition condition = mock(Condition.class); + Condition.Result conditionResult = InternalAlwaysCondition.RESULT_INSTANCE; + ExecutableCondition condition = mock(ExecutableCondition.class); when(condition.execute(any(WatchExecutionContext.class))).thenReturn(conditionResult); // watch level transform @@ -282,7 +285,7 @@ public class ExecutionServiceTests extends ESTestCase { when(throttler.throttle("_action", context)).thenReturn(throttleResult); // action level condition (unused) - Condition actionCondition = randomBoolean() ? mock(Condition.class) : null; + ExecutableCondition actionCondition = randomBoolean() ? mock(ExecutableCondition.class) : null; // action level transform (unused) ExecutableTransform actionTransform = randomBoolean() ? 
mock(ExecutableTransform.class) : null; @@ -328,7 +331,7 @@ public class ExecutionServiceTests extends ESTestCase { TriggeredExecutionContext context = new TriggeredExecutionContext(watch.id(), now, event, timeValueSeconds(5)); when(parser.parseWithSecrets(eq(watch.id()), eq(true), any(), any(), any())).thenReturn(watch); - Condition condition = mock(Condition.class); + ExecutableCondition condition = mock(ExecutableCondition.class); Condition.Result conditionResult = mock(Condition.Result.class); when(conditionResult.status()).thenReturn(Condition.Result.Status.FAILURE); when(conditionResult.reason()).thenReturn("_reason"); @@ -347,7 +350,7 @@ public class ExecutionServiceTests extends ESTestCase { when(throttler.throttle("_action", context)).thenReturn(throttleResult); // action level condition (unused) - Condition actionCondition = randomBoolean() ? mock(Condition.class) : null; + ExecutableCondition actionCondition = randomBoolean() ? mock(ExecutableCondition.class) : null; // action level transform (unused) ExecutableTransform actionTransform = randomBoolean() ? 
mock(ExecutableTransform.class) : null; @@ -393,8 +396,8 @@ public class ExecutionServiceTests extends ESTestCase { TriggeredExecutionContext context = new TriggeredExecutionContext(watch.id(), now, event, timeValueSeconds(5)); when(parser.parseWithSecrets(eq(watch.id()), eq(true), any(), any(), any())).thenReturn(watch); - Condition.Result conditionResult = AlwaysCondition.RESULT_INSTANCE; - Condition condition = mock(Condition.class); + Condition.Result conditionResult = InternalAlwaysCondition.RESULT_INSTANCE; + ExecutableCondition condition = mock(ExecutableCondition.class); when(condition.execute(any(WatchExecutionContext.class))).thenReturn(conditionResult); // watch level transform @@ -411,7 +414,7 @@ public class ExecutionServiceTests extends ESTestCase { when(throttler.throttle("_action", context)).thenReturn(throttleResult); // action level condition (unused) - Condition actionCondition = randomBoolean() ? mock(Condition.class) : null; + ExecutableCondition actionCondition = randomBoolean() ? mock(ExecutableCondition.class) : null; // action level transform (unused) ExecutableTransform actionTransform = randomBoolean() ? 
mock(ExecutableTransform.class) : null; @@ -457,8 +460,8 @@ public class ExecutionServiceTests extends ESTestCase { ScheduleTriggerEvent event = new ScheduleTriggerEvent("_id", now, now); TriggeredExecutionContext context = new TriggeredExecutionContext(watch.id(), now, event, timeValueSeconds(5)); - Condition.Result conditionResult = AlwaysCondition.RESULT_INSTANCE; - Condition condition = mock(Condition.class); + Condition.Result conditionResult = InternalAlwaysCondition.RESULT_INSTANCE; + ExecutableCondition condition = mock(ExecutableCondition.class); when(condition.execute(any(WatchExecutionContext.class))).thenReturn(conditionResult); // watch level transform @@ -475,11 +478,11 @@ public class ExecutionServiceTests extends ESTestCase { when(throttler.throttle("_action", context)).thenReturn(throttleResult); // action level condition - Condition actionCondition = null; + ExecutableCondition actionCondition = null; Condition.Result actionConditionResult = null; if (randomBoolean()) { - Tuple pair = whenCondition(context); + Tuple pair = whenCondition(context); actionCondition = pair.v1(); actionConditionResult = pair.v2(); @@ -535,8 +538,8 @@ public class ExecutionServiceTests extends ESTestCase { TriggeredExecutionContext context = new TriggeredExecutionContext(watch.id(), now, event, timeValueSeconds(5)); context.ensureWatchExists(() -> watch); - Condition.Result conditionResult = AlwaysCondition.RESULT_INSTANCE; - Condition condition = mock(Condition.class); + Condition.Result conditionResult = InternalAlwaysCondition.RESULT_INSTANCE; + ExecutableCondition condition = mock(ExecutableCondition.class); when(condition.execute(any(WatchExecutionContext.class))).thenReturn(conditionResult); // watch level transform @@ -552,11 +555,11 @@ public class ExecutionServiceTests extends ESTestCase { when(throttler.throttle("_action", context)).thenReturn(throttleResult); // action level conditional - Condition actionCondition = null; + ExecutableCondition actionCondition 
= null; Condition.Result actionConditionResult = null; if (randomBoolean()) { - Tuple pair = whenCondition(context); + Tuple pair = whenCondition(context); actionCondition = pair.v1(); actionConditionResult = pair.v2(); @@ -592,7 +595,7 @@ public class ExecutionServiceTests extends ESTestCase { WatchRecord watchRecord = executionService.executeInner(context); assertThat(watchRecord.result().conditionResult(), sameInstance(conditionResult)); assertThat(watchRecord.result().transformResult(), sameInstance(watchTransformResult)); - ActionWrapper.Result result = watchRecord.result().actionsResults().get("_action"); + ActionWrapperResult result = watchRecord.result().actionsResults().get("_action"); assertThat(result, notNullValue()); assertThat(result.id(), is("_action")); assertThat(result.condition(), sameInstance(actionConditionResult)); @@ -611,8 +614,8 @@ public class ExecutionServiceTests extends ESTestCase { TriggeredExecutionContext context = new TriggeredExecutionContext(watch.id(), now, event, timeValueSeconds(5)); context.ensureWatchExists(() -> watch); - Condition.Result conditionResult = AlwaysCondition.RESULT_INSTANCE; - Condition condition = mock(Condition.class); + Condition.Result conditionResult = InternalAlwaysCondition.RESULT_INSTANCE; + ExecutableCondition condition = mock(ExecutableCondition.class); when(condition.execute(any(WatchExecutionContext.class))).thenReturn(conditionResult); // action throttler @@ -623,7 +626,7 @@ public class ExecutionServiceTests extends ESTestCase { when(throttler.throttle("_action", context)).thenReturn(throttleResult); // unused with throttle - Condition actionCondition = mock(Condition.class); + ExecutableCondition actionCondition = mock(ExecutableCondition.class); ExecutableTransform actionTransform = mock(ExecutableTransform.class); ExecutableAction action = mock(ExecutableAction.class); @@ -641,7 +644,7 @@ public class ExecutionServiceTests extends ESTestCase { assertThat(watchRecord.result().conditionResult(), 
sameInstance(conditionResult)); assertThat(watchRecord.result().transformResult(), nullValue()); assertThat(watchRecord.result().actionsResults().size(), is(1)); - ActionWrapper.Result result = watchRecord.result().actionsResults().get("_action"); + ActionWrapperResult result = watchRecord.result().actionsResults().get("_action"); assertThat(result, notNullValue()); assertThat(result.id(), is("_action")); assertThat(result.condition(), nullValue()); @@ -663,8 +666,8 @@ public class ExecutionServiceTests extends ESTestCase { TriggeredExecutionContext context = new TriggeredExecutionContext(watch.id(), now, event, timeValueSeconds(5)); context.ensureWatchExists(() -> watch); - Condition.Result conditionResult = AlwaysCondition.RESULT_INSTANCE; - Condition condition = mock(Condition.class); + Condition.Result conditionResult = InternalAlwaysCondition.RESULT_INSTANCE; + ExecutableCondition condition = mock(ExecutableCondition.class); when(condition.execute(any(WatchExecutionContext.class))).thenReturn(conditionResult); // action throttler @@ -682,7 +685,7 @@ public class ExecutionServiceTests extends ESTestCase { when(actionConditionResult.status()).thenReturn(Condition.Result.Status.FAILURE); } when(actionConditionResult.met()).thenReturn(false); - Condition actionCondition = mock(Condition.class); + ExecutableCondition actionCondition = mock(ExecutableCondition.class); when(actionCondition.execute(context)).thenReturn(actionConditionResult); // unused with failed condition @@ -703,7 +706,7 @@ public class ExecutionServiceTests extends ESTestCase { assertThat(watchRecord.result().conditionResult(), sameInstance(conditionResult)); assertThat(watchRecord.result().transformResult(), nullValue()); assertThat(watchRecord.result().actionsResults().size(), is(1)); - ActionWrapper.Result result = watchRecord.result().actionsResults().get("_action"); + ActionWrapperResult result = watchRecord.result().actionsResults().get("_action"); assertThat(result, notNullValue()); 
assertThat(result.id(), is("_action")); assertThat(result.condition(), sameInstance(actionConditionResult)); @@ -726,8 +729,8 @@ public class ExecutionServiceTests extends ESTestCase { TriggeredExecutionContext context = new TriggeredExecutionContext(watch.id(), now, event, timeValueSeconds(5)); context.ensureWatchExists(() -> watch); - Condition.Result conditionResult = AlwaysCondition.RESULT_INSTANCE; - Condition condition = mock(Condition.class); + Condition.Result conditionResult = InternalAlwaysCondition.RESULT_INSTANCE; + ExecutableCondition condition = mock(ExecutableCondition.class); when(condition.execute(any(WatchExecutionContext.class))).thenReturn(conditionResult); // action throttler @@ -737,7 +740,7 @@ public class ExecutionServiceTests extends ESTestCase { when(throttler.throttle("_action", context)).thenReturn(throttleResult); // action condition (always fails) - Condition actionCondition = mock(Condition.class); + ExecutableCondition actionCondition = mock(ExecutableCondition.class); when(actionCondition.execute(context)).thenThrow(new IllegalArgumentException("[expected] failed for test")); // unused with failed condition @@ -759,7 +762,7 @@ public class ExecutionServiceTests extends ESTestCase { assertThat(watchRecord.result().conditionResult(), sameInstance(conditionResult)); assertThat(watchRecord.result().transformResult(), nullValue()); assertThat(watchRecord.result().actionsResults().size(), is(1)); - ActionWrapper.Result result = watchRecord.result().actionsResults().get("_action"); + ActionWrapperResult result = watchRecord.result().actionsResults().get("_action"); assertThat(result, notNullValue()); assertThat(result.id(), is("_action")); assertThat(result.condition(), nullValue()); @@ -782,7 +785,7 @@ public class ExecutionServiceTests extends ESTestCase { context.ensureWatchExists(() -> watch); Condition.Result conditionResult = NeverCondition.RESULT_INSTANCE; - Condition condition = mock(Condition.class); + ExecutableCondition 
condition = mock(ExecutableCondition.class); when(condition.execute(any(WatchExecutionContext.class))).thenReturn(conditionResult); // watch level transform @@ -790,7 +793,7 @@ public class ExecutionServiceTests extends ESTestCase { // action throttler ActionThrottler throttler = mock(ActionThrottler.class); - Condition actionCondition = mock(Condition.class); + ExecutableCondition actionCondition = mock(ExecutableCondition.class); ExecutableTransform actionTransform = mock(ExecutableTransform.class); ExecutableAction action = mock(ExecutableAction.class); ActionWrapper actionWrapper = new ActionWrapper("_action", throttler, actionCondition, actionTransform, action); @@ -873,7 +876,7 @@ public class ExecutionServiceTests extends ESTestCase { WatchStatus watchStatus = new WatchStatus(now, singletonMap("_action", new ActionStatus(now))); when(watch.input()).thenReturn(input); - when(watch.condition()).thenReturn(AlwaysCondition.INSTANCE); + when(watch.condition()).thenReturn(InternalAlwaysCondition.INSTANCE); when(watch.actions()).thenReturn(Collections.singletonList(actionWrapper)); when(watch.status()).thenReturn(watchStatus); @@ -1003,7 +1006,7 @@ public class ExecutionServiceTests extends ESTestCase { public void testUpdateWatchStatusDoesNotUpdateState() throws Exception { WatchStatus status = new WatchStatus(DateTime.now(UTC), Collections.emptyMap()); - Watch watch = new Watch("_id", new ManualTrigger(), new ExecutableNoneInput(logger), AlwaysCondition.INSTANCE, null, null, + Watch watch = new Watch("_id", new ManualTrigger(), new ExecutableNoneInput(logger), InternalAlwaysCondition.INSTANCE, null, null, Collections.emptyList(), null, status); final AtomicBoolean assertionsTriggered = new AtomicBoolean(false); @@ -1047,10 +1050,10 @@ public class ExecutionServiceTests extends ESTestCase { return ctx; } - private Tuple whenCondition(final WatchExecutionContext context) { + private Tuple whenCondition(final WatchExecutionContext context) { Condition.Result 
conditionResult = mock(Condition.Result.class); when(conditionResult.met()).thenReturn(true); - Condition condition = mock(Condition.class); + ExecutableCondition condition = mock(ExecutableCondition.class); when(condition.execute(context)).thenReturn(conditionResult); return new Tuple<>(condition, conditionResult); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java index 5f60525ae4e..e9f8d766911 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java @@ -14,9 +14,10 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.condition.CompareCondition; import org.elasticsearch.xpack.watcher.condition.Condition; +import org.elasticsearch.xpack.watcher.condition.ExecutableCondition; import org.elasticsearch.xpack.watcher.condition.NeverCondition; import org.elasticsearch.xpack.watcher.condition.ScriptCondition; import org.elasticsearch.xpack.watcher.execution.ExecutionState; @@ -49,13 +50,14 @@ public class HistoryActionConditionTests extends AbstractWatcherIntegrationTestC private final Input input = simpleInput("key", 15).build(); - private final Condition scriptConditionPasses = mockScriptCondition("return true;"); - private final Condition compareConditionPasses = new CompareCondition("ctx.payload.key", CompareCondition.Op.GTE, 15); - private final Condition conditionPasses = randomFrom(AlwaysCondition.INSTANCE, 
scriptConditionPasses, compareConditionPasses); + private final ExecutableCondition scriptConditionPasses = mockScriptCondition("return true;"); + private final ExecutableCondition compareConditionPasses = new CompareCondition("ctx.payload.key", CompareCondition.Op.GTE, 15); + private final ExecutableCondition conditionPasses = randomFrom(InternalAlwaysCondition.INSTANCE, + scriptConditionPasses, compareConditionPasses); - private final Condition scriptConditionFails = mockScriptCondition("return false;"); - private final Condition compareConditionFails = new CompareCondition("ctx.payload.key", CompareCondition.Op.LT, 15); - private final Condition conditionFails = randomFrom(NeverCondition.INSTANCE, scriptConditionFails, compareConditionFails); + private final ExecutableCondition scriptConditionFails = mockScriptCondition("return false;"); + private final ExecutableCondition compareConditionFails = new CompareCondition("ctx.payload.key", CompareCondition.Op.LT, 15); + private final ExecutableCondition conditionFails = randomFrom(NeverCondition.INSTANCE, scriptConditionFails, compareConditionFails); @Override protected List> pluginTypes() { @@ -88,9 +90,9 @@ public class HistoryActionConditionTests extends AbstractWatcherIntegrationTestC public void testActionConditionWithHardFailures() throws Exception { final String id = "testActionConditionWithHardFailures"; - final Condition scriptConditionFailsHard = mockScriptCondition("throw new IllegalStateException('failed');"); - final List actionConditionsWithFailure = - Arrays.asList(scriptConditionFailsHard, conditionPasses, AlwaysCondition.INSTANCE); + final ExecutableCondition scriptConditionFailsHard = mockScriptCondition("throw new IllegalStateException('failed');"); + final List actionConditionsWithFailure = + Arrays.asList(scriptConditionFailsHard, conditionPasses, InternalAlwaysCondition.INSTANCE); Collections.shuffle(actionConditionsWithFailure, random()); @@ -135,7 +137,11 @@ public class 
HistoryActionConditionTests extends AbstractWatcherIntegrationTestC @SuppressWarnings("unchecked") public void testActionConditionWithFailures() throws Exception { final String id = "testActionConditionWithFailures"; - final Condition[] actionConditionsWithFailure = new Condition[] { conditionFails, conditionPasses, AlwaysCondition.INSTANCE }; + final ExecutableCondition[] actionConditionsWithFailure = new ExecutableCondition[] { + conditionFails, + conditionPasses, + InternalAlwaysCondition.INSTANCE + }; Collections.shuffle(Arrays.asList(actionConditionsWithFailure), random()); final int failedIndex = Arrays.asList(actionConditionsWithFailure).indexOf(conditionFails); @@ -175,13 +181,13 @@ public class HistoryActionConditionTests extends AbstractWatcherIntegrationTestC @SuppressWarnings("unchecked") public void testActionCondition() throws Exception { final String id = "testActionCondition"; - final List actionConditions = new ArrayList<>(); + final List actionConditions = new ArrayList<>(); //actionConditions.add(conditionPasses); - actionConditions.add(AlwaysCondition.INSTANCE); + actionConditions.add(InternalAlwaysCondition.INSTANCE); /* if (randomBoolean()) { - actionConditions.add(AlwaysCondition.INSTANCE); + actionConditions.add(InternalAlwaysCondition.INSTANCE); } Collections.shuffle(actionConditions, random()); @@ -237,7 +243,10 @@ public class HistoryActionConditionTests extends AbstractWatcherIntegrationTestC * @param actionConditions The conditions to add to the Watch */ private void putAndTriggerWatch(final String id, final Input input, final Condition... 
actionConditions) { - WatchSourceBuilder source = watchBuilder().trigger(schedule(interval("5s"))).input(input).condition(AlwaysCondition.INSTANCE); + WatchSourceBuilder source = watchBuilder() + .trigger(schedule(interval("5s"))) + .input(input) + .condition(InternalAlwaysCondition.INSTANCE); for (int i = 0; i < actionConditions.length; ++i) { source.addAction("action" + i, actionConditions[i], loggingAction(Integer.toString(i))); @@ -256,7 +265,7 @@ public class HistoryActionConditionTests extends AbstractWatcherIntegrationTestC * @param inlineScript The script to "compile" and run * @return Never {@code null} */ - private static Condition mockScriptCondition(String inlineScript) { + private static ExecutableCondition mockScriptCondition(String inlineScript) { Script script = new Script(ScriptType.INLINE, MockScriptPlugin.NAME, inlineScript, Collections.emptyMap()); return new ScriptCondition(script); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java index 386affd815f..ff37198759d 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.watcher.actions.ActionStatus; -import org.elasticsearch.xpack.watcher.actions.ActionWrapper; +import org.elasticsearch.xpack.watcher.actions.ActionWrapperResult; import org.elasticsearch.xpack.watcher.actions.jira.JiraAction; import org.elasticsearch.xpack.watcher.common.http.HttpClient; import org.elasticsearch.xpack.watcher.common.http.HttpRequest; @@ -133,7 +133,7 @@ public class HistoryStoreTests extends ESTestCase { JiraAccount account = new JiraAccount("_account", 
settings, httpClient); JiraIssue jiraIssue = account.createIssue(singletonMap("foo", "bar"), null); - ActionWrapper.Result result = new ActionWrapper.Result(JiraAction.TYPE, new JiraAction.Executed(jiraIssue)); + ActionWrapperResult result = new ActionWrapperResult(JiraAction.TYPE, new JiraAction.Executed(jiraIssue)); DateTime now = new DateTime(0, UTC); Wid wid = new Wid("_name", now); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java index 486e85be448..10975e26241 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.execution.ExecutionState; import org.elasticsearch.xpack.watcher.notification.email.EmailTemplate; import org.elasticsearch.xpack.watcher.notification.email.support.EmailServer; @@ -68,7 +68,7 @@ public class HistoryTemplateEmailMappingsTests extends AbstractWatcherIntegratio PutWatchResponse putWatchResponse = watcherClient().preparePutWatch("_id").setSource(watchBuilder() .trigger(schedule(interval("5s"))) .input(simpleInput()) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_email", emailAction(EmailTemplate.builder() .from("from@example.com") .to("to1@example.com", "to2@example.com") diff --git 
a/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java index 35fd0c7d6e1..d6f324d29f1 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.xpack.watcher.common.http.HttpMethod; import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.execution.ExecutionState; import org.elasticsearch.xpack.watcher.support.xcontent.ObjectPath; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; @@ -67,7 +67,7 @@ public class HistoryTemplateHttpMappingsTests extends AbstractWatcherIntegration PutWatchResponse putWatchResponse = watcherClient().preparePutWatch("_id").setSource(watchBuilder() .trigger(schedule(interval("5s"))) .input(httpInput(HttpRequestTemplate.builder("localhost", webServer.getPort()).path("/input/path"))) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_webhook", webhookAction(HttpRequestTemplate.builder("localhost", webServer.getPort()) .path("/webhook/path") .method(HttpMethod.POST) @@ -133,7 +133,7 @@ public class HistoryTemplateHttpMappingsTests extends AbstractWatcherIntegration .input(httpInput(HttpRequestTemplate.builder("localhost", webServer.getPort()) .path("/") .readTimeout(abortAtInput ? 
TimeValue.timeValueMillis(10) : TimeValue.timeValueSeconds(10)))) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_webhook", webhookAction(HttpRequestTemplate.builder("localhost", webServer.getPort()) .readTimeout(TimeValue.timeValueMillis(10)) .path("/webhook/path") diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java index d5febecaeb2..a78976011e4 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.execution.ExecutionState; import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; @@ -47,7 +47,7 @@ public class HistoryTemplateSearchInputMappingsTests extends AbstractWatcherInte PutWatchResponse putWatchResponse = watcherClient().preparePutWatch("_id").setSource(watchBuilder() .trigger(schedule(interval("5s"))) .input(searchInput(request)) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("logger", loggingAction("indexed"))) .get(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateTimeMappingsTests.java 
b/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateTimeMappingsTests.java index 708ec313b01..153d4bd829e 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateTimeMappingsTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateTimeMappingsTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.execution.ExecutionState; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse; @@ -35,7 +35,7 @@ public class HistoryTemplateTimeMappingsTests extends AbstractWatcherIntegration PutWatchResponse putWatchResponse = watcherClient().preparePutWatch("_id").setSource(watchBuilder() .trigger(schedule(interval("5s"))) .input(simpleInput()) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_logging", loggingAction("foobar"))) .get(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/notification/email/EmailSecretsIntegrationTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/notification/email/EmailSecretsIntegrationTests.java index b73b114681a..43a02037401 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/notification/email/EmailSecretsIntegrationTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/notification/email/EmailSecretsIntegrationTests.java @@ -9,10 +9,10 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.common.settings.MockSecureSettings; 
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.xpack.watcher.Watcher; +import org.elasticsearch.xpack.watcher.WatcherField; import org.elasticsearch.xpack.watcher.actions.ActionBuilders; import org.elasticsearch.xpack.watcher.client.WatcherClient; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.crypto.CryptoService; import org.elasticsearch.xpack.watcher.crypto.CryptoServiceTests; import org.elasticsearch.xpack.watcher.execution.ActionExecutionMode; @@ -75,7 +75,7 @@ public class EmailSecretsIntegrationTests extends AbstractWatcherIntegrationTest .put("xpack.watcher.encrypt_sensitive_data", encryptSensitiveData); if (encryptSensitiveData) { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setFile(Watcher.ENCRYPTION_KEY_SETTING.getKey(), encryptionKey); + secureSettings.setFile(WatcherField.ENCRYPTION_KEY_SETTING.getKey(), encryptionKey); builder.setSecureSettings(secureSettings); } return builder.build(); @@ -87,7 +87,7 @@ public class EmailSecretsIntegrationTests extends AbstractWatcherIntegrationTest .setSource(watchBuilder() .trigger(schedule(cron("0 0 0 1 * ? 
2020"))) .input(simpleInput()) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_email", ActionBuilders.emailAction( EmailTemplate.builder() .from("_from") @@ -106,7 +106,7 @@ public class EmailSecretsIntegrationTests extends AbstractWatcherIntegrationTest if (encryptSensitiveData) { assertThat(value, not(is(EmailServer.PASSWORD))); MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setFile(Watcher.ENCRYPTION_KEY_SETTING.getKey(), encryptionKey); + mockSecureSettings.setFile(WatcherField.ENCRYPTION_KEY_SETTING.getKey(), encryptionKey); Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build(); CryptoService cryptoService = new CryptoService(settings); assertThat(new String(cryptoService.decrypt(((String) value).toCharArray())), is(EmailServer.PASSWORD)); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 173666d9282..53b8ac4c5d3 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -60,6 +60,7 @@ import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource; import org.elasticsearch.xpack.watcher.transport.actions.stats.WatcherStatsResponse; import org.elasticsearch.xpack.watcher.trigger.ScheduleTriggerEngineMock; import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.elasticsearch.xpack.watcher.watch.clock.ClockMock; import org.hamcrest.Matcher; import org.joda.time.DateTime; @@ -320,8 +321,8 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase return internalCluster().getInstance(type, internalCluster().getMasterName()); } - 
protected Watch.Parser watchParser() { - return getInstanceFromMaster(Watch.Parser.class); + protected WatchParser watchParser() { + return getInstanceFromMaster(WatchParser.class); } public AbstractWatcherIntegrationTestCase() { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherTestUtils.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherTestUtils.java index ca967f0b1f4..265967213ee 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherTestUtils.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherTestUtils.java @@ -27,7 +27,7 @@ import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate; import org.elasticsearch.xpack.watcher.common.secret.Secret; import org.elasticsearch.xpack.watcher.common.text.TextTemplate; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.execution.TriggeredExecutionContext; import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; import org.elasticsearch.xpack.watcher.execution.Wid; @@ -127,7 +127,7 @@ public final class WatcherTestUtils { Watch watch = new Watch("test-watch", new ScheduleTrigger(new IntervalSchedule(new IntervalSchedule.Interval(1, IntervalSchedule.Interval.Unit.MINUTES))), new ExecutableSimpleInput(new SimpleInput(new Payload.Simple()), logger), - AlwaysCondition.INSTANCE, + InternalAlwaysCondition.INSTANCE, null, null, new ArrayList<>(), @@ -174,7 +174,7 @@ public final class WatcherTestUtils { watchName, new ScheduleTrigger(new CronSchedule("0/5 * * * * ? 
*")), new ExecutableSimpleInput(new SimpleInput(new Payload.Simple(Collections.singletonMap("bar", "foo"))), logger), - AlwaysCondition.INSTANCE, + InternalAlwaysCondition.INSTANCE, new ExecutableSearchTransform(searchTransform, logger, client, searchTemplateService, TimeValue.timeValueMinutes(1)), new TimeValue(0), actions, diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/bench/ScheduleEngineTriggerBenchmark.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/bench/ScheduleEngineTriggerBenchmark.java index 0fc6d6fd034..b6a23ecf6a3 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/bench/ScheduleEngineTriggerBenchmark.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/bench/ScheduleEngineTriggerBenchmark.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.input.none.ExecutableNoneInput; import org.elasticsearch.xpack.watcher.trigger.TriggerEvent; import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleRegistry; @@ -62,7 +62,7 @@ public class ScheduleEngineTriggerBenchmark { List watches = new ArrayList<>(numWatches); for (int i = 0; i < numWatches; i++) { watches.add(new Watch("job_" + i, new ScheduleTrigger(interval(interval + "s")), new ExecutableNoneInput(logger), - AlwaysCondition.INSTANCE, null, null, Collections.emptyList(), null, null)); + InternalAlwaysCondition.INSTANCE, null, null, Collections.emptyList(), null, null)); } ScheduleRegistry scheduleRegistry = new ScheduleRegistry(emptySet()); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java 
b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java index 5a3ab55ebe8..a4760eb7368 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder; import org.elasticsearch.xpack.watcher.client.WatcherClient; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.condition.CompareCondition; import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest; import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource; @@ -268,7 +268,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase { .setSource(watchBuilder() .trigger(schedule(interval(-5, IntervalSchedule.Interval.Unit.SECONDS))) .input(simpleInput("key", "value")) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_logger", loggingAction("executed!"))) .get(); fail("put watch should have failed"); @@ -281,7 +281,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase { .setSource(watchBuilder() .trigger(schedule(hourly().minutes(-10).build())) .input(simpleInput("key", "value")) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_logger", loggingAction("executed!"))) .get(); fail("put watch should have failed"); @@ -294,7 +294,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase { .setSource(watchBuilder() .trigger(schedule(daily().atRoundHour(-10).build())) .input(simpleInput("key", "value")) - 
.condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_logger", loggingAction("executed!"))) .get(); fail("put watch should have failed"); @@ -308,7 +308,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase { .setSource(watchBuilder() .trigger(schedule(weekly().time(WeekTimes.builder().atRoundHour(-10).build()).build())) .input(simpleInput("key", "value")) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_logger", loggingAction("executed!"))) .get(); fail("put watch should have failed"); @@ -322,7 +322,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase { .setSource(watchBuilder() .trigger(schedule(monthly().time(MonthTimes.builder().atRoundHour(-10).build()).build())) .input(simpleInput("key", "value")) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_logger", loggingAction("executed!"))) .get(); fail("put watch should have failed"); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java index 0632239da28..3c98176ff73 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java @@ -13,9 +13,9 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.condition.CompareCondition; -import org.elasticsearch.xpack.watcher.condition.Condition; +import 
org.elasticsearch.xpack.watcher.condition.ExecutableCondition; import org.elasticsearch.xpack.watcher.execution.ExecutionState; import org.elasticsearch.xpack.watcher.execution.TriggeredWatch; import org.elasticsearch.xpack.watcher.execution.TriggeredWatchStore; @@ -27,6 +27,7 @@ import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; import org.elasticsearch.xpack.watcher.transport.actions.stats.WatcherStatsResponse; import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEvent; import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchField; import org.hamcrest.Matchers; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -71,12 +72,12 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase { public void testLoadMalformedWatchRecord() throws Exception { client().prepareIndex(Watch.INDEX, Watch.DOC_TYPE, "_id") .setSource(jsonBuilder().startObject() - .startObject(Watch.Field.TRIGGER.getPreferredName()) + .startObject(WatchField.TRIGGER.getPreferredName()) .startObject("schedule") .field("cron", "0/5 * * * * ? 
2050") .endObject() .endObject() - .startObject(Watch.Field.ACTIONS.getPreferredName()) + .startObject(WatchField.ACTIONS.getPreferredName()) .endObject() .endObject()) .get(); @@ -85,17 +86,17 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase { DateTime now = DateTime.now(UTC); Wid wid = new Wid("_id", now); ScheduleTriggerEvent event = new ScheduleTriggerEvent("_id", now, now); - Condition condition = AlwaysCondition.INSTANCE; + ExecutableCondition condition = InternalAlwaysCondition.INSTANCE; String index = HistoryStore.getHistoryIndexNameForTime(now); client().prepareIndex(index, HistoryStore.DOC_TYPE, wid.value()) .setSource(jsonBuilder().startObject() .startObject(WatchRecord.TRIGGER_EVENT.getPreferredName()) .field(event.type(), event) .endObject() - .startObject(Watch.Field.CONDITION.getPreferredName()) + .startObject(WatchField.CONDITION.getPreferredName()) .field(condition.type(), condition) .endObject() - .startObject(Watch.Field.INPUT.getPreferredName()) + .startObject(WatchField.INPUT.getPreferredName()) .startObject("none").endObject() .endObject() .endObject()) @@ -110,10 +111,10 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase { .startObject(WatchRecord.TRIGGER_EVENT.getPreferredName()) .field(event.type(), event) .endObject() - .startObject(Watch.Field.CONDITION.getPreferredName()) + .startObject(WatchField.CONDITION.getPreferredName()) .startObject("unknown").endObject() .endObject() - .startObject(Watch.Field.INPUT.getPreferredName()) + .startObject(WatchField.INPUT.getPreferredName()) .startObject("none").endObject() .endObject() .endObject()) @@ -128,10 +129,10 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase { .startObject(WatchRecord.TRIGGER_EVENT.getPreferredName()) .startObject("unknown").endObject() .endObject() - .startObject(Watch.Field.CONDITION.getPreferredName()) + .startObject(WatchField.CONDITION.getPreferredName()) .field(condition.type(), condition) 
.endObject() - .startObject(Watch.Field.INPUT.getPreferredName()) + .startObject(WatchField.INPUT.getPreferredName()) .startObject("none").endObject() .endObject() .endObject()) @@ -195,7 +196,7 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase { watcherClient().preparePutWatch(watchId).setSource(watchBuilder() .trigger(schedule(cron("0/5 * * * * ? 2050"))) .input(searchInput(request)) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_id", indexAction("output", "test")) .defaultThrottlePeriod(TimeValue.timeValueMillis(0)) ).get(); @@ -239,7 +240,7 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase { watcherClient().preparePutWatch(watchId).setSource(watchBuilder() .trigger(schedule(cron("0/5 * * * * ? 2050"))) .input(searchInput(request)) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_id", indexAction("output", "test")) .defaultThrottlePeriod(TimeValue.timeValueMillis(0)) ).get(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HipChatServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HipChatServiceTests.java index 51bdc8f6692..073966bc01b 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HipChatServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HipChatServiceTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.xpack.watcher.WatcherService; import org.elasticsearch.xpack.watcher.WatcherState; import org.elasticsearch.xpack.watcher.actions.hipchat.HipChatAction; import org.elasticsearch.xpack.watcher.client.WatcherClient; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.history.HistoryStore; import 
org.elasticsearch.xpack.watcher.notification.hipchat.HipChatAccount; import org.elasticsearch.xpack.watcher.notification.hipchat.HipChatMessage; @@ -170,7 +170,7 @@ public class HipChatServiceTests extends XPackSingleNodeTestCase { PutWatchResponse putWatchResponse = watcherClient.preparePutWatch(id).setSource(watchBuilder() .trigger(schedule(interval("10m"))) .input(simpleInput("ref", "HipChatServiceTests#testWatchWithHipChatAction")) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("hipchat", actionBuilder)) .execute().get(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HttpSecretsIntegrationTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HttpSecretsIntegrationTests.java index a178eeb7854..aed553f76c1 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HttpSecretsIntegrationTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/HttpSecretsIntegrationTests.java @@ -11,12 +11,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; -import org.elasticsearch.xpack.watcher.Watcher; +import org.elasticsearch.xpack.watcher.WatcherField; import org.elasticsearch.xpack.watcher.client.WatcherClient; import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate; import org.elasticsearch.xpack.watcher.common.http.auth.basic.ApplicableBasicAuth; import org.elasticsearch.xpack.watcher.common.http.auth.basic.BasicAuth; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.crypto.CryptoService; import org.elasticsearch.xpack.watcher.crypto.CryptoServiceTests; import 
org.elasticsearch.xpack.watcher.execution.ActionExecutionMode; @@ -78,7 +78,7 @@ public class HttpSecretsIntegrationTests extends AbstractWatcherIntegrationTestC } if (encryptSensitiveData) { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setFile(Watcher.ENCRYPTION_KEY_SETTING.getKey(), encryptionKey); + secureSettings.setFile(WatcherField.ENCRYPTION_KEY_SETTING.getKey(), encryptionKey); return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) .put("xpack.watcher.encrypt_sensitive_data", encryptSensitiveData) @@ -96,7 +96,7 @@ public class HttpSecretsIntegrationTests extends AbstractWatcherIntegrationTestC .input(httpInput(HttpRequestTemplate.builder(webServer.getHostName(), webServer.getPort()) .path("/") .auth(new BasicAuth(USERNAME, PASSWORD.toCharArray())))) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_logging", loggingAction("executed"))) .get(); @@ -111,7 +111,7 @@ public class HttpSecretsIntegrationTests extends AbstractWatcherIntegrationTestC if (encryptSensitiveData) { assertThat(value, not(is((Object) PASSWORD))); MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setFile(Watcher.ENCRYPTION_KEY_SETTING.getKey(), encryptionKey); + mockSecureSettings.setFile(WatcherField.ENCRYPTION_KEY_SETTING.getKey(), encryptionKey); Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build(); CryptoService cryptoService = new CryptoService(settings); assertThat(new String(cryptoService.decrypt(((String) value).toCharArray())), is(PASSWORD)); @@ -161,7 +161,7 @@ public class HttpSecretsIntegrationTests extends AbstractWatcherIntegrationTestC .setSource(watchBuilder() .trigger(schedule(cron("0 0 0 1 * ? 
2020"))) .input(simpleInput()) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_webhook", webhookAction(HttpRequestTemplate.builder(webServer.getHostName(), webServer.getPort()) .path("/") .auth(new BasicAuth(USERNAME, PASSWORD.toCharArray()))))) @@ -179,7 +179,7 @@ public class HttpSecretsIntegrationTests extends AbstractWatcherIntegrationTestC if (encryptSensitiveData) { assertThat(value, not(is((Object) PASSWORD))); MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setFile(Watcher.ENCRYPTION_KEY_SETTING.getKey(), encryptionKey); + mockSecureSettings.setFile(WatcherField.ENCRYPTION_KEY_SETTING.getKey(), encryptionKey); Settings settings = Settings.builder().setSecureSettings(mockSecureSettings).build(); CryptoService cryptoService = new CryptoService(settings); assertThat(new String(cryptoService.decrypt(((String) value).toCharArray())), is(PASSWORD)); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/PagerDutyServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/PagerDutyServiceTests.java index d0275a94c79..55a7bc54323 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/PagerDutyServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/PagerDutyServiceTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.junit.annotations.Network; import org.elasticsearch.xpack.watcher.actions.pagerduty.PagerDutyAction; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.notification.pagerduty.IncidentEvent; import org.elasticsearch.xpack.watcher.notification.pagerduty.IncidentEventContext; import 
org.elasticsearch.xpack.watcher.notification.pagerduty.PagerDutyAccount; @@ -73,7 +73,7 @@ public class PagerDutyServiceTests extends AbstractWatcherIntegrationTestCase { PutWatchResponse putWatchResponse = watcherClient().preparePutWatch("1").setSource(watchBuilder() .trigger(schedule(interval("10m"))) .input(simpleInput("ref", "testWatchWithPagerDutyAction()")) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("pd", actionBuilder)) .execute().get(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SlackServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SlackServiceTests.java index c1d1c707c52..806b1d640ea 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SlackServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SlackServiceTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.junit.annotations.Network; import org.elasticsearch.xpack.watcher.actions.slack.SlackAction; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.notification.slack.SentMessages; import org.elasticsearch.xpack.watcher.notification.slack.SlackAccount; import org.elasticsearch.xpack.watcher.notification.slack.SlackService; @@ -94,7 +94,7 @@ public class SlackServiceTests extends AbstractWatcherIntegrationTestCase { PutWatchResponse putWatchResponse = watcherClient().preparePutWatch("1").setSource(watchBuilder() .trigger(schedule(interval("10m"))) .input(simpleInput("ref", "testWatchWithSlackAction()")) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("slack", actionBuilder)) .execute().get(); diff --git 
a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java index f8c3337828c..895f37f0f8d 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.watcher.actions.logging.LoggingAction; import org.elasticsearch.xpack.watcher.actions.logging.LoggingLevel; import org.elasticsearch.xpack.watcher.common.text.TextTemplate; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.condition.CompareCondition; import org.elasticsearch.xpack.watcher.execution.ActionExecutionMode; import org.elasticsearch.xpack.watcher.history.HistoryStore; @@ -78,7 +78,7 @@ public class WatchMetadataTests extends AbstractWatcherIntegrationTestCase { .setSource(watchBuilder() .trigger(schedule(cron("0 0 0 1 1 ? 
2050"))) .input(noneInput()) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("testLogger", loggingAction) .defaultThrottlePeriod(TimeValue.timeValueSeconds(0)) .metadata(metadata)) diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java index 7a05a4ce066..d2fe58536f9 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.support.search.WatcherSearchTemplateRequest; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse; @@ -121,7 +121,7 @@ public class TransformIntegrationTests extends AbstractWatcherIntegrationTestCas .setSource(watchBuilder() .trigger(schedule(interval("5s"))) .input(simpleInput(MapBuilder.newMapBuilder().put("key1", 10).put("key2", 10))) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .transform(scriptTransform(script)) .addAction("_id", indexAction("output1", "type"))) .get(); @@ -131,7 +131,7 @@ public class TransformIntegrationTests extends AbstractWatcherIntegrationTestCas .setSource(watchBuilder() .trigger(schedule(interval("5s"))) .input(simpleInput(MapBuilder.newMapBuilder().put("key1", 10).put("key2", 10))) - .condition(AlwaysCondition.INSTANCE) + 
.condition(InternalAlwaysCondition.INSTANCE) .addAction("_id", scriptTransform(script), indexAction("output2", "type"))) .get(); assertThat(putWatchResponse.isCreated(), is(true)); @@ -211,7 +211,7 @@ public class TransformIntegrationTests extends AbstractWatcherIntegrationTestCas .setSource(watchBuilder() .trigger(schedule(interval("5s"))) .input(simpleInput(MapBuilder.newMapBuilder().put("key1", 10).put("key2", 10))) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .transform(chainTransform(scriptTransform(script1), scriptTransform(script2))) .addAction("_id", indexAction("output1", "type"))) .get(); @@ -221,7 +221,7 @@ public class TransformIntegrationTests extends AbstractWatcherIntegrationTestCas .setSource(watchBuilder() .trigger(schedule(interval("5s"))) .input(simpleInput(MapBuilder.newMapBuilder().put("key1", 10).put("key2", 10))) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_id", chainTransform(scriptTransform(script1), scriptTransform(script2)), indexAction("output2", "type"))) .get(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java index c76c1348036..655430bf623 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.watcher.transport.action.execute; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.watcher.actions.ActionStatus; import org.elasticsearch.xpack.watcher.client.WatcherClient; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import 
org.elasticsearch.xpack.watcher.execution.ActionExecutionMode; import org.elasticsearch.xpack.watcher.execution.Wid; import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource; @@ -38,7 +38,7 @@ public class ExecuteWatchTests extends AbstractWatcherIntegrationTestCase { .setSource(watchBuilder() .trigger(schedule(cron("0/5 * * * * ? 2099"))) .input(simpleInput("foo", "bar")) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("log", loggingAction("_text"))) .get(); @@ -81,7 +81,7 @@ public class ExecuteWatchTests extends AbstractWatcherIntegrationTestCase { .trigger(schedule(interval("1s"))) // run every second so we can ack it .input(simpleInput("foo", "bar")) .defaultThrottlePeriod(TimeValue.timeValueMillis(0)) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("log", loggingAction("_text"))) .get(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java index 81913534107..0477c6ca7ce 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/get/GetWatchTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.watcher.transport.action.get; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchRequest; @@ -35,7 +35,7 @@ public class GetWatchTests extends 
AbstractWatcherIntegrationTestCase { PutWatchResponse putResponse = watcherClient().preparePutWatch("_name").setSource(watchBuilder() .trigger(schedule(interval("5m"))) .input(simpleInput()) - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .addAction("_action1", loggingAction("{{ctx.watch_id}}"))) .get(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java index 7bdcde97833..d65ac4c39a0 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.watcher.Watcher; import org.elasticsearch.xpack.watcher.test.WatchExecutionContextMockBuilder; import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.elasticsearch.xpack.watcher.watch.clock.ClockMock; import org.junit.Before; import org.mockito.ArgumentCaptor; @@ -54,7 +55,7 @@ public class TransportPutWatchActionTests extends ESTestCase { TransportService transportService = mock(TransportService.class); - Watch.Parser parser = mock(Watch.Parser.class); + WatchParser parser = mock(WatchParser.class); when(parser.parseWithSecrets(eq("_id"), eq(false), anyObject(), anyObject(), anyObject())).thenReturn(watch); Client client = mock(Client.class); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java index be415890cf6..de8b498aa0e 100644 --- 
a/plugin/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleEngineTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.watcher.trigger.schedule.engine; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.watcher.watch.clock.ClockMock; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.input.none.ExecutableNoneInput; import org.elasticsearch.xpack.watcher.trigger.TriggerEngine; import org.elasticsearch.xpack.watcher.trigger.TriggerEvent; @@ -251,7 +251,7 @@ public class TickerScheduleEngineTests extends ESTestCase { private Watch createWatch(String name, Schedule schedule) { return new Watch(name, new ScheduleTrigger(schedule), new ExecutableNoneInput(logger), - AlwaysCondition.INSTANCE, null, null, + InternalAlwaysCondition.INSTANCE, null, null, Collections.emptyList(), null, null); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java index 55c751d7f54..5f1538f1704 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java @@ -50,13 +50,13 @@ import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuthRegistry; import org.elasticsearch.xpack.watcher.common.http.auth.basic.BasicAuthFactory; import org.elasticsearch.xpack.watcher.common.text.TextTemplate; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import 
org.elasticsearch.xpack.watcher.condition.AlwaysConditionTests; import org.elasticsearch.xpack.watcher.condition.ArrayCompareCondition; import org.elasticsearch.xpack.watcher.condition.CompareCondition; -import org.elasticsearch.xpack.watcher.condition.Condition; import org.elasticsearch.xpack.watcher.condition.ConditionFactory; import org.elasticsearch.xpack.watcher.condition.ConditionRegistry; +import org.elasticsearch.xpack.watcher.condition.ExecutableCondition; import org.elasticsearch.xpack.watcher.condition.NeverCondition; import org.elasticsearch.xpack.watcher.condition.ScriptCondition; import org.elasticsearch.xpack.watcher.input.ExecutableInput; @@ -190,7 +190,7 @@ public class WatchTests extends ESTestCase { ExecutableInput input = randomInput(); InputRegistry inputRegistry = registry(input.type()); - Condition condition = AlwaysConditionTests.randomCondition(scriptService); + ExecutableCondition condition = AlwaysConditionTests.randomCondition(scriptService); ConditionRegistry conditionRegistry = conditionRegistry(); ExecutableTransform transform = randomTransform(); @@ -212,7 +212,7 @@ public class WatchTests extends ESTestCase { BytesReference bytes = jsonBuilder().value(watch).bytes(); logger.info("{}", bytes.utf8ToString()); - Watch.Parser watchParser = new Watch.Parser(settings, triggerService, actionRegistry, inputRegistry, null, clock); + WatchParser watchParser = new WatchParser(settings, triggerService, actionRegistry, inputRegistry, null, clock); Watch parsedWatch = watchParser.parse("_name", includeStatus, bytes, XContentType.JSON); @@ -255,7 +255,7 @@ public class WatchTests extends ESTestCase { } WatchStatus watchStatus = new WatchStatus(new DateTime(clock.millis()), unmodifiableMap(actionsStatuses)); - Watch.Parser watchParser = new Watch.Parser(settings, triggerService, actionRegistry, inputRegistry, null, clock); + WatchParser watchParser = new WatchParser(settings, triggerService, actionRegistry, inputRegistry, null, clock); 
XContentBuilder builder = jsonBuilder().startObject().startObject("trigger").endObject().field("status", watchStatus).endObject(); Watch watch = watchParser.parse("foo", true, builder.bytes(), XContentType.JSON); assertThat(watch.status().state().getTimestamp().getMillis(), is(clock.millis())); @@ -283,7 +283,7 @@ public class WatchTests extends ESTestCase { .startObject() .startArray("actions").endArray() .endObject(); - Watch.Parser watchParser = new Watch.Parser(settings, triggerService, actionRegistry, inputRegistry, null, clock); + WatchParser watchParser = new WatchParser(settings, triggerService, actionRegistry, inputRegistry, null, clock); try { watchParser.parse("failure", false, jsonBuilder.bytes(), XContentType.JSON); fail("This watch should fail to parse as actions is an array"); @@ -305,16 +305,16 @@ public class WatchTests extends ESTestCase { XContentBuilder builder = jsonBuilder(); builder.startObject(); - builder.startObject(Watch.Field.TRIGGER.getPreferredName()) + builder.startObject(WatchField.TRIGGER.getPreferredName()) .field(ScheduleTrigger.TYPE, schedule(schedule).build()) .endObject(); builder.endObject(); - Watch.Parser watchParser = new Watch.Parser(settings, triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC()); + WatchParser watchParser = new WatchParser(settings, triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC()); Watch watch = watchParser.parse("failure", false, builder.bytes(), XContentType.JSON); assertThat(watch, notNullValue()); assertThat(watch.trigger(), instanceOf(ScheduleTrigger.class)); assertThat(watch.input(), instanceOf(ExecutableNoneInput.class)); - assertThat(watch.condition(), instanceOf(AlwaysCondition.class)); + assertThat(watch.condition(), instanceOf(InternalAlwaysCondition.class)); assertThat(watch.transform(), nullValue()); assertThat(watch.actions(), notNullValue()); assertThat(watch.actions().size(), is(0)); @@ -330,7 +330,7 @@ public class WatchTests extends ESTestCase { 
InputRegistry inputRegistry = registry(SearchInput.TYPE); TransformRegistry transformRegistry = transformRegistry(); ActionRegistry actionRegistry = registry(Collections.emptyList(), conditionRegistry, transformRegistry); - Watch.Parser watchParser = new Watch.Parser(settings, triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC()); + WatchParser watchParser = new WatchParser(settings, triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC()); WatcherSearchTemplateService searchTemplateService = new WatcherSearchTemplateService(settings, scriptService, xContentRegistry()); @@ -394,7 +394,7 @@ public class WatchTests extends ESTestCase { .endObject().endObject(); builder.endObject(); - Watch.Parser parser = createWatchparser(); + WatchParser parser = createWatchparser(); Watch watch = parser.parse("_id", false, builder.bytes(), XContentType.JSON); assertThat(watch, is(notNullValue())); assertThat(watch.input().type(), is(NoneInput.TYPE)); @@ -410,7 +410,7 @@ public class WatchTests extends ESTestCase { builder.startObject("condition").startObject("always").endObject().endObject(); builder.endObject(); - Watch.Parser parser = createWatchparser(); + WatchParser parser = createWatchparser(); Watch watch = parser.parse("_id", false, builder.bytes(), XContentType.JSON); assertThat(watch, is(notNullValue())); assertThat(watch.actions(), hasSize(0)); @@ -428,14 +428,14 @@ public class WatchTests extends ESTestCase { .endObject().endObject(); builder.endObject(); - Watch.Parser parser = createWatchparser(); + WatchParser parser = createWatchparser(); ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> parser.parse("_id", false, builder.bytes(), XContentType.JSON)); assertThat(e.getMessage(), is("could not parse watch [_id]. 
missing required field [trigger]")); } } - private Watch.Parser createWatchparser() throws Exception { + private WatchParser createWatchparser() throws Exception { LoggingAction loggingAction = new LoggingAction(new TextTemplate("foo"), null, null); List actions = Collections.singletonList(new ActionWrapper("_logging_", randomThrottler(), null, null, new ExecutableLoggingAction(loggingAction, logger, settings, new MockTextTemplateEngine()))); @@ -450,7 +450,7 @@ public class WatchTests extends ESTestCase { TransformRegistry transformRegistry = transformRegistry(); ActionRegistry actionRegistry = registry(actions, conditionRegistry, transformRegistry); - return new Watch.Parser(settings, triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC()); + return new WatchParser(settings, triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC()); } private static Schedule randomSchedule() { @@ -533,7 +533,7 @@ public class WatchTests extends ESTestCase { private ConditionRegistry conditionRegistry() { Map parsers = new HashMap<>(); - parsers.put(AlwaysCondition.TYPE, (c, id, p) -> AlwaysCondition.parse(id, p)); + parsers.put(InternalAlwaysCondition.TYPE, (c, id, p) -> InternalAlwaysCondition.parse(id, p)); parsers.put(NeverCondition.TYPE, (c, id, p) -> NeverCondition.parse(id, p)); parsers.put(ArrayCompareCondition.TYPE, (c, id, p) -> ArrayCompareCondition.parse(c, id, p)); parsers.put(CompareCondition.TYPE, (c, id, p) -> CompareCondition.parse(c, id, p)); diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 7aa6ce89777..d93935c9c4d 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -26,7 +26,7 @@ import 
org.elasticsearch.xpack.security.support.IndexLifecycleManager; import org.elasticsearch.xpack.test.rest.XPackRestTestHelper; import org.elasticsearch.xpack.watcher.actions.logging.LoggingAction; import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder; -import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.support.xcontent.ObjectPath; import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule; import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger; @@ -358,7 +358,7 @@ public class FullClusterRestartIT extends ESRestTestCase { private void assertBasicWatchInteractions() throws Exception { String watch = new WatchSourceBuilder() - .condition(AlwaysCondition.INSTANCE) + .condition(InternalAlwaysCondition.INSTANCE) .trigger(ScheduleTrigger.builder(new IntervalSchedule(IntervalSchedule.Interval.seconds(1)))) .addAction("awesome", LoggingAction.builder(new TextTemplate("test"))).buildAsBytes(XContentType.JSON).utf8ToString(); Map put = toMap(client().performRequest("PUT", "_xpack/watcher/watch/new_watch", emptyMap(), diff --git a/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/actions/20_jira.yml b/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/actions/20_jira.yml index 69411086fbc..631f3d01070 100644 --- a/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/actions/20_jira.yml +++ b/qa/smoke-test-watcher/src/test/resources/rest-api-spec/test/actions/20_jira.yml @@ -207,7 +207,7 @@ - is_false: hits.hits.0._source.result.actions.0.jira.fields.issuetype.name - match: { hits.hits.0._source.result.actions.0.jira.fields.project.key: "XWT" } - match: { hits.hits.0._source.result.actions.0.jira.fields.labels.0: "integration-tests" } - - match: { hits.hits.0._source.result.actions.0.jira.reason: "Bad Request - Field [issuetype] has error [issue type is required]\n" } + - match: { 
hits.hits.0._source.result.actions.0.jira.reason: "Bad Request - Field [issuetype] has error [issue type is required]\n" } - match: { hits.hits.0._source.result.actions.0.jira.request.method: "post" } - match: { hits.hits.0._source.result.actions.0.jira.request.path: "/rest/api/2/issue" } - match: { hits.hits.0._source.result.actions.0.jira.request.auth.basic.username: "xpack-user@elastic.co" } diff --git a/qa/sql/security/build.gradle b/qa/sql/security/build.gradle index c3c842b61d2..7dd0ce96271 100644 --- a/qa/sql/security/build.gradle +++ b/qa/sql/security/build.gradle @@ -23,6 +23,7 @@ subprojects { testCompile(project(path: ':x-pack-elasticsearch:plugin', configuration: 'runtime')) { transitive = false } + testCompile project(':x-pack-elasticsearch:plugin:core') } integTestCluster { diff --git a/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/MLTransportClientIT.java b/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/MLTransportClientIT.java index 7361b3292c8..d614fdcdc67 100644 --- a/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/MLTransportClientIT.java +++ b/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/MLTransportClientIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.client; import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.XPackClient; diff --git a/qa/tribe-tests-with-license/src/test/java/org/elasticsearch/license/TribeTransportTestCase.java b/qa/tribe-tests-with-license/src/test/java/org/elasticsearch/license/TribeTransportTestCase.java index 9a038b48c8b..9f8488b33e3 100644 --- a/qa/tribe-tests-with-license/src/test/java/org/elasticsearch/license/TribeTransportTestCase.java +++ 
b/qa/tribe-tests-with-license/src/test/java/org/elasticsearch/license/TribeTransportTestCase.java @@ -35,6 +35,7 @@ import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.tribe.TribePlugin; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.XpackField; import org.elasticsearch.xpack.ml.MachineLearning; import java.nio.file.Path; @@ -63,11 +64,11 @@ public abstract class TribeTransportTestCase extends ESIntegTestCase { .put(NetworkModule.HTTP_ENABLED.getKey(), false) .put("transport.type", getTestTransportType()); List enabledFeatures = enabledFeatures(); - builder.put(XPackSettings.SECURITY_ENABLED.getKey(), enabledFeatures.contains(XPackPlugin.SECURITY)); - builder.put(XPackSettings.MONITORING_ENABLED.getKey(), enabledFeatures.contains(XPackPlugin.MONITORING)); - builder.put(XPackSettings.WATCHER_ENABLED.getKey(), enabledFeatures.contains(XPackPlugin.WATCHER)); - builder.put(XPackSettings.GRAPH_ENABLED.getKey(), enabledFeatures.contains(XPackPlugin.GRAPH)); - builder.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), enabledFeatures.contains(XPackPlugin.MACHINE_LEARNING)); + builder.put(XPackSettings.SECURITY_ENABLED.getKey(), enabledFeatures.contains(XpackField.SECURITY)); + builder.put(XPackSettings.MONITORING_ENABLED.getKey(), enabledFeatures.contains(XpackField.MONITORING)); + builder.put(XPackSettings.WATCHER_ENABLED.getKey(), enabledFeatures.contains(XpackField.WATCHER)); + builder.put(XPackSettings.GRAPH_ENABLED.getKey(), enabledFeatures.contains(XpackField.GRAPH)); + builder.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), enabledFeatures.contains(XpackField.MACHINE_LEARNING)); builder.put(MachineLearning.AUTODETECT_PROCESS.getKey(), false); return builder.build(); }