diff --git a/plugin/build.gradle b/plugin/build.gradle
index f99cb4d5d25..3bc33e2ce22 100644
--- a/plugin/build.gradle
+++ b/plugin/build.gradle
@@ -258,6 +258,7 @@ integTestCluster {
   setting 'xpack.security.transport.ssl.enabled', 'true'
   setting 'xpack.security.transport.ssl.keystore.path', nodeKeystore.name
   setting 'xpack.security.transport.ssl.verification_mode', 'certificate'
+  setting 'xpack.security.audit.enabled', 'true'
   keystoreSetting 'bootstrap.password', 'x-pack-test-password'
   keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass'
   distribution = 'zip' // this is important since we use the reindex module in ML
diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ClientHelper.java b/plugin/src/main/java/org/elasticsearch/xpack/ClientHelper.java
new file mode 100644
index 00000000000..8878f44817f
--- /dev/null
+++ b/plugin/src/main/java/org/elasticsearch/xpack/ClientHelper.java
@@ -0,0 +1,101 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack;
+
+import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.support.ContextPreservingActionListener;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.FilterClient;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+
+import java.util.function.BiConsumer;
+import java.util.function.Supplier;
+
+/**
+ * Utility class to help with the execution of requests made using a {@link Client} such that they
+ * have the origin as a transient and listeners have the appropriate context upon invocation
+ */
+public final class ClientHelper {
+
+    public static final String ACTION_ORIGIN_TRANSIENT_NAME = "action.origin";
+    public static final String SECURITY_ORIGIN = "security";
+    public static final String WATCHER_ORIGIN = "watcher";
+    public static final String ML_ORIGIN = "ml";
+    public static final String MONITORING_ORIGIN = "monitoring";
+    public static final String DEPRECATION_ORIGIN = "deprecation";
+    public static final String PERSISTENT_TASK_ORIGIN = "persistent_tasks";
+
+    private ClientHelper() {}
+
+    /**
+     * Stashes the current context and sets the origin in the current context. The original context is returned as a stored context
+     */
+    public static ThreadContext.StoredContext stashWithOrigin(ThreadContext threadContext, String origin) {
+        final ThreadContext.StoredContext storedContext = threadContext.stashContext();
+        threadContext.putTransient(ACTION_ORIGIN_TRANSIENT_NAME, origin);
+        return storedContext;
+    }
+
+    /**
+     * Returns a client that will always set the appropriate origin and ensure the proper context is restored by listeners
+     */
+    public static Client clientWithOrigin(Client client, String origin) {
+        return new ClientWithOrigin(client, origin);
+    }
+
+    /**
+     * Executes a consumer after setting the origin and wrapping the listener so that the proper context is restored
+     */
+    public static <Request extends ActionRequest, Response extends ActionResponse> void executeAsyncWithOrigin(
+            ThreadContext threadContext, String origin, Request request, ActionListener<Response> listener,
+            BiConsumer<Request, ActionListener<Response>> consumer) {
+        final Supplier<ThreadContext.StoredContext> supplier = threadContext.newRestorableContext(false);
+        try (ThreadContext.StoredContext ignore = stashWithOrigin(threadContext, origin)) {
+            consumer.accept(request, new ContextPreservingActionListener<>(supplier, listener));
+        }
+    }
+
+    /**
+     * Executes an asynchronous action using the provided client. The origin is set in the context and the listener
+     * is wrapped to ensure the proper context is restored
+     */
+    public static <Request extends ActionRequest, Response extends ActionResponse,
+            RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void executeAsyncWithOrigin(
+            Client client, String origin, Action<Request, Response, RequestBuilder> action, Request request,
+            ActionListener<Response> listener) {
+        final ThreadContext threadContext = client.threadPool().getThreadContext();
+        final Supplier<ThreadContext.StoredContext> supplier = threadContext.newRestorableContext(false);
+        try (ThreadContext.StoredContext ignore = stashWithOrigin(threadContext, origin)) {
+            client.execute(action, request, new ContextPreservingActionListener<>(supplier, listener));
+        }
+    }
+
+    private static final class ClientWithOrigin extends FilterClient {
+
+        private final String origin;
+
+        private ClientWithOrigin(Client in, String origin) {
+            super(in);
+            this.origin = origin;
+        }
+
+        @Override
+        protected <Request extends ActionRequest, Response extends ActionResponse,
+                RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void doExecute(
+                Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
+            final Supplier<ThreadContext.StoredContext> supplier = in().threadPool().getThreadContext().newRestorableContext(false);
+            try (ThreadContext.StoredContext ignore = in().threadPool().getThreadContext().stashContext()) {
+                in().threadPool().getThreadContext().putTransient(ACTION_ORIGIN_TRANSIENT_NAME, origin);
+                super.doExecute(action, request, new ContextPreservingActionListener<>(supplier, listener));
+            }
+        }
+    }
+
+}
diff --git a/plugin/src/main/java/org/elasticsearch/xpack/XPackPlugin.java b/plugin/src/main/java/org/elasticsearch/xpack/XPackPlugin.java
index 4b3ff172b01..f4a9c551ec7 100644
--- a/plugin/src/main/java/org/elasticsearch/xpack/XPackPlugin.java
+++ b/plugin/src/main/java/org/elasticsearch/xpack/XPackPlugin.java
@@ -80,7 +80,6 @@ import org.elasticsearch.xpack.persistent.StartPersistentTaskAction;
 import org.elasticsearch.xpack.persistent.UpdatePersistentTaskStatusAction;
 import org.elasticsearch.xpack.rest.action.RestXPackInfoAction;
 import org.elasticsearch.xpack.rest.action.RestXPackUsageAction;
-import org.elasticsearch.xpack.security.InternalClient;
 import org.elasticsearch.xpack.security.Security;
 import org.elasticsearch.xpack.security.authc.AuthenticationService;
 import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken;
@@ -249,9 +248,6 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I
         List<Object> components = new ArrayList<>();
         components.add(sslService);
-        final InternalClient
internalClient = new InternalClient(settings, threadPool, client); - components.add(internalClient); - LicenseService licenseService = new LicenseService(settings, clusterService, getClock(), env, resourceWatcherService, licenseState); components.add(licenseService); @@ -263,20 +259,18 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I } catch (final Exception e) { throw new IllegalStateException("security initialization failed", e); } - components.addAll(monitoring.createComponents(internalClient, threadPool, clusterService, licenseService, sslService)); + components.addAll(monitoring.createComponents(client, threadPool, clusterService, licenseService, sslService)); - components.addAll(watcher.createComponents(getClock(), scriptService, internalClient, licenseState, threadPool, clusterService, + components.addAll(watcher.createComponents(getClock(), scriptService, client, licenseState, threadPool, clusterService, xContentRegistry, sslService)); - PersistentTasksService persistentTasksService = new PersistentTasksService(settings, clusterService, threadPool, internalClient); + PersistentTasksService persistentTasksService = new PersistentTasksService(settings, clusterService, threadPool, client); - components.addAll(machineLearning.createComponents(internalClient, clusterService, threadPool, xContentRegistry, + components.addAll(machineLearning.createComponents(client, clusterService, threadPool, xContentRegistry, persistentTasksService)); List> tasksExecutors = new ArrayList<>(); tasksExecutors.addAll(machineLearning.createPersistentTasksExecutors(clusterService)); - components.addAll(logstash.createComponents(internalClient, clusterService)); - components.addAll(upgrade.createComponents(client, clusterService, threadPool, resourceWatcherService, scriptService, xContentRegistry)); @@ -451,6 +445,8 @@ public class XPackPlugin extends Plugin implements ScriptPlugin, ActionPlugin, I return templates -> { templates = watcher.getIndexTemplateMetaDataUpgrader().apply(templates); templates = security.getIndexTemplateMetaDataUpgrader().apply(templates); + templates = logstash.getIndexTemplateMetaDataUpgrader().apply(templates); + templates = machineLearning.getIndexTemplateMetaDataUpgrader().apply(templates); return templates; }; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java b/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index 5dc032ef370..9fbbf892298 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.deprecation; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; @@ -23,6 +22,7 @@ import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBui import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -34,17 +34,14 @@ import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.node.NodeService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.XPackPlugin; -import org.elasticsearch.xpack.deprecation.DeprecationIssue.Level; -import org.elasticsearch.xpack.security.InternalClient; import java.io.IOException; import java.util.Arrays; @@ -52,11 +49,12 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Set; import java.util.function.BiFunction; import java.util.function.Function; import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xpack.ClientHelper.DEPRECATION_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.deprecation.DeprecationChecks.CLUSTER_SETTINGS_CHECKS; import static org.elasticsearch.xpack.deprecation.DeprecationChecks.INDEX_SETTINGS_CHECKS; import static org.elasticsearch.xpack.deprecation.DeprecationChecks.NODE_SETTINGS_CHECKS; @@ -281,14 +279,14 @@ public class DeprecationInfoAction extends Action { private final XPackLicenseState licenseState; - private final InternalClient client; + private final NodeClient client; private final IndexNameExpressionResolver indexNameExpressionResolver; @Inject public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - XPackLicenseState licenseState, InternalClient client) { + XPackLicenseState licenseState, NodeClient client) { super(settings, DeprecationInfoAction.NAME, transportService, clusterService, threadPool, actionFilters, Request::new, indexNameExpressionResolver); this.licenseState = licenseState; @@ -318,22 +316,26 @@ public class DeprecationInfoAction extends Action { - if (nodesInfoResponse.hasFailures()) { - throw nodesInfoResponse.failures().get(0); - } - client.admin().cluster().nodesStats(nodesStatsRequest, ActionListener.wrap( - nodesStatsResponse -> { - if (nodesStatsResponse.hasFailures()) { - throw nodesStatsResponse.failures().get(0); - } - listener.onResponse(Response.from(nodesInfoResponse.getNodes(), - nodesStatsResponse.getNodes(), state, indexNameExpressionResolver, - request.indices(), request.indicesOptions(), CLUSTER_SETTINGS_CHECKS, - NODE_SETTINGS_CHECKS, INDEX_SETTINGS_CHECKS)); - }, listener::onFailure)); - },listener::onFailure)); + final ThreadContext threadContext = client.threadPool().getThreadContext(); + executeAsyncWithOrigin(threadContext, DEPRECATION_ORIGIN, nodesInfoRequest, ActionListener.wrap( + nodesInfoResponse -> { + if (nodesInfoResponse.hasFailures()) { + throw nodesInfoResponse.failures().get(0); + } + executeAsyncWithOrigin(threadContext, DEPRECATION_ORIGIN, nodesStatsRequest, + ActionListener.wrap( + nodesStatsResponse -> { + if (nodesStatsResponse.hasFailures()) { + throw 
nodesStatsResponse.failures().get(0); + } + listener.onResponse(Response.from(nodesInfoResponse.getNodes(), + nodesStatsResponse.getNodes(), state, indexNameExpressionResolver, + request.indices(), request.indicesOptions(), + CLUSTER_SETTINGS_CHECKS, NODE_SETTINGS_CHECKS, + INDEX_SETTINGS_CHECKS)); + }, listener::onFailure), + client.admin().cluster()::nodesStats); + }, listener::onFailure), client.admin().cluster()::nodesInfo); } else { listener.onFailure(LicenseUtils.newComplianceException(XPackPlugin.DEPRECATION)); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java b/plugin/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java index 9a232052830..540f5b6d6a1 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java @@ -5,18 +5,22 @@ */ package org.elasticsearch.xpack.logstash; -import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; -import org.elasticsearch.xpack.security.InternalClient; +import org.elasticsearch.xpack.template.TemplateUtils; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.List; +import java.util.Map; +import java.util.function.UnaryOperator; +import java.util.regex.Pattern; /** * This class activates/deactivates the logstash modules depending if we're running a node client or transport client @@ -24,13 +28,14 @@ import java.util.List; public class Logstash implements ActionPlugin { public static final String NAME = "logstash"; + private static final String LOGSTASH_TEMPLATE_NAME = "logstash-index-template"; + private static final String TEMPLATE_VERSION_PATTERN = + Pattern.quote("${logstash.template.version}"); - private final Settings settings; private final boolean enabled; private final boolean transportClientMode; public Logstash(Settings settings) { - this.settings = settings; this.enabled = XPackSettings.LOGSTASH_ENABLED.get(settings); this.transportClientMode = XPackPlugin.transportClientMode(settings); } @@ -51,11 +56,11 @@ public class Logstash implements ActionPlugin { return modules; } - public Collection createComponents(InternalClient client, ClusterService clusterService) { - if (this.transportClientMode || enabled == false) { - return Collections.emptyList(); - } - - return Collections.singletonList(new LogstashTemplateRegistry(settings, clusterService, client)); + public UnaryOperator> getIndexTemplateMetaDataUpgrader() { + return templates -> { + TemplateUtils.loadTemplateIntoMap("/" + LOGSTASH_TEMPLATE_NAME + ".json", templates, LOGSTASH_TEMPLATE_NAME, + Version.CURRENT.toString(), TEMPLATE_VERSION_PATTERN, Loggers.getLogger(Logstash.class)); + return templates; + }; } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/logstash/LogstashTemplateRegistry.java b/plugin/src/main/java/org/elasticsearch/xpack/logstash/LogstashTemplateRegistry.java deleted file mode 100644 index cb37640cd86..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/logstash/LogstashTemplateRegistry.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.logstash; - -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.gateway.GatewayService; -import org.elasticsearch.xpack.template.TemplateUtils; - -import java.nio.charset.StandardCharsets; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.regex.Pattern; - -/** - * Registry for the Logstash index template and settings - * This class is based on xpack.security.SecurityLifecycleService. - */ -public class LogstashTemplateRegistry extends AbstractComponent implements ClusterStateListener { - - public static final String LOGSTASH_INDEX_NAME = ".logstash"; - public static final String LOGSTASH_TEMPLATE_NAME = "logstash-index-template"; - public static final String TEMPLATE_VERSION_PATTERN = - Pattern.quote("${logstash.template.version}"); - - private static final String LOGSTASH_VERSION_PROPERTY = "logstash-version"; - - private final Client client; - - private final AtomicBoolean templateIsUpToDate = new AtomicBoolean(false); - - // only put the template if this is not already in progress - private final AtomicBoolean templateCreationPending = new AtomicBoolean(false); - - public LogstashTemplateRegistry(Settings settings, ClusterService clusterService, Client client) { - super(settings); - this.client = client; - clusterService.addListener(this); - } - - @Override - public void clusterChanged(ClusterChangedEvent event) { - if (event.localNodeMaster()) { - - // wait until the gateway has recovered from disk, - // otherwise we think may not have the index templates while they actually do exist - if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) == false) { - addTemplatesIfMissing(event.state()); - } - } - } - - public boolean isTemplateUpToDate() { - return templateIsUpToDate.get(); - } - - public boolean isTemplateCreationPending() { - return templateCreationPending.get(); - } - - private void addTemplatesIfMissing(ClusterState state) { - this.templateIsUpToDate.set(TemplateUtils.checkTemplateExistsAndIsUpToDate(LOGSTASH_TEMPLATE_NAME, - LOGSTASH_VERSION_PROPERTY, state, logger)); - - // only put the template if its not up to date and if its not already in progress - if (isTemplateUpToDate() == false && templateCreationPending.compareAndSet(false, true)) { - putTemplate(); - } - } - - private void putTemplate() { - logger.debug("putting the template [{}]", LOGSTASH_TEMPLATE_NAME); - String template = TemplateUtils.loadTemplate("/" + LOGSTASH_TEMPLATE_NAME + ".json", - Version.CURRENT.toString(), 
TEMPLATE_VERSION_PATTERN); - - PutIndexTemplateRequest putTemplateRequest = client.admin().indices() - .preparePutTemplate(LOGSTASH_TEMPLATE_NAME) - .setSource( - new BytesArray(template.getBytes(StandardCharsets.UTF_8)), - XContentType.JSON) - .request(); - - client.admin().indices().putTemplate(putTemplateRequest, ActionListener.wrap(r -> { - templateCreationPending.set(false); - if (r.isAcknowledged()) { - templateIsUpToDate.set(true); - logger.debug("successfully updated [{}] index template", LOGSTASH_TEMPLATE_NAME); - } else { - logger.error("put template [{}] was not acknowledged", LOGSTASH_TEMPLATE_NAME); - } - }, e -> { - templateCreationPending.set(false); - logger.warn(new ParameterizedMessage( - "failed to put template [{}]", LOGSTASH_TEMPLATE_NAME), e); - })); - } -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 8327d5e4607..05b1f41357b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -5,18 +5,26 @@ */ package org.elasticsearch.xpack.ml; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NamedDiff; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -27,7 +35,9 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.monitor.os.OsProbe; import org.elasticsearch.monitor.os.OsStats; @@ -84,6 +94,8 @@ import org.elasticsearch.xpack.ml.datafeed.DatafeedState; import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.UpdateJobProcessNotifier; import org.elasticsearch.xpack.ml.job.config.JobTaskStatus; +import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; @@ -99,6 +111,7 @@ import 
org.elasticsearch.xpack.ml.job.process.normalizer.MultiplyingNormalizerPr import org.elasticsearch.xpack.ml.job.process.normalizer.NativeNormalizerProcessFactory; import org.elasticsearch.xpack.ml.job.process.normalizer.NormalizerFactory; import org.elasticsearch.xpack.ml.job.process.normalizer.NormalizerProcessFactory; +import org.elasticsearch.xpack.ml.notifications.AuditMessage; import org.elasticsearch.xpack.ml.notifications.Auditor; import org.elasticsearch.xpack.ml.rest.RestDeleteExpiredDataAction; import org.elasticsearch.xpack.ml.rest.datafeeds.RestDeleteDatafeedAction; @@ -138,7 +151,7 @@ import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.persistent.PersistentTasksExecutor; import org.elasticsearch.xpack.persistent.PersistentTasksNodeService; import org.elasticsearch.xpack.persistent.PersistentTasksService; -import org.elasticsearch.xpack.security.InternalClient; +import org.elasticsearch.xpack.template.TemplateUtils; import java.io.IOException; import java.math.BigInteger; @@ -147,7 +160,9 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.function.Supplier; +import java.util.function.UnaryOperator; import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.XPackPlugin.MACHINE_LEARNING; @@ -175,6 +190,8 @@ public class MachineLearning implements ActionPlugin { public static final TimeValue STATE_PERSIST_RESTORE_TIMEOUT = TimeValue.timeValueMinutes(30); + private static final Logger logger = Loggers.getLogger(XPackPlugin.class); + private final Settings settings; private final Environment env; private final XPackLicenseState licenseState; @@ -310,19 +327,19 @@ public class MachineLearning implements ActionPlugin { ); } - public Collection createComponents(InternalClient internalClient, ClusterService clusterService, ThreadPool threadPool, + public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, NamedXContentRegistry xContentRegistry, PersistentTasksService persistentTasksService) { if (enabled == false || transportClientMode || tribeNode || tribeNodeClient) { return emptyList(); } - Auditor auditor = new Auditor(internalClient, clusterService); - JobProvider jobProvider = new JobProvider(internalClient, settings); - UpdateJobProcessNotifier notifier = new UpdateJobProcessNotifier(settings, internalClient, clusterService, threadPool); - JobManager jobManager = new JobManager(settings, jobProvider, clusterService, auditor, internalClient, notifier); + Auditor auditor = new Auditor(client, clusterService); + JobProvider jobProvider = new JobProvider(client, settings); + UpdateJobProcessNotifier notifier = new UpdateJobProcessNotifier(settings, client, clusterService, threadPool); + JobManager jobManager = new JobManager(settings, jobProvider, clusterService, auditor, client, notifier); - JobDataCountsPersister jobDataCountsPersister = new JobDataCountsPersister(settings, internalClient); - JobResultsPersister jobResultsPersister = new JobResultsPersister(settings, internalClient); + JobDataCountsPersister jobDataCountsPersister = new JobDataCountsPersister(settings, client); + JobResultsPersister jobResultsPersister = new JobResultsPersister(settings, client); AutodetectProcessFactory autodetectProcessFactory; NormalizerProcessFactory normalizerProcessFactory; @@ -333,7 +350,7 @@ public class MachineLearning implements ActionPlugin { // This will only only 
happen when path.home is not set, which is disallowed in production throw new ElasticsearchException("Failed to create native process controller for Machine Learning"); } - autodetectProcessFactory = new NativeAutodetectProcessFactory(env, settings, nativeController, internalClient); + autodetectProcessFactory = new NativeAutodetectProcessFactory(env, settings, nativeController, client); normalizerProcessFactory = new NativeNormalizerProcessFactory(env, settings, nativeController); } catch (IOException e) { // This also should not happen in production, as the MachineLearningFeatureSet should have @@ -349,12 +366,12 @@ public class MachineLearning implements ActionPlugin { } NormalizerFactory normalizerFactory = new NormalizerFactory(normalizerProcessFactory, threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME)); - AutodetectProcessManager autodetectProcessManager = new AutodetectProcessManager(settings, internalClient, threadPool, + AutodetectProcessManager autodetectProcessManager = new AutodetectProcessManager(settings, client, threadPool, jobManager, jobProvider, jobResultsPersister, jobDataCountsPersister, autodetectProcessFactory, normalizerFactory, xContentRegistry, auditor); this.autodetectProcessManager.set(autodetectProcessManager); - DatafeedJobBuilder datafeedJobBuilder = new DatafeedJobBuilder(internalClient, jobProvider, auditor, System::currentTimeMillis); - DatafeedManager datafeedManager = new DatafeedManager(threadPool, internalClient, clusterService, datafeedJobBuilder, + DatafeedJobBuilder datafeedJobBuilder = new DatafeedJobBuilder(client, jobProvider, auditor, System::currentTimeMillis); + DatafeedManager datafeedManager = new DatafeedManager(threadPool, client, clusterService, datafeedJobBuilder, System::currentTimeMillis, auditor, persistentTasksService); this.datafeedManager.set(datafeedManager); MlLifeCycleService mlLifeCycleService = new MlLifeCycleService(env, clusterService, datafeedManager, autodetectProcessManager); @@ -366,8 +383,7 @@ public class MachineLearning implements ActionPlugin { jobProvider, jobManager, autodetectProcessManager, - new MachineLearningTemplateRegistry(settings, clusterService, internalClient, threadPool), - new MlInitializationService(settings, threadPool, clusterService, internalClient), + new MlInitializationService(settings, threadPool, clusterService, client), jobDataCountsPersister, datafeedManager, auditor, @@ -516,6 +532,100 @@ public class MachineLearning implements ActionPlugin { return Arrays.asList(autoDetect, renormalizer, datafeed); } + public UnaryOperator> getIndexTemplateMetaDataUpgrader() { + return templates -> { + final TimeValue delayedNodeTimeOutSetting; + // Whether we are using native process is a good way to detect whether we are in dev / test mode: + if (MachineLearning.AUTODETECT_PROCESS.get(settings)) { + delayedNodeTimeOutSetting = UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.get(settings); + } else { + delayedNodeTimeOutSetting = TimeValue.timeValueNanos(0); + } + + try (XContentBuilder auditMapping = ElasticsearchMappings.auditMessageMapping()) { + IndexTemplateMetaData notificationMessageTemplate = IndexTemplateMetaData.builder(Auditor.NOTIFICATIONS_INDEX) + .putMapping(AuditMessage.TYPE.getPreferredName(), auditMapping.string()) + .patterns(Collections.singletonList(Auditor.NOTIFICATIONS_INDEX)) + .version(Version.CURRENT.id) + .settings(Settings.builder() + // Our indexes are small and one shard puts the + // least possible burden on Elasticsearch + 
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting)) + .build(); + templates.put(Auditor.NOTIFICATIONS_INDEX, notificationMessageTemplate); + } catch (IOException e) { + logger.warn("Error loading the template for the notification message index", e); + } + + try (XContentBuilder docMapping = MlMetaIndex.docMapping()) { + IndexTemplateMetaData metaTemplate = IndexTemplateMetaData.builder(MlMetaIndex.INDEX_NAME) + .patterns(Collections.singletonList(MlMetaIndex.INDEX_NAME)) + .settings(Settings.builder() + // Our indexes are small and one shard puts the + // least possible burden on Elasticsearch + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting)) + .version(Version.CURRENT.id) + .putMapping(MlMetaIndex.TYPE, docMapping.string()) + .build(); + templates.put(MlMetaIndex.INDEX_NAME, metaTemplate); + } catch (IOException e) { + logger.warn("Error loading the template for the " + MlMetaIndex.INDEX_NAME + " index", e); + } + + try (XContentBuilder stateMapping = ElasticsearchMappings.stateMapping()) { + IndexTemplateMetaData stateTemplate = IndexTemplateMetaData.builder(AnomalyDetectorsIndex.jobStateIndexName()) + .patterns(Collections.singletonList(AnomalyDetectorsIndex.jobStateIndexName())) + // TODO review these settings + .settings(Settings.builder() + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting) + // Sacrifice durability for performance: in the event of power + // failure we can lose the last 5 seconds of changes, but it's + // much faster + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), "async")) + .putMapping(ElasticsearchMappings.DOC_TYPE, stateMapping.string()) + .version(Version.CURRENT.id) + .build(); + templates.put(AnomalyDetectorsIndex.jobStateIndexName(), stateTemplate); + } catch (IOException e) { + logger.error("Error loading the template for the " + AnomalyDetectorsIndex.jobStateIndexName() + " index", e); + } + + try (XContentBuilder docMapping = ElasticsearchMappings.docMapping()) { + IndexTemplateMetaData jobResultsTemplate = IndexTemplateMetaData.builder(AnomalyDetectorsIndex.jobResultsIndexPrefix()) + .patterns(Collections.singletonList(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*")) + .settings(Settings.builder() + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting) + // Sacrifice durability for performance: in the event of power + // failure we can lose the last 5 seconds of changes, but it's + // much faster + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), "async") + // set the default all search field + .put(IndexSettings.DEFAULT_FIELD_SETTING.getKey(), ElasticsearchMappings.ALL_FIELD_VALUES)) + .putMapping(ElasticsearchMappings.DOC_TYPE, docMapping.string()) + .version(Version.CURRENT.id) + .build(); + templates.put(AnomalyDetectorsIndex.jobResultsIndexPrefix(), jobResultsTemplate); + } catch (IOException e) { + logger.error("Error loading the template for the " + AnomalyDetectorsIndex.jobResultsIndexPrefix() + " indices", e); + } + + return templates; + }; + } + + public static boolean allTemplatesInstalled(ClusterState clusterState) { + boolean allPresent = true; + List templateNames = Arrays.asList(Auditor.NOTIFICATIONS_INDEX, MlMetaIndex.INDEX_NAME, + AnomalyDetectorsIndex.jobStateIndexName(), AnomalyDetectorsIndex.jobResultsIndexPrefix()); + 
for (String templateName : templateNames) { + allPresent = allPresent && TemplateUtils.checkTemplateExistsAndVersionIsGTECurrentVersion(templateName, clusterState); + } + + return allPresent; + } + /** * Find the memory size (in bytes) of the machine this node is running on. * Takes container limits (as used by Docker for example) into account. diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearningTemplateRegistry.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearningTemplateRegistry.java deleted file mode 100644 index 2de949f4d95..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearningTemplateRegistry.java +++ /dev/null @@ -1,315 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml; - -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.routing.UnassignedInfo; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.gateway.GatewayService; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; -import org.elasticsearch.xpack.ml.job.persistence.ElasticsearchMappings; -import org.elasticsearch.xpack.ml.notifications.AuditMessage; -import org.elasticsearch.xpack.ml.notifications.Auditor; - -import java.io.IOException; -import java.util.Collections; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.BiConsumer; - -/** - * Registry for the ML index templates and settings - */ -public class MachineLearningTemplateRegistry extends AbstractComponent implements ClusterStateListener { - private static final String ASYNC = "async"; - - private final Client client; - private final ThreadPool threadPool; - - public static final String [] TEMPLATE_NAMES = new String [] {Auditor.NOTIFICATIONS_INDEX, MlMetaIndex.INDEX_NAME, - AnomalyDetectorsIndex.jobStateIndexName(), AnomalyDetectorsIndex.jobResultsIndexPrefix()}; - - final AtomicBoolean putMlNotificationsIndexTemplateCheck = new AtomicBoolean(false); - final AtomicBoolean putMlMetaIndexTemplateCheck = new AtomicBoolean(false); - final AtomicBoolean putStateIndexTemplateCheck = new AtomicBoolean(false); - final AtomicBoolean putResultsIndexTemplateCheck = new AtomicBoolean(false); - - // Allows us in test mode to disable the delay of shard allocation, so that in tests we don't have to wait for - // for at least a minute for shards to get 
allocated. - private final TimeValue delayedNodeTimeOutSetting; - - public MachineLearningTemplateRegistry(Settings settings, ClusterService clusterService, Client client, - ThreadPool threadPool) { - super(settings); - this.client = client; - this.threadPool = threadPool; - // Whether we are using native process is a good way to detect whether we are in dev / test mode: - if (MachineLearning.AUTODETECT_PROCESS.get(settings)) { - delayedNodeTimeOutSetting = UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.get(settings); - } else { - delayedNodeTimeOutSetting = TimeValue.timeValueNanos(0); - } - - clusterService.addListener(this); - } - - @Override - public void clusterChanged(ClusterChangedEvent event) { - if (event.localNodeMaster()) { - - // wait until the gateway has recovered from disk, - // otherwise we think may not have the index templates while they actually do exist - if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) == false) { - addTemplatesIfMissing(event.state()); - } - } - } - - /** - * Puts the registered index templates if missing to the - * cluster waiting until the templates have been updated. - */ - public void addTemplatesIfMissing(ClusterState state) { - MetaData metaData = state.metaData(); - addMlNotificationsIndexTemplate(metaData); - addMlMetaIndexTemplate(metaData); - addStateIndexTemplate(metaData); - addResultsIndexTemplate(metaData); - } - - static boolean templateIsPresentAndUpToDate(String templateName, MetaData metaData) { - IndexTemplateMetaData templateMetaData = metaData.templates().get(templateName); - if (templateMetaData == null) { - return false; - } - - return templateMetaData.version() != null && templateMetaData.version() >= Version.CURRENT.id; - } - - private void addMlNotificationsIndexTemplate(MetaData metaData) { - if (putMlNotificationsIndexTemplateCheck.compareAndSet(false, true)) { - if (templateIsPresentAndUpToDate(Auditor.NOTIFICATIONS_INDEX, metaData) == false) { - threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { - putNotificationMessageIndexTemplate((result, error) -> { - putMlNotificationsIndexTemplateCheck.set(false); - if (result) { - logger.info("successfully created {} index template", Auditor.NOTIFICATIONS_INDEX); - } else { - logger.error( - new ParameterizedMessage("not able to create {} index template", Auditor.NOTIFICATIONS_INDEX), error); - } - }); - }); - } else { - putMlNotificationsIndexTemplateCheck.set(false); - } - } - } - - private void addMlMetaIndexTemplate(MetaData metaData) { - if (putMlMetaIndexTemplateCheck.compareAndSet(false, true)) { - if (templateIsPresentAndUpToDate(MlMetaIndex.INDEX_NAME, metaData) == false) { - threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { - putMetaIndexTemplate((result, error) -> { - putMlMetaIndexTemplateCheck.set(false); - if (result) { - logger.info("successfully created {} index template", MlMetaIndex.INDEX_NAME); - } else { - logger.error(new ParameterizedMessage("not able to create {} index template", MlMetaIndex.INDEX_NAME), error); - } - }); - }); - } else { - putMlMetaIndexTemplateCheck.set(false); - } - } - } - - private void addStateIndexTemplate(MetaData metaData) { - String stateIndexName = AnomalyDetectorsIndex.jobStateIndexName(); - if (putStateIndexTemplateCheck.compareAndSet(false, true)) { - if (templateIsPresentAndUpToDate(stateIndexName, metaData) == false) { - threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { - putJobStateIndexTemplate((result, error) -> { - 
putStateIndexTemplateCheck.set(false); - if (result) { - logger.info("successfully created {} index template", stateIndexName); - } else { - logger.error("not able to create " + stateIndexName + " index template", error); - } - }); - }); - } else { - putStateIndexTemplateCheck.set(false); - } - } - } - - private void addResultsIndexTemplate(MetaData metaData) { - if (putResultsIndexTemplateCheck.compareAndSet(false, true)) { - if (templateIsPresentAndUpToDate(AnomalyDetectorsIndex.jobResultsIndexPrefix(), metaData) == false) { - threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { - putJobResultsIndexTemplate((result, error) -> { - putResultsIndexTemplateCheck.set(false); - if (result) { - logger.info("successfully created {} index template", AnomalyDetectorsIndex.jobResultsIndexPrefix()); - } else { - logger.error( - new ParameterizedMessage("not able to create {} index template", - AnomalyDetectorsIndex.jobResultsIndexPrefix()), error); - } - }); - }); - } else { - putResultsIndexTemplateCheck.set(false); - } - } - } - - /** - * Index template for notifications - */ - void putNotificationMessageIndexTemplate(BiConsumer listener) { - try (XContentBuilder auditMapping = ElasticsearchMappings.auditMessageMapping()) { - PutIndexTemplateRequest templateRequest = new PutIndexTemplateRequest(Auditor.NOTIFICATIONS_INDEX); - templateRequest.patterns(Collections.singletonList(Auditor.NOTIFICATIONS_INDEX)); - templateRequest.settings(mlNotificationIndexSettings()); - templateRequest.mapping(AuditMessage.TYPE.getPreferredName(), auditMapping); - templateRequest.version(Version.CURRENT.id); - client.admin().indices().putTemplate(templateRequest, - ActionListener.wrap(r -> listener.accept(true, null), e -> listener.accept(false, e))); - } catch (IOException e) { - logger.warn("Error putting the template for the notification message index", e); - listener.accept(false, - new ElasticsearchException("Error creating the template mappings for the notification message indices", e)); - } - } - - /** - * Index template for meta data - */ - void putMetaIndexTemplate(BiConsumer listener) { - PutIndexTemplateRequest templateRequest = new PutIndexTemplateRequest(MlMetaIndex.INDEX_NAME); - templateRequest.patterns(Collections.singletonList(MlMetaIndex.INDEX_NAME)); - templateRequest.settings(mlNotificationIndexSettings()); - templateRequest.version(Version.CURRENT.id); - - try (XContentBuilder docMapping = MlMetaIndex.docMapping()) { - templateRequest.mapping(MlMetaIndex.TYPE, docMapping); - } catch (IOException e) { - String msg = "Error creating template mappings for the " + MlMetaIndex.INDEX_NAME + " index"; - logger.error(msg, e); - listener.accept(false, new ElasticsearchException(msg, e)); - } - - client.admin().indices().putTemplate(templateRequest, - ActionListener.wrap(r -> listener.accept(true, null), e -> listener.accept(false, e))); - } - - void putJobStateIndexTemplate(BiConsumer listener) { - try (XContentBuilder stateMapping = ElasticsearchMappings.stateMapping()) { - PutIndexTemplateRequest templateRequest = new PutIndexTemplateRequest(AnomalyDetectorsIndex.jobStateIndexName()); - templateRequest.patterns(Collections.singletonList(AnomalyDetectorsIndex.jobStateIndexName())); - templateRequest.settings(mlStateIndexSettings()); - templateRequest.mapping(ElasticsearchMappings.DOC_TYPE, stateMapping); - templateRequest.version(Version.CURRENT.id); - - client.admin().indices().putTemplate(templateRequest, - ActionListener.wrap(r -> listener.accept(true, null), e -> listener.accept(false, e))); 
- } catch (IOException e) { - logger.error("Error creating template mappings for the " + AnomalyDetectorsIndex.jobStateIndexName() + " index", e); - listener.accept(false, new ElasticsearchException("Error creating template mappings for the " + - AnomalyDetectorsIndex.jobStateIndexName() + " indices", e)); - } - } - - void putJobResultsIndexTemplate(BiConsumer listener) { - try (XContentBuilder docMapping = ElasticsearchMappings.docMapping()) { - - PutIndexTemplateRequest templateRequest = new PutIndexTemplateRequest(AnomalyDetectorsIndex.jobResultsIndexPrefix()); - templateRequest.patterns(Collections.singletonList(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*")); - templateRequest.settings(mlResultsIndexSettings()); - templateRequest.mapping(ElasticsearchMappings.DOC_TYPE, docMapping); - templateRequest.version(Version.CURRENT.id); - - client.admin().indices().putTemplate(templateRequest, - ActionListener.wrap(r -> listener.accept(true, null), e -> listener.accept(false, e))); - } catch (IOException e) { - logger.error("Error creating template mappings for the " + AnomalyDetectorsIndex.jobResultsIndexPrefix() + " indices", e); - listener.accept(false, new ElasticsearchException("Error creating template mappings for the " - + AnomalyDetectorsIndex.jobResultsIndexPrefix() + " index", e)); - } - } - - /** - * Build the index settings that we want to apply to results indexes. - * - * @return Builder initialised with the desired setting for the ML results indices. - */ - Settings.Builder mlResultsIndexSettings() { - return Settings.builder() - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting) - // Sacrifice durability for performance: in the event of power - // failure we can lose the last 5 seconds of changes, but it's - // much faster - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), ASYNC) - // set the default all search field - .put(IndexSettings.DEFAULT_FIELD_SETTING.getKey(), ElasticsearchMappings.ALL_FIELD_VALUES); - } - - /** - * Settings for the notification messages index - * - * @return Builder initialised with the desired setting for the ML index. - */ - Settings.Builder mlNotificationIndexSettings() { - return Settings.builder() - // Our indexes are small and one shard puts the - // least possible burden on Elasticsearch - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting); - } - - /** - * Settings for the state index - * - * @return Builder initialised with the desired setting for the ML index. 
- */ - Settings.Builder mlStateIndexSettings() { - // TODO review these settings - return Settings.builder() - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting) - // Sacrifice durability for performance: in the event of power - // failure we can lose the last 5 seconds of changes, but it's - // much faster - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), ASYNC); - } - - public static boolean allTemplatesInstalled(MetaData metaData) { - boolean allPresent = true; - for (String templateName : TEMPLATE_NAMES) { - allPresent = allPresent && templateIsPresentAndUpToDate(templateName, metaData); - } - - return allPresent; - } -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java index 346773411e7..4a6ddb7bcfc 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.FutureUtils; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ml.action.DeleteExpiredDataAction; import org.joda.time.DateTime; @@ -24,6 +25,10 @@ import java.util.Random; import java.util.concurrent.ScheduledFuture; import java.util.function.Supplier; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; + /** * A service that runs once a day and triggers maintenance tasks. 
*/ @@ -107,9 +112,10 @@ public class MlDailyMaintenanceService implements Releasable { private void triggerTasks() { LOGGER.info("triggering scheduled [ML] maintenance tasks"); - client.execute(DeleteExpiredDataAction.INSTANCE, new DeleteExpiredDataAction.Request(), ActionListener.wrap( - response -> LOGGER.info("Successfully completed [ML] maintenance tasks"), - e -> LOGGER.error("An error occurred during maintenance tasks execution", e))); + executeAsyncWithOrigin(client, ML_ORIGIN, DeleteExpiredDataAction.INSTANCE, new DeleteExpiredDataAction.Request(), + ActionListener.wrap( + response -> LOGGER.info("Successfully completed [ML] maintenance tasks"), + e -> LOGGER.error("An error occurred during maintenance tasks execution", e))); scheduleNext(); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/CloseJobAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/CloseJobAction.java index 5981517094a..a19ded838d1 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/CloseJobAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/CloseJobAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.BaseTasksRequest; import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.action.support.tasks.TransportTasksAction; +import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -53,7 +54,6 @@ import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.PersistentTask; import org.elasticsearch.xpack.persistent.PersistentTasksService; -import org.elasticsearch.xpack.security.InternalClient; import java.io.IOException; import java.util.ArrayList; @@ -66,6 +66,9 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + public class CloseJobAction extends Action { public static final CloseJobAction INSTANCE = new CloseJobAction(); @@ -305,7 +308,7 @@ public class CloseJobAction extends Action { - private final InternalClient client; + private final Client client; private final ClusterService clusterService; private final Auditor auditor; private final PersistentTasksService persistentTasksService; @@ -313,7 +316,7 @@ public class CloseJobAction extends Action() { - @Override - public void onResponse(FinalizeJobExecutionAction.Response r) { - listener.onResponse(response); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); + executeAsyncWithOrigin(client, ML_ORIGIN, FinalizeJobExecutionAction.INSTANCE, finalizeRequest, + ActionListener.wrap(r -> listener.onResponse(response), listener::onFailure)); } @Override diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteDatafeedAction.java index c88b80f419e..50523d9eec9 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteDatafeedAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteDatafeedAction.java @@ -15,6 
+15,7 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterState; @@ -28,7 +29,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.threadpool.ThreadPool; @@ -38,11 +38,13 @@ import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.persistent.PersistentTasksService; -import org.elasticsearch.xpack.security.InternalClient; import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + public class DeleteDatafeedAction extends Action { @@ -163,16 +165,16 @@ public class DeleteDatafeedAction extends Action { - private InternalClient client; + private Client client; private PersistentTasksService persistentTasksService; @Inject public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - InternalClient internalClient, PersistentTasksService persistentTasksService) { + Client client, PersistentTasksService persistentTasksService) { super(settings, DeleteDatafeedAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, Request::new); - this.client = internalClient; + this.client = client; this.persistentTasksService = persistentTasksService; } @@ -207,7 +209,7 @@ public class DeleteDatafeedAction extends Action listener) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteExpiredDataAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteExpiredDataAction.java index 54d0b1e9aca..fa15915af63 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteExpiredDataAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteExpiredDataAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; @@ -32,7 +33,6 @@ import org.elasticsearch.xpack.ml.job.retention.ExpiredResultsRemover; import org.elasticsearch.xpack.ml.job.retention.MlDataRemover; import org.elasticsearch.xpack.ml.notifications.Auditor; import 
org.elasticsearch.xpack.ml.utils.VolatileCursorIterator; -import org.elasticsearch.xpack.security.InternalClient; import java.io.IOException; import java.util.Arrays; @@ -125,13 +125,13 @@ public class DeleteExpiredDataAction extends Action { - private final InternalClient client; + private final Client client; private final ClusterService clusterService; @Inject public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - InternalClient client, ClusterService clusterService) { + Client client, ClusterService clusterService) { super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new); this.client = client; this.clusterService = clusterService; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteFilterAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteFilterAction.java index 625f6ff355c..dadf47bee68 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteFilterAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteFilterAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; @@ -18,6 +19,7 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; +import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -36,7 +38,6 @@ import org.elasticsearch.xpack.ml.job.config.Detector; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.config.MlFilter; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.security.InternalClient; import java.io.IOException; import java.util.ArrayList; @@ -44,6 +45,9 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + public class DeleteFilterAction extends Action { @@ -144,14 +148,14 @@ public class DeleteFilterAction extends Action { - private final InternalClient client; + private final Client client; private final ClusterService clusterService; @Inject public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - ClusterService clusterService, InternalClient client) { + ClusterService clusterService, Client client) { super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new); this.clusterService = clusterService; @@ -184,22 +188,23 @@ public class DeleteFilterAction extends Action() { - @Override - public void onResponse(BulkResponse bulkResponse) { - 
if (bulkResponse.getItems()[0].status() == RestStatus.NOT_FOUND) { - listener.onFailure(new ResourceNotFoundException("Could not delete filter with ID [" + filterId - + "] because it does not exist")); - } else { - listener.onResponse(new Response(true)); - } - } + executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), + new ActionListener() { + @Override + public void onResponse(BulkResponse bulkResponse) { + if (bulkResponse.getItems()[0].status() == RestStatus.NOT_FOUND) { + listener.onFailure(new ResourceNotFoundException("Could not delete filter with ID [" + filterId + + "] because it does not exist")); + } else { + listener.onResponse(new Response(true)); + } + } - @Override - public void onFailure(Exception e) { - logger.error("Could not delete filter with ID [" + filterId + "]", e); - listener.onFailure(new IllegalStateException("Could not delete filter with ID [" + filterId + "]", e)); - } + @Override + public void onFailure(Exception e) { + logger.error("Could not delete filter with ID [" + filterId + "]", e); + listener.onFailure(new IllegalStateException("Could not delete filter with ID [" + filterId + "]", e)); + } }); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteJobAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteJobAction.java index 462d1c7b948..eba9421b21d 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteJobAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteJobAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; @@ -44,12 +45,14 @@ import org.elasticsearch.xpack.ml.job.persistence.JobStorageDeletionTask; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.persistent.PersistentTasksService; -import org.elasticsearch.xpack.security.InternalClient; import java.io.IOException; import java.util.Objects; import java.util.concurrent.TimeoutException; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + public class DeleteJobAction extends Action { public static final DeleteJobAction INSTANCE = new DeleteJobAction(); @@ -172,17 +175,17 @@ public class DeleteJobAction extends Action { - private final InternalClient internalClient; + private final Client client; private final JobManager jobManager; private final PersistentTasksService persistentTasksService; @Inject public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - JobManager jobManager, PersistentTasksService persistentTasksService, InternalClient internalClient) { + JobManager jobManager, PersistentTasksService persistentTasksService, Client client) { super(settings, DeleteJobAction.NAME, transportService, clusterService, threadPool, actionFilters, 
indexNameExpressionResolver, Request::new); - this.internalClient = internalClient; + this.client = client; this.jobManager = jobManager; this.persistentTasksService = persistentTasksService; } @@ -293,7 +296,7 @@ public class DeleteJobAction extends Action listener) { KillProcessAction.Request killRequest = new KillProcessAction.Request(jobId); - internalClient.execute(KillProcessAction.INSTANCE, killRequest, listener); + executeAsyncWithOrigin(client, ML_ORIGIN, KillProcessAction.INSTANCE, killRequest, listener); } private void removePersistentTask(String jobId, ClusterState currentState, diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteModelSnapshotAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteModelSnapshotAction.java index eec5ef07446..68172a3d1e3 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteModelSnapshotAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteModelSnapshotAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; @@ -32,7 +33,6 @@ import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.ml.notifications.Auditor; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.security.InternalClient; import java.io.IOException; import java.util.Collections; @@ -130,7 +130,7 @@ public class DeleteModelSnapshotAction extends Action { - private final InternalClient client; + private final Client client; private final JobProvider jobProvider; private final ClusterService clusterService; private final Auditor auditor; @@ -138,7 +138,7 @@ public class DeleteModelSnapshotAction extends Action * This action returns summarized bucket results over multiple jobs. 
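Editorial aside, not part of the patch: the GetOverallBucketsAction hunk just below is representative of the searches in this change, swapping a direct client.search(request, listener) call for the ThreadContext-based executeAsyncWithOrigin overload and passing client::search as the final consumer. A minimal sketch of that conversion, under the assumption of a hypothetical holder class and method; only the helper call mirrors the pattern used here.

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;

import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin;

// Hypothetical illustration of the search conversion pattern (not code from this PR).
class OriginAwareSearchSketch {

    void search(Client client, SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
        // Stash the current context, tag the request with the ML origin, and wrap the
        // listener so it is invoked with the caller's original context restored.
        executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN,
                searchRequest, listener, client::search);
    }
}

The same shape recurs later in this patch for multi-search, index administration and aggregation searches by swapping the final method reference (client::multiSearch, client.admin().indices()::create, and so on).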
@@ -458,20 +462,22 @@ public class GetOverallBucketsAction maxBucketSpanMillis, jobsContext.indices); searchRequest.source().aggregation(AggregationBuilders.min(EARLIEST_TIME).field(Result.TIMESTAMP.getPreferredName())); searchRequest.source().aggregation(AggregationBuilders.max(LATEST_TIME).field(Result.TIMESTAMP.getPreferredName())); - client.search(searchRequest, ActionListener.wrap(searchResponse -> { - long totalHits = searchResponse.getHits().getTotalHits(); - if (totalHits > 0) { - Aggregations aggregations = searchResponse.getAggregations(); - Min min = aggregations.get(EARLIEST_TIME); - long earliestTime = Intervals.alignToFloor((long) min.getValue(), maxBucketSpanMillis); - Max max = aggregations.get(LATEST_TIME); - long latestTime = Intervals.alignToCeil((long) max.getValue() + 1, maxBucketSpanMillis); - listener.onResponse(new ChunkedBucketSearcher(jobsContext, earliestTime, latestTime, request.isExcludeInterim(), - overallBucketsProvider, overallBucketsProcessor)); - } else { - listener.onResponse(null); - } - }, listener::onFailure)); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest, + ActionListener.wrap(searchResponse -> { + long totalHits = searchResponse.getHits().getTotalHits(); + if (totalHits > 0) { + Aggregations aggregations = searchResponse.getAggregations(); + Min min = aggregations.get(EARLIEST_TIME); + long earliestTime = Intervals.alignToFloor((long) min.getValue(), maxBucketSpanMillis); + Max max = aggregations.get(LATEST_TIME); + long latestTime = Intervals.alignToCeil((long) max.getValue() + 1, maxBucketSpanMillis); + listener.onResponse(new ChunkedBucketSearcher(jobsContext, earliestTime, latestTime, request.isExcludeInterim(), + overallBucketsProvider, overallBucketsProcessor)); + } else { + listener.onResponse(null); + } + }, listener::onFailure), + client::search); } private static class JobsContext { @@ -540,16 +546,19 @@ public class GetOverallBucketsAction listener.onResponse(overallBucketsProcessor.finish()); return; } - client.search(nextSearch(), ActionListener.wrap(searchResponse -> { - Histogram histogram = searchResponse.getAggregations().get(Result.TIMESTAMP.getPreferredName()); - overallBucketsProcessor.process(overallBucketsProvider.computeOverallBuckets(histogram)); - if (overallBucketsProcessor.size() > MAX_RESULT_COUNT) { - listener.onFailure(ExceptionsHelper.badRequestException("Unable to return more than [{}] results; please use " + - "parameters [{}] and [{}] to limit the time range", MAX_RESULT_COUNT, Request.START, Request.END)); - return; - } - searchAndComputeOverallBuckets(listener); - }, listener::onFailure)); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, nextSearch(), + ActionListener.wrap(searchResponse -> { + Histogram histogram = searchResponse.getAggregations().get(Result.TIMESTAMP.getPreferredName()); + overallBucketsProcessor.process(overallBucketsProvider.computeOverallBuckets(histogram)); + if (overallBucketsProcessor.size() > MAX_RESULT_COUNT) { + listener.onFailure( + ExceptionsHelper.badRequestException("Unable to return more than [{}] results; please use " + + "parameters [{}] and [{}] to limit the time range", MAX_RESULT_COUNT, Request.START, Request.END)); + return; + } + searchAndComputeOverallBuckets(listener); + }, listener::onFailure), + client::search); } SearchRequest nextSearch() { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/OpenJobAction.java 
b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/OpenJobAction.java index 3be9346254c..7e460a0a5d4 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/OpenJobAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/OpenJobAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -72,7 +73,6 @@ import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.Persiste import org.elasticsearch.xpack.persistent.PersistentTasksExecutor; import org.elasticsearch.xpack.persistent.PersistentTasksService; import org.elasticsearch.xpack.persistent.PersistentTasksService.WaitForPersistentTaskStatusListener; -import org.elasticsearch.xpack.security.InternalClient; import java.io.IOException; import java.util.ArrayList; @@ -85,6 +85,8 @@ import java.util.Objects; import java.util.Set; import java.util.function.Predicate; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager.MAX_OPEN_JOBS_PER_NODE; public class OpenJobAction extends Action { @@ -387,12 +389,12 @@ public class OpenJobAction extends Action { + executeAsyncWithOrigin(client, ML_ORIGIN, PutMappingAction.INSTANCE, putMappingRequest, + ActionListener.wrap(response -> { if (response.isAcknowledged()) { listener.onResponse(true); } else { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java index 0c992168ed3..bd69afd1046 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutFilterAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutFilterAction.java index 023d5e2073e..86e070dcb23 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutFilterAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutFilterAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; @@ -18,6 +19,7 @@ import 
org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Strings; @@ -36,12 +38,14 @@ import org.elasticsearch.xpack.ml.MlMetaIndex; import org.elasticsearch.xpack.ml.job.config.MlFilter; import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.security.InternalClient; import java.io.IOException; import java.util.Collections; import java.util.Objects; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + public class PutFilterAction extends Action { @@ -160,13 +164,13 @@ public class PutFilterAction extends Action { - private final InternalClient client; + private final Client client; @Inject public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - InternalClient client) { + Client client) { super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new); this.client = client; @@ -186,17 +190,19 @@ public class PutFilterAction extends Action() { - @Override - public void onResponse(BulkResponse indexResponse) { - listener.onResponse(new Response()); - } + executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), + new ActionListener() { + @Override + public void onResponse(BulkResponse indexResponse) { + listener.onResponse(new Response()); + } - @Override - public void onFailure(Exception e) { - listener.onFailure(new ResourceNotFoundException("Could not create filter with ID [" + filter.getId() + "]", e)); - } - }); + @Override + public void onFailure(Exception e) { + listener.onFailure( + new ResourceNotFoundException("Could not create filter with ID [" + filter.getId() + "]", e)); + } + }); } } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/RevertModelSnapshotAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/RevertModelSnapshotAction.java index 05b93fcdbdd..f01e9d529fe 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/RevertModelSnapshotAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/RevertModelSnapshotAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -43,7 +44,6 @@ import org.elasticsearch.xpack.ml.job.persistence.JobDataDeleter; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.security.InternalClient; 
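Editorial aside, not part of the patch: the DeleteFilterAction and PutFilterAction hunks above show the second recurring conversion, where a request builder's execute(listener), previously routed through the InternalClient, becomes executeAsyncWithOrigin(client, ML_ORIGIN, SomeAction.INSTANCE, builder.request(), listener). A minimal sketch with a hypothetical caller; only the helper invocation reflects the pattern in this change.

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BulkAction;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.client.Client;

import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin;

// Hypothetical illustration of the builder-to-request conversion pattern (not code from this PR).
class OriginAwareBulkSketch {

    void executeBulk(Client client, BulkRequestBuilder bulkRequestBuilder, ActionListener<BulkResponse> listener) {
        // The helper takes the concrete action and the built request rather than the builder,
        // which is why .request() calls appear throughout this patch.
        executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), listener);
    }
}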
import java.io.IOException; import java.util.Date; @@ -252,7 +252,7 @@ extends Action { - private final InternalClient client; + private final Client client; private final JobManager jobManager; private final JobProvider jobProvider; private final JobDataCountsPersister jobDataCountsPersister; @@ -260,7 +260,7 @@ extends Action { @@ -422,7 +425,7 @@ public class StartDatafeedAction // The start datafeed api is a low through put api, so the fact that we redirect to elected master node shouldn't be an issue. public static class TransportAction extends TransportMasterNodeAction { - private final InternalClient client; + private final Client client; private final XPackLicenseState licenseState; private final PersistentTasksService persistentTasksService; @@ -430,11 +433,11 @@ public class StartDatafeedAction public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ClusterService clusterService, XPackLicenseState licenseState, PersistentTasksService persistentTasksService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - InternalClient client) { + Client client) { super(settings, NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, Request::new); this.licenseState = licenseState; this.persistentTasksService = persistentTasksService; - this.client = client; + this.client = clientWithOrigin(client, ML_ORIGIN); } @Override diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateModelSnapshotAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateModelSnapshotAction.java index baeed154556..12904d8a159 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateModelSnapshotAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateModelSnapshotAction.java @@ -12,12 +12,14 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.ParseField; @@ -43,12 +45,14 @@ import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.ml.job.results.Result; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.security.InternalClient; import java.io.IOException; import java.util.Objects; import java.util.function.Consumer; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + public class UpdateModelSnapshotAction extends Action { @@ -266,11 +270,11 @@ public class UpdateModelSnapshotAction extends Action { private final JobProvider jobProvider; - private final InternalClient client; + private final Client client; @Inject public TransportAction(Settings settings, 
TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, JobProvider jobProvider, InternalClient client) { + IndexNameExpressionResolver indexNameExpressionResolver, JobProvider jobProvider, Client client) { super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new); this.jobProvider = jobProvider; this.client = client; @@ -318,17 +322,18 @@ public class UpdateModelSnapshotAction extends Action() { - @Override - public void onResponse(BulkResponse indexResponse) { - handler.accept(true); - } + executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), + new ActionListener() { + @Override + public void onResponse(BulkResponse indexResponse) { + handler.accept(true); + } - @Override - public void onFailure(Exception e) { - errorHandler.accept(e); - } - }); + @Override + public void onFailure(Exception e) { + errorHandler.accept(e); + } + }); } } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java index f30e2f09168..6dbc1f0c07c 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java @@ -11,6 +11,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.rest.RestStatus; @@ -31,6 +32,9 @@ import java.util.Optional; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; + class DatafeedJob { private static final Logger LOGGER = Loggers.getLogger(DatafeedJob.class); @@ -263,8 +267,10 @@ class DatafeedJob { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); Streams.copy(inputStream, outputStream); request.setContent(new BytesArray(outputStream.toByteArray()), xContentType); - PostDataAction.Response response = client.execute(PostDataAction.INSTANCE, request).actionGet(); - return response.getDataCounts(); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { + PostDataAction.Response response = client.execute(PostDataAction.INSTANCE, request).actionGet(); + return response.getDataCounts(); + } } private boolean isConflictException(Exception e) { @@ -284,7 +290,9 @@ class DatafeedJob { private FlushJobAction.Response flushJob(FlushJobAction.Request flushRequest) { try { LOGGER.trace("[" + jobId + "] Sending flush request"); - return client.execute(FlushJobAction.INSTANCE, flushRequest).actionGet(); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { + return client.execute(FlushJobAction.INSTANCE, flushRequest).actionGet(); + } } catch (Exception e) { LOGGER.debug("[" + jobId + "] error while flushing job", e); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java index 
39a84de4652..1a423a4d9da 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java @@ -26,6 +26,9 @@ import java.util.Objects; import java.util.function.Consumer; import java.util.function.Supplier; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.clientWithOrigin; + public class DatafeedJobBuilder { private final Client client; @@ -34,7 +37,7 @@ public class DatafeedJobBuilder { private final Supplier currentTimeSupplier; public DatafeedJobBuilder(Client client, JobProvider jobProvider, Auditor auditor, Supplier currentTimeSupplier) { - this.client = Objects.requireNonNull(client); + this.client = clientWithOrigin(client, ML_ORIGIN); this.jobProvider = Objects.requireNonNull(jobProvider); this.auditor = Objects.requireNonNull(auditor); this.currentTimeSupplier = Objects.requireNonNull(currentTimeSupplier); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java index 422bda11088..c93c5d3dfd9 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java @@ -43,6 +43,8 @@ import java.util.concurrent.locks.ReentrantLock; import java.util.function.Consumer; import java.util.function.Supplier; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.persistent.PersistentTasksService.WaitForPersistentTaskStatusListener; public class DatafeedManager extends AbstractComponent { @@ -415,20 +417,21 @@ public class DatafeedManager extends AbstractComponent { for the close job api call. 
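Editorial aside, not part of the patch: StartDatafeedAction and DatafeedJobBuilder above take a third approach and wrap the injected Client once, in the constructor, with clientWithOrigin(client, ML_ORIGIN); every request made through the stored field then carries the ML origin without touching individual call sites. A minimal sketch with a hypothetical component:

import org.elasticsearch.client.Client;

import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.ClientHelper.clientWithOrigin;

// Hypothetical component showing the constructor-level wrapping (not code from this PR).
class OriginWrappedComponent {

    private final Client client;

    OriginWrappedComponent(Client client) {
        // The returned FilterClient stashes the thread context and sets the ML origin
        // transient for every execute call, then restores the caller's context in listeners.
        this.client = clientWithOrigin(client, ML_ORIGIN);
    }
}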
*/ closeJobRequest.setLocal(true); - client.execute(CloseJobAction.INSTANCE, closeJobRequest, new ActionListener() { + executeAsyncWithOrigin(client, ML_ORIGIN, CloseJobAction.INSTANCE, closeJobRequest, + new ActionListener() { - @Override - public void onResponse(CloseJobAction.Response response) { - if (!response.isClosed()) { - logger.error("[{}] job close action was not acknowledged", getJobId()); - } - } + @Override + public void onResponse(CloseJobAction.Response response) { + if (!response.isClosed()) { + logger.error("[{}] job close action was not acknowledged", getJobId()); + } + } - @Override - public void onFailure(Exception e) { - logger.error("[" + getJobId() + "] failed to auto-close job", e); - } - }); + @Override + public void onFailure(Exception e) { + logger.error("[" + getJobId() + "] failed to auto-close job", e); + } + }); } @Override diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java index 6f07a31fead..2fb3b388b56 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java @@ -133,7 +133,7 @@ public class ChunkedDataExtractor implements DataExtractor { } protected SearchResponse executeSearchRequest(SearchRequestBuilder searchRequestBuilder) { - return searchRequestBuilder.get(); + return searchRequestBuilder.get(); } private Optional getNextStream() throws IOException { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java index 0a60d0cd637..fe122bce379 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ml.datafeed.extractor.chunked; import org.elasticsearch.client.Client; -import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java index e5b0e3fe226..a57609b1069 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/UpdateJobProcessNotifier.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/UpdateJobProcessNotifier.java index e44c92f4ecf..6890feacaa5 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/UpdateJobProcessNotifier.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/UpdateJobProcessNotifier.java @@ -21,6 +21,8 @@ import org.elasticsearch.xpack.ml.job.config.JobUpdate; import java.util.concurrent.LinkedBlockingQueue; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.ml.action.UpdateProcessAction.Request; import static org.elasticsearch.xpack.ml.action.UpdateProcessAction.Response; @@ -98,7 +100,7 @@ public class UpdateJobProcessNotifier extends AbstractComponent implements Local void executeRemoteJob(JobUpdate update) { Request request = new Request(update.getJobId(), update.getModelPlotConfig(), update.getDetectorUpdates()); - client.execute(UpdateProcessAction.INSTANCE, request, + executeAsyncWithOrigin(client, ML_ORIGIN, UpdateProcessAction.INSTANCE, request, new ActionListener() { @Override public void onResponse(Response response) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataCountsPersister.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataCountsPersister.java index e318b0cd2bc..115d678665c 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataCountsPersister.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataCountsPersister.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.ml.job.persistence; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.component.AbstractComponent; @@ -19,6 +21,8 @@ import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts; import java.io.IOException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; /** * Update a job's dataCounts @@ -47,9 +51,11 @@ public class JobDataCountsPersister extends AbstractComponent { */ public void persistDataCounts(String jobId, DataCounts counts, ActionListener listener) { try (XContentBuilder content = serialiseCounts(counts)) { - client.prepareIndex(AnomalyDetectorsIndex.resultsWriteAlias(jobId), ElasticsearchMappings.DOC_TYPE, + final IndexRequest request = client.prepareIndex(AnomalyDetectorsIndex.resultsWriteAlias(jobId), ElasticsearchMappings.DOC_TYPE, DataCounts.documentId(jobId)) - .setSource(content).execute(new ActionListener() { + .setSource(content) + .request(); + executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, request, new ActionListener() { @Override public void onResponse(IndexResponse indexResponse) { listener.onResponse(true); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java index e679e08e790..94eba695592 100644 --- 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.job.persistence; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; @@ -15,10 +16,10 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.query.ConstantScoreQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -29,6 +30,10 @@ import org.elasticsearch.xpack.ml.job.results.Result; import java.util.List; import java.util.Objects; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; + public class JobDataDeleter { private static final Logger LOGGER = Loggers.getLogger(JobDataDeleter.class); @@ -105,8 +110,7 @@ public class JobDataDeleter { bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); try { - // TODO: change docDeleteListener to listener in 7.0 - bulkRequestBuilder.execute(docDeleteListener); + executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), docDeleteListener); } catch (Exception e) { listener.onFailure(e); } @@ -127,17 +131,8 @@ public class JobDataDeleter { .filter(QueryBuilders.rangeQuery(Result.TIMESTAMP.getPreferredName()).gte(cutoffEpochMs)); deleteByQueryHolder.searchRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); deleteByQueryHolder.searchRequest.source(new SearchSourceBuilder().query(query)); - client.execute(DeleteByQueryAction.INSTANCE, deleteByQueryHolder.dbqRequest, new ActionListener() { - @Override - public void onResponse(BulkByScrollResponse bulkByScrollResponse) { - listener.onResponse(true); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); + executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, deleteByQueryHolder.dbqRequest, + ActionListener.wrap(r -> listener.onResponse(true), listener::onFailure)); } /** @@ -151,7 +146,7 @@ public class JobDataDeleter { QueryBuilder qb = QueryBuilders.termQuery(Result.IS_INTERIM.getPreferredName(), true); deleteByQueryHolder.searchRequest.source(new SearchSourceBuilder().query(new ConstantScoreQueryBuilder(qb))); - try { + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { client.execute(DeleteByQueryAction.INSTANCE, deleteByQueryHolder.dbqRequest).get(); } catch (Exception e) { LOGGER.error("[" + jobId + "] An error occurred while deleting interim results", e); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java 
b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java index ab008e69252..90ac992eb96 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java @@ -12,7 +12,11 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.search.MultiSearchRequestBuilder; import org.elasticsearch.action.search.MultiSearchResponse; @@ -32,6 +36,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -89,6 +94,10 @@ import java.util.function.BiFunction; import java.util.function.Consumer; import java.util.function.Supplier; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; + public class JobProvider { private static final Logger LOGGER = Loggers.getLogger(JobProvider.class); @@ -200,7 +209,8 @@ public class JobProvider { } }; - msearch.execute(searchResponseActionListener); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, msearch.request(), searchResponseActionListener, + client::multiSearch); } @@ -214,15 +224,14 @@ public class JobProvider { String writeAliasName = AnomalyDetectorsIndex.resultsWriteAlias(job.getId()); String indexName = job.getResultsIndexName(); - final ActionListener createAliasListener = ActionListener.wrap(success -> - client.admin().indices().prepareAliases() - .addAlias(indexName, readAliasName, QueryBuilders.termQuery(Job.ID.getPreferredName(), job.getId())) - .addAlias(indexName, writeAliasName) - // we could return 'success && r.isAcknowledged()' instead of 'true', but that makes - // testing not possible as we can't create IndicesAliasesResponse instance or - // mock IndicesAliasesResponse#isAcknowledged() - .execute(ActionListener.wrap(r -> finalListener.onResponse(true), finalListener::onFailure)), - finalListener::onFailure); + final ActionListener createAliasListener = ActionListener.wrap(success -> { + final IndicesAliasesRequest request = client.admin().indices().prepareAliases() + .addAlias(indexName, readAliasName, QueryBuilders.termQuery(Job.ID.getPreferredName(), job.getId())) + .addAlias(indexName, writeAliasName).request(); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, request, + ActionListener.wrap(r -> finalListener.onResponse(true), finalListener::onFailure), + 
client.admin().indices()::aliases); + }, finalListener::onFailure); // Indices can be shared, so only create if it doesn't exist already. Saves us a roundtrip if // already in the CS @@ -234,8 +243,8 @@ public class JobProvider { try (XContentBuilder termFieldsMapping = ElasticsearchMappings.termFieldsMapping(ElasticsearchMappings.DOC_TYPE, termFields)) { createIndexRequest.mapping(ElasticsearchMappings.DOC_TYPE, termFieldsMapping); } - client.admin().indices().create(createIndexRequest, - ActionListener.wrap( + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, createIndexRequest, + ActionListener.wrap( r -> createAliasListener.onResponse(r.isAcknowledged()), e -> { // Possible that the index was created while the request was executing, @@ -248,7 +257,7 @@ public class JobProvider { finalListener.onFailure(e); } } - )); + ), client.admin().indices()::create); } else { long fieldCountLimit = MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.get(settings); if (violatedFieldCountLimit(indexName, termFields.size(), fieldCountLimit, state)) { @@ -297,19 +306,19 @@ public class JobProvider { private void updateIndexMappingWithTermFields(String indexName, Collection termFields, ActionListener listener) { // Put the whole "doc" mapping, not just the term fields, otherwise we'll wipe the _meta section of the mapping try (XContentBuilder termFieldsMapping = ElasticsearchMappings.docMapping(termFields)) { - client.admin().indices().preparePutMapping(indexName).setType(ElasticsearchMappings.DOC_TYPE) - .setSource(termFieldsMapping) - .execute(new ActionListener() { - @Override - public void onResponse(PutMappingResponse putMappingResponse) { - listener.onResponse(putMappingResponse.isAcknowledged()); - } + final PutMappingRequest request = client.admin().indices().preparePutMapping(indexName).setType(ElasticsearchMappings.DOC_TYPE) + .setSource(termFieldsMapping).request(); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, request, new ActionListener() { + @Override + public void onResponse(PutMappingResponse putMappingResponse) { + listener.onResponse(putMappingResponse.isAcknowledged()); + } - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }, client.admin().indices()::putMapping); } catch (IOException e) { listener.onFailure(e); } @@ -353,43 +362,44 @@ public class JobProvider { msearch.add(createDocIdSearch(MlMetaIndex.INDEX_NAME, filterId)); } - msearch.execute(ActionListener.wrap( - response -> { - for (int i = 0; i < response.getResponses().length; i++) { - MultiSearchResponse.Item itemResponse = response.getResponses()[i]; - if (itemResponse.isFailure()) { - errorHandler.accept(itemResponse.getFailure()); - } else { - SearchResponse searchResponse = itemResponse.getResponse(); - ShardSearchFailure[] shardFailures = searchResponse.getShardFailures(); - int unavailableShards = searchResponse.getTotalShards() - searchResponse.getSuccessfulShards(); - if (shardFailures != null && shardFailures.length > 0) { - LOGGER.error("[{}] Search request returned shard failures: {}", jobId, - Arrays.toString(shardFailures)); - errorHandler.accept(new ElasticsearchException( - ExceptionsHelper.shardFailuresToErrorMsg(jobId, shardFailures))); - } else if (unavailableShards > 0) { - errorHandler.accept(new ElasticsearchException("[" + jobId - + "] Search request encountered [" + unavailableShards + "] unavailable shards")); - } else { - 
SearchHits hits = searchResponse.getHits(); - long hitsCount = hits.getHits().length; - if (hitsCount == 0) { - SearchRequest searchRequest = msearch.request().requests().get(i); - LOGGER.debug("Found 0 hits for [{}/{}]", searchRequest.indices(), searchRequest.types()); - } else if (hitsCount == 1) { - parseAutodetectParamSearchHit(jobId, paramsBuilder, hits.getAt(0), errorHandler); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, msearch.request(), + ActionListener.wrap( + response -> { + for (int i = 0; i < response.getResponses().length; i++) { + MultiSearchResponse.Item itemResponse = response.getResponses()[i]; + if (itemResponse.isFailure()) { + errorHandler.accept(itemResponse.getFailure()); } else { - errorHandler.accept(new IllegalStateException("Expected hits count to be 0 or 1, but got [" - + hitsCount + "]")); + SearchResponse searchResponse = itemResponse.getResponse(); + ShardSearchFailure[] shardFailures = searchResponse.getShardFailures(); + int unavailableShards = searchResponse.getTotalShards() - searchResponse.getSuccessfulShards(); + if (shardFailures != null && shardFailures.length > 0) { + LOGGER.error("[{}] Search request returned shard failures: {}", jobId, + Arrays.toString(shardFailures)); + errorHandler.accept(new ElasticsearchException( + ExceptionsHelper.shardFailuresToErrorMsg(jobId, shardFailures))); + } else if (unavailableShards > 0) { + errorHandler.accept(new ElasticsearchException("[" + jobId + + "] Search request encountered [" + unavailableShards + "] unavailable shards")); + } else { + SearchHits hits = searchResponse.getHits(); + long hitsCount = hits.getHits().length; + if (hitsCount == 0) { + SearchRequest searchRequest = msearch.request().requests().get(i); + LOGGER.debug("Found 0 hits for [{}/{}]", searchRequest.indices(), searchRequest.types()); + } else if (hitsCount == 1) { + parseAutodetectParamSearchHit(jobId, paramsBuilder, hits.getAt(0), errorHandler); + } else { + errorHandler.accept(new IllegalStateException("Expected hits count to be 0 or 1, but got [" + + hitsCount + "]")); + } + } } } - } - } - consumer.accept(paramsBuilder.build()); - }, - errorHandler - )); + consumer.accept(paramsBuilder.build()); + }, + errorHandler + ), client::multiSearch); } private SearchRequestBuilder createDocIdSearch(String index, String id) { @@ -456,33 +466,34 @@ public class JobProvider { searchRequest.source(query.build()); searchRequest.indicesOptions(addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)); - client.search(searchRequest, ActionListener.wrap(searchResponse -> { - SearchHits hits = searchResponse.getHits(); - List results = new ArrayList<>(); - for (SearchHit hit : hits.getHits()) { - BytesReference source = hit.getSourceRef(); - try (XContentParser parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source)) { - Bucket bucket = Bucket.PARSER.apply(parser, null); - results.add(bucket); - } catch (IOException e) { - throw new ElasticsearchParseException("failed to parse bucket", e); - } - } + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest, + ActionListener.wrap(searchResponse -> { + SearchHits hits = searchResponse.getHits(); + List results = new ArrayList<>(); + for (SearchHit hit : hits.getHits()) { + BytesReference source = hit.getSourceRef(); + try (XContentParser parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source)) { + Bucket bucket = Bucket.PARSER.apply(parser, null); + results.add(bucket); 
+ } catch (IOException e) { + throw new ElasticsearchParseException("failed to parse bucket", e); + } + } - if (query.hasTimestamp() && results.isEmpty()) { - throw QueryPage.emptyQueryPage(Bucket.RESULTS_FIELD); - } + if (query.hasTimestamp() && results.isEmpty()) { + throw QueryPage.emptyQueryPage(Bucket.RESULTS_FIELD); + } - QueryPage buckets = new QueryPage<>(results, searchResponse.getHits().getTotalHits(), Bucket.RESULTS_FIELD); + QueryPage buckets = new QueryPage<>(results, searchResponse.getHits().getTotalHits(), Bucket.RESULTS_FIELD); - if (query.isExpand()) { - Iterator bucketsToExpand = buckets.results().stream() - .filter(bucket -> bucket.getBucketInfluencers().size() > 0).iterator(); - expandBuckets(jobId, query, buckets, bucketsToExpand, handler, errorHandler, client); - } else { - handler.accept(buckets); - } - }, e -> errorHandler.accept(mapAuthFailure(e, jobId, GetBucketsAction.NAME)))); + if (query.isExpand()) { + Iterator bucketsToExpand = buckets.results().stream() + .filter(bucket -> bucket.getBucketInfluencers().size() > 0).iterator(); + expandBuckets(jobId, query, buckets, bucketsToExpand, handler, errorHandler, client); + } else { + handler.accept(buckets); + } + }, e -> errorHandler.accept(mapAuthFailure(e, jobId, GetBucketsAction.NAME))), client::search); } private void expandBuckets(String jobId, BucketsQueryBuilder query, QueryPage buckets, Iterator bucketsToExpand, @@ -585,22 +596,23 @@ public class JobProvider { throw new IllegalStateException("Both categoryId and pageParams are not specified"); } searchRequest.source(sourceBuilder); - client.search(searchRequest, ActionListener.wrap(searchResponse -> { - SearchHit[] hits = searchResponse.getHits().getHits(); - List results = new ArrayList<>(hits.length); - for (SearchHit hit : hits) { - BytesReference source = hit.getSourceRef(); - try (XContentParser parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source)) { - CategoryDefinition categoryDefinition = CategoryDefinition.PARSER.apply(parser, null); - results.add(categoryDefinition); - } catch (IOException e) { - throw new ElasticsearchParseException("failed to parse category definition", e); - } - } - QueryPage result = - new QueryPage<>(results, searchResponse.getHits().getTotalHits(), CategoryDefinition.RESULTS_FIELD); - handler.accept(result); - }, e -> errorHandler.accept(mapAuthFailure(e, jobId, GetCategoriesAction.NAME)))); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest, + ActionListener.wrap(searchResponse -> { + SearchHit[] hits = searchResponse.getHits().getHits(); + List results = new ArrayList<>(hits.length); + for (SearchHit hit : hits) { + BytesReference source = hit.getSourceRef(); + try (XContentParser parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source)) { + CategoryDefinition categoryDefinition = CategoryDefinition.PARSER.apply(parser, null); + results.add(categoryDefinition); + } catch (IOException e) { + throw new ElasticsearchParseException("failed to parse category definition", e); + } + } + QueryPage result = + new QueryPage<>(results, searchResponse.getHits().getTotalHits(), CategoryDefinition.RESULTS_FIELD); + handler.accept(result); + }, e -> errorHandler.accept(mapAuthFailure(e, jobId, GetCategoriesAction.NAME))), client::search); } /** @@ -618,20 +630,21 @@ public class JobProvider { searchRequest.source(recordsQueryBuilder.build()); LOGGER.trace("ES API CALL: search all of records from index {} with query {}", 
indexName, searchSourceBuilder); - client.search(searchRequest, ActionListener.wrap(searchResponse -> { - List results = new ArrayList<>(); - for (SearchHit hit : searchResponse.getHits().getHits()) { - BytesReference source = hit.getSourceRef(); - try (XContentParser parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source)) { - results.add(AnomalyRecord.PARSER.apply(parser, null)); - } catch (IOException e) { - throw new ElasticsearchParseException("failed to parse records", e); - } - } - QueryPage queryPage = - new QueryPage<>(results, searchResponse.getHits().getTotalHits(), AnomalyRecord.RESULTS_FIELD); - handler.accept(queryPage); - }, e -> errorHandler.accept(mapAuthFailure(e, jobId, GetRecordsAction.NAME)))); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest, + ActionListener.wrap(searchResponse -> { + List results = new ArrayList<>(); + for (SearchHit hit : searchResponse.getHits().getHits()) { + BytesReference source = hit.getSourceRef(); + try (XContentParser parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source)) { + results.add(AnomalyRecord.PARSER.apply(parser, null)); + } catch (IOException e) { + throw new ElasticsearchParseException("failed to parse records", e); + } + } + QueryPage queryPage = + new QueryPage<>(results, searchResponse.getHits().getTotalHits(), AnomalyRecord.RESULTS_FIELD); + handler.accept(queryPage); + }, e -> errorHandler.accept(mapAuthFailure(e, jobId, GetRecordsAction.NAME))), client::search); } /** @@ -664,19 +677,21 @@ public class JobProvider { : new FieldSortBuilder(query.getSortField()).order(query.isSortDescending() ? SortOrder.DESC : SortOrder.ASC); searchRequest.source(new SearchSourceBuilder().query(qb).from(query.getFrom()).size(query.getSize()).sort(sb)); - client.search(searchRequest, ActionListener.wrap(response -> { - List influencers = new ArrayList<>(); - for (SearchHit hit : response.getHits().getHits()) { - BytesReference source = hit.getSourceRef(); - try (XContentParser parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source)) { - influencers.add(Influencer.PARSER.apply(parser, null)); - } catch (IOException e) { - throw new ElasticsearchParseException("failed to parse influencer", e); - } - } - QueryPage result = new QueryPage<>(influencers, response.getHits().getTotalHits(), Influencer.RESULTS_FIELD); - handler.accept(result); - }, e -> errorHandler.accept(mapAuthFailure(e, jobId, GetInfluencersAction.NAME)))); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest, + ActionListener.wrap(response -> { + List influencers = new ArrayList<>(); + for (SearchHit hit : response.getHits().getHits()) { + BytesReference source = hit.getSourceRef(); + try (XContentParser parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source)) { + influencers.add(Influencer.PARSER.apply(parser, null)); + } catch (IOException e) { + throw new ElasticsearchParseException("failed to parse influencer", e); + } + } + QueryPage result = + new QueryPage<>(influencers, response.getHits().getTotalHits(), Influencer.RESULTS_FIELD); + handler.accept(result); + }, e -> errorHandler.accept(mapAuthFailure(e, jobId, GetInfluencersAction.NAME))), client::search); } /** @@ -780,16 +795,17 @@ public class JobProvider { sourceBuilder.from(from); sourceBuilder.size(size); searchRequest.source(sourceBuilder); - client.search(searchRequest, 
ActionListener.wrap(searchResponse -> { - List results = new ArrayList<>(); - for (SearchHit hit : searchResponse.getHits().getHits()) { - results.add(ModelSnapshot.fromJson(hit.getSourceRef())); - } + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest, + ActionListener.wrap(searchResponse -> { + List results = new ArrayList<>(); + for (SearchHit hit : searchResponse.getHits().getHits()) { + results.add(ModelSnapshot.fromJson(hit.getSourceRef())); + } - QueryPage result = - new QueryPage<>(results, searchResponse.getHits().getTotalHits(), ModelSnapshot.RESULTS_FIELD); - handler.accept(result); - }, errorHandler)); + QueryPage result = + new QueryPage<>(results, searchResponse.getHits().getTotalHits(), ModelSnapshot.RESULTS_FIELD); + handler.accept(result); + }, errorHandler), client::search); } public QueryPage modelPlot(String jobId, int from, int size) { @@ -797,11 +813,13 @@ public class JobProvider { String indexName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); LOGGER.trace("ES API CALL: search model plots from index {} from {} size {}", indexName, from, size); - searchResponse = client.prepareSearch(indexName) - .setIndicesOptions(addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)) - .setQuery(new TermsQueryBuilder(Result.RESULT_TYPE.getPreferredName(), ModelPlot.RESULT_TYPE_VALUE)) - .setFrom(from).setSize(size) - .get(); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { + searchResponse = client.prepareSearch(indexName) + .setIndicesOptions(addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)) + .setQuery(new TermsQueryBuilder(Result.RESULT_TYPE.getPreferredName(), ModelPlot.RESULT_TYPE_VALUE)) + .setFrom(from).setSize(size) + .get(); + } List results = new ArrayList<>(); @@ -834,20 +852,21 @@ public class JobProvider { private void searchSingleResult(String jobId, String resultDescription, SearchRequestBuilder search, BiFunction objectParser, Consumer> handler, Consumer errorHandler, Supplier notFoundSupplier) { - search.execute(ActionListener.wrap( - response -> { - SearchHit[] hits = response.getHits().getHits(); - if (hits.length == 0) { - LOGGER.trace("No {} for job with id {}", resultDescription, jobId); - handler.accept(new Result<>(null, notFoundSupplier.get())); - } else if (hits.length == 1) { - handler.accept(new Result<>(hits[0].getIndex(), parseSearchHit(hits[0], objectParser, errorHandler))); - } else { - errorHandler.accept(new IllegalStateException("Search for unique [" + resultDescription + "] returned [" - + hits.length + "] hits even though size was 1")); - } - }, errorHandler - )); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, search.request(), + ActionListener.wrap( + response -> { + SearchHit[] hits = response.getHits().getHits(); + if (hits.length == 0) { + LOGGER.trace("No {} for job with id {}", resultDescription, jobId); + handler.accept(new Result<>(null, notFoundSupplier.get())); + } else if (hits.length == 1) { + handler.accept(new Result<>(hits[0].getIndex(), parseSearchHit(hits[0], objectParser, errorHandler))); + } else { + errorHandler.accept(new IllegalStateException("Search for unique [" + resultDescription + "] returned [" + + hits.length + "] hits even though size was 1")); + } + }, errorHandler + ), client::search); } private SearchRequestBuilder createLatestModelSizeStatsSearch(String indexName) { @@ -892,36 +911,38 @@ public class JobProvider { 
.filter(QueryBuilders.rangeQuery(Result.TIMESTAMP.getPreferredName()).gte(searchFromTimeMs)) .filter(QueryBuilders.termQuery(Result.RESULT_TYPE.getPreferredName(), ModelSizeStats.RESULT_TYPE_VALUE))) .addAggregation(AggregationBuilders.extendedStats("es").field(ModelSizeStats.MODEL_BYTES_FIELD.getPreferredName())); - search.execute(ActionListener.wrap( - response -> { - List aggregations = response.getAggregations().asList(); - if (aggregations.size() == 1) { - ExtendedStats extendedStats = (ExtendedStats) aggregations.get(0); - long count = extendedStats.getCount(); - if (count <= 0) { - // model size stats haven't changed in the last N buckets, so the latest (older) ones are established - handleLatestModelSizeStats(jobId, latestModelSizeStats, handler, errorHandler); - } else if (count == 1) { - // no need to do an extra search in the case of exactly one document being aggregated - handler.accept((long) extendedStats.getAvg()); - } else { - double coefficientOfVaration = extendedStats.getStdDeviation() / extendedStats.getAvg(); - LOGGER.trace("[{}] Coefficient of variation [{}] when calculating established memory use", jobId, - coefficientOfVaration); - // is there sufficient stability in the latest model size stats readings? - if (coefficientOfVaration <= ESTABLISHED_MEMORY_CV_THRESHOLD) { - // yes, so return the latest model size as established - handleLatestModelSizeStats(jobId, latestModelSizeStats, handler, errorHandler); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, search.request(), + ActionListener.wrap( + response -> { + List aggregations = response.getAggregations().asList(); + if (aggregations.size() == 1) { + ExtendedStats extendedStats = (ExtendedStats) aggregations.get(0); + long count = extendedStats.getCount(); + if (count <= 0) { + // model size stats haven't changed in the last N buckets, + // so the latest (older) ones are established + handleLatestModelSizeStats(jobId, latestModelSizeStats, handler, errorHandler); + } else if (count == 1) { + // no need to do an extra search in the case of exactly one document being aggregated + handler.accept((long) extendedStats.getAvg()); + } else { + double coefficientOfVaration = extendedStats.getStdDeviation() / extendedStats.getAvg(); + LOGGER.trace("[{}] Coefficient of variation [{}] when calculating established memory use", + jobId, coefficientOfVaration); + // is there sufficient stability in the latest model size stats readings? 
+ if (coefficientOfVaration <= ESTABLISHED_MEMORY_CV_THRESHOLD) { + // yes, so return the latest model size as established + handleLatestModelSizeStats(jobId, latestModelSizeStats, handler, errorHandler); + } else { + // no - we don't have an established model size + handler.accept(0L); + } + } } else { - // no - we don't have an established model size handler.accept(0L); } - } - } else { - handler.accept(0L); - } - }, errorHandler - )); + }, errorHandler + ), client::search); } else { LOGGER.trace("[{}] Insufficient history to calculate established memory use", jobId); handler.accept(0L); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersister.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersister.java index b23b5ec65be..661c7155795 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersister.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersister.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.Client; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.ml.job.process.normalizer.BucketNormalizable; @@ -23,6 +24,8 @@ import java.io.IOException; import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.ml.job.persistence.ElasticsearchMappings.DOC_TYPE; @@ -98,9 +101,11 @@ public class JobRenormalizedResultsPersister extends AbstractComponent { } logger.trace("[{}] ES API CALL: bulk request with {} actions", jobId, bulkRequest.numberOfActions()); - BulkResponse addRecordsResponse = client.bulk(bulkRequest).actionGet(); - if (addRecordsResponse.hasFailures()) { - logger.error("[{}] Bulk index of results has errors: {}", jobId, addRecordsResponse.buildFailureMessage()); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { + BulkResponse addRecordsResponse = client.bulk(bulkRequest).actionGet(); + if (addRecordsResponse.hasFailures()) { + logger.error("[{}] Bulk index of results has errors: {}", jobId, addRecordsResponse.buildFailureMessage()); + } } bulkRequest = new BulkRequest(); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java index e54bf36588f..d88f2390241 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSizeStats; @@ -40,6 +41,9 @@ import java.util.List; import java.util.Objects; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.ml.job.persistence.ElasticsearchMappings.DOC_TYPE; /** @@ -187,9 +191,11 @@ public class JobResultsPersister extends AbstractComponent { } logger.trace("[{}] ES API CALL: bulk request with {} actions", jobId, bulkRequest.numberOfActions()); - BulkResponse addRecordsResponse = client.bulk(bulkRequest).actionGet(); - if (addRecordsResponse.hasFailures()) { - logger.error("[{}] Bulk index of results has errors: {}", jobId, addRecordsResponse.buildFailureMessage()); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { + BulkResponse addRecordsResponse = client.bulk(bulkRequest).actionGet(); + if (addRecordsResponse.hasFailures()) { + logger.error("[{}] Bulk index of results has errors: {}", jobId, addRecordsResponse.buildFailureMessage()); + } } bulkRequest = new BulkRequest(); @@ -284,7 +290,9 @@ public class JobResultsPersister extends AbstractComponent { logger.trace("[{}] ES API CALL: refresh index {}", jobId, indexName); RefreshRequest refreshRequest = new RefreshRequest(indexName); refreshRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); - client.admin().indices().refresh(refreshRequest).actionGet(); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { + client.admin().indices().refresh(refreshRequest).actionGet(); + } } /** @@ -299,7 +307,9 @@ public class JobResultsPersister extends AbstractComponent { logger.trace("[{}] ES API CALL: refresh index {}", jobId, indexName); RefreshRequest refreshRequest = new RefreshRequest(indexName); refreshRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); - client.admin().indices().refresh(refreshRequest).actionGet(); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { + client.admin().indices().refresh(refreshRequest).actionGet(); + } } private XContentBuilder toXContentBuilder(ToXContent obj) throws IOException { @@ -337,7 +347,7 @@ public class JobResultsPersister extends AbstractComponent { try (XContentBuilder content = toXContentBuilder(object)) { IndexRequest indexRequest = new IndexRequest(indexName, DOC_TYPE, id).source(content).setRefreshPolicy(refreshPolicy); - client.index(indexRequest, listener); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, indexRequest, listener, client::index); } catch (IOException e) { logger.error(new ParameterizedMessage("[{}] Error writing [{}]", jobId, (id == null) ? 
"auto-generated ID" : id), e); IndexResponse.Builder notCreatedResponse = new IndexResponse.Builder(); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobStorageDeletionTask.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobStorageDeletionTask.java index a4b7d1584f3..14884ff07e4 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobStorageDeletionTask.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobStorageDeletionTask.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.ml.job.persistence; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.search.SearchRequest; @@ -39,6 +41,9 @@ import java.util.List; import java.util.Set; import java.util.function.Consumer; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + public class JobStorageDeletionTask extends Task { private final Logger logger; @@ -88,7 +93,7 @@ public class JobStorageDeletionTask extends Task { request.setAbortOnVersionConflict(false); request.setRefresh(true); - client.execute(DeleteByQueryAction.INSTANCE, request, dbqHandler); + executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, dbqHandler); }, failureHandler); @@ -119,7 +124,7 @@ public class JobStorageDeletionTask extends Task { request.setAbortOnVersionConflict(false); request.setRefresh(true); - client.execute(DeleteByQueryAction.INSTANCE, request, ActionListener.wrap( + executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, ActionListener.wrap( response -> finishedHandler.onResponse(true), e -> { // It's not a problem for us if the index wasn't found - it's equivalent to document not found @@ -155,7 +160,7 @@ public class JobStorageDeletionTask extends Task { request.setAbortOnVersionConflict(false); request.setRefresh(true); - client.execute(DeleteByQueryAction.INSTANCE, request, ActionListener.wrap( + executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, ActionListener.wrap( response -> { // If we successfully deleted a document try the next one; if not we're done if (response.getDeleted() > 0) { @@ -183,27 +188,30 @@ public class JobStorageDeletionTask extends Task { // first find the concrete indices associated with the aliases GetAliasesRequest aliasesRequest = new GetAliasesRequest().aliases(readAliasName, writeAliasName) .indicesOptions(IndicesOptions.lenientExpandOpen()); - client.admin().indices().getAliases(aliasesRequest, ActionListener.wrap( - getAliasesResponse -> { - Set aliases = new HashSet<>(); - getAliasesResponse.getAliases().valuesIt().forEachRemaining( - metaDataList -> metaDataList.forEach(metadata -> aliases.add(metadata.getAlias()))); - if (aliases.isEmpty()) { - // don't error if the job's aliases have already been deleted - carry on and delete the rest of the job's data - finishedHandler.onResponse(true); - return; - } - List indices = new ArrayList<>(); - 
getAliasesResponse.getAliases().keysIt().forEachRemaining(indices::add); - // remove the aliases from the concrete indices found in the first step - IndicesAliasesRequest removeRequest = new IndicesAliasesRequest().addAliasAction( - IndicesAliasesRequest.AliasActions.remove() - .aliases(aliases.toArray(new String[aliases.size()])) - .indices(indices.toArray(new String[indices.size()]))); - client.admin().indices().aliases(removeRequest, ActionListener.wrap( - removeResponse -> finishedHandler.onResponse(true), - finishedHandler::onFailure)); - }, - finishedHandler::onFailure)); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, aliasesRequest, + ActionListener.wrap( + getAliasesResponse -> { + Set aliases = new HashSet<>(); + getAliasesResponse.getAliases().valuesIt().forEachRemaining( + metaDataList -> metaDataList.forEach(metadata -> aliases.add(metadata.getAlias()))); + if (aliases.isEmpty()) { + // don't error if the job's aliases have already been deleted - carry on and delete the + // rest of the job's data + finishedHandler.onResponse(true); + return; + } + List indices = new ArrayList<>(); + getAliasesResponse.getAliases().keysIt().forEachRemaining(indices::add); + // remove the aliases from the concrete indices found in the first step + IndicesAliasesRequest removeRequest = new IndicesAliasesRequest().addAliasAction( + IndicesAliasesRequest.AliasActions.remove() + .aliases(aliases.toArray(new String[aliases.size()])) + .indices(indices.toArray(new String[indices.size()]))); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, removeRequest, + ActionListener.wrap(removeResponse -> finishedHandler.onResponse(true), + finishedHandler::onFailure), + client.admin().indices()::aliases); + }, + finishedHandler::onFailure), client.admin().indices()::getAliases); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamer.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamer.java index 81b818e0ca2..ada27bf9f9a 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamer.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamer.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.xpack.ml.job.process.autodetect.state.CategorizerState; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; @@ -19,6 +20,9 @@ import java.io.IOException; import java.io.OutputStream; import java.util.Objects; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; + /** * A {@code StateStreamer} fetches the various state documents and * writes them into a stream. 
It allows cancellation via its @@ -66,13 +70,15 @@ public class StateStreamer { LOGGER.trace("ES API CALL: get ID {} from index {}", stateDocId, indexName); - GetResponse stateResponse = client.prepareGet(indexName, ElasticsearchMappings.DOC_TYPE, stateDocId).get(); - if (!stateResponse.isExists()) { - LOGGER.error("Expected {} documents for model state for {} snapshot {} but failed to find {}", - modelSnapshot.getSnapshotDocCount(), jobId, modelSnapshot.getSnapshotId(), stateDocId); - break; + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { + GetResponse stateResponse = client.prepareGet(indexName, ElasticsearchMappings.DOC_TYPE, stateDocId).get(); + if (!stateResponse.isExists()) { + LOGGER.error("Expected {} documents for model state for {} snapshot {} but failed to find {}", + modelSnapshot.getSnapshotDocCount(), jobId, modelSnapshot.getSnapshotId(), stateDocId); + break; + } + writeStateToStream(stateResponse.getSourceAsBytesRef(), restoreStream); } - writeStateToStream(stateResponse.getSourceAsBytesRef(), restoreStream); } // Secondly try to restore categorizer state. This must come after model state because that's @@ -88,11 +94,13 @@ public class StateStreamer { LOGGER.trace("ES API CALL: get ID {} from index {}", docId, indexName); - GetResponse stateResponse = client.prepareGet(indexName, ElasticsearchMappings.DOC_TYPE, docId).get(); - if (!stateResponse.isExists()) { - break; + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { + GetResponse stateResponse = client.prepareGet(indexName, ElasticsearchMappings.DOC_TYPE, docId).get(); + if (!stateResponse.isExists()) { + break; + } + writeStateToStream(stateResponse.getSourceAsBytesRef(), restoreStream); } - writeStateToStream(stateResponse.getSourceAsBytesRef(), restoreStream); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java index 52c85eecd20..c5952a6999f 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java @@ -42,6 +42,9 @@ import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + /** * A runnable class that reads the autodetect process output in the * {@link #process(AutodetectProcess)} method and persists parsed @@ -299,7 +302,7 @@ public class AutoDetectResultProcessor { return; } - client.execute(UpdateJobAction.INSTANCE, updateRequest, new ActionListener() { + executeAsyncWithOrigin(client, ML_ORIGIN, UpdateJobAction.INSTANCE, updateRequest, new ActionListener() { @Override public void onResponse(PutJobAction.Response response) { updateModelSnapshotIdSemaphore.release(); @@ -309,7 +312,8 @@ public class AutoDetectResultProcessor { @Override public void onFailure(Exception e) { updateModelSnapshotIdSemaphore.release(); - LOGGER.error("[" + jobId + "] Failed to update job with new model snapshot id [" + modelSnapshot.getSnapshotId() + "]", e); + LOGGER.error("[" + jobId + "] Failed to update job with new model snapshot id [" + + 
modelSnapshot.getSnapshotId() + "]", e); } }); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/StateProcessor.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/StateProcessor.java index 2c9c7bf564b..65902d22a42 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/StateProcessor.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/StateProcessor.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.CompositeBytesReference; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.ml.job.persistence.ElasticsearchMappings; @@ -21,6 +22,9 @@ import java.io.InputStream; import java.util.ArrayList; import java.util.List; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; + /** * Reads the autodetect state and persists via a bulk request */ @@ -91,7 +95,9 @@ public class StateProcessor extends AbstractComponent { BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(bytes, AnomalyDetectorsIndex.jobStateIndexName(), ElasticsearchMappings.DOC_TYPE, XContentType.JSON); if (bulkRequest.numberOfActions() > 0) { - client.bulk(bulkRequest).actionGet(); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { + client.bulk(bulkRequest).actionGet(); + } } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java index 2f3a75fd37d..e83adc109a4 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java @@ -33,6 +33,9 @@ import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.Objects; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + /** * Removes all results that have expired the configured retention time * of their respective job. 
A result is deleted if its timestamp is earlier @@ -62,7 +65,7 @@ public class ExpiredResultsRemover extends AbstractExpiredJobDataRemover { LOGGER.debug("Removing results of job [{}] that have a timestamp before [{}]", job.getId(), cutoffEpochMs); DeleteByQueryRequest request = createDBQRequest(job, cutoffEpochMs); - client.execute(DeleteByQueryAction.INSTANCE, request, new ActionListener() { + executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, new ActionListener() { @Override public void onResponse(BulkByScrollResponse bulkByScrollResponse) { try { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/notifications/Auditor.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/notifications/Auditor.java index 30c8fb1f3d5..77b093348b0 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/notifications/Auditor.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/notifications/Auditor.java @@ -21,6 +21,8 @@ import java.io.IOException; import java.util.Objects; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; public class Auditor { @@ -51,17 +53,17 @@ public class Auditor { IndexRequest indexRequest = new IndexRequest(NOTIFICATIONS_INDEX, type); indexRequest.source(toXContentBuilder(toXContent)); indexRequest.timeout(TimeValue.timeValueSeconds(5)); - client.index(indexRequest, new ActionListener() { - @Override - public void onResponse(IndexResponse indexResponse) { - LOGGER.trace("Successfully persisted {}", type); - } + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, indexRequest, new ActionListener() { + @Override + public void onResponse(IndexResponse indexResponse) { + LOGGER.trace("Successfully persisted {}", type); + } - @Override - public void onFailure(Exception e) { - LOGGER.debug(new ParameterizedMessage("Error writing {}", new Object[]{type}, e)); - } - }); + @Override + public void onFailure(Exception e) { + LOGGER.debug(new ParameterizedMessage("Error writing {}", new Object[]{type}, e)); + } + }, client::index); } private XContentBuilder toXContentBuilder(ToXContent toXContent) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java index da6eea84fc4..3e3bc5d0af7 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.monitoring; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; @@ -43,7 +45,6 @@ import org.elasticsearch.xpack.monitoring.exporter.Exporters; import org.elasticsearch.xpack.monitoring.exporter.http.HttpExporter; import org.elasticsearch.xpack.monitoring.exporter.local.LocalExporter; import org.elasticsearch.xpack.monitoring.rest.action.RestMonitoringBulkAction; -import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.ssl.SSLService; import java.util.ArrayList; @@ -137,7 +138,7 @@ public class Monitoring implements ActionPlugin 
{ return modules; } - public Collection createComponents(InternalClient client, ThreadPool threadPool, ClusterService clusterService, + public Collection createComponents(Client client, ThreadPool threadPool, ClusterService clusterService, LicenseService licenseService, SSLService sslService) { if (enabled == false || tribeNode) { return Collections.emptyList(); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollector.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollector.java index 05756236a5c..a5a2c35b905 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollector.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollector.java @@ -5,11 +5,13 @@ */ package org.elasticsearch.xpack.monitoring.collector.ml; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.XPackClient; import org.elasticsearch.xpack.XPackSettings; @@ -17,11 +19,13 @@ import org.elasticsearch.xpack.ml.action.GetJobsStatsAction; import org.elasticsearch.xpack.ml.client.MachineLearningClient; import org.elasticsearch.xpack.monitoring.collector.Collector; import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc; -import org.elasticsearch.xpack.security.InternalClient; import java.util.List; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.ClientHelper.MONITORING_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; + /** * Collector for Machine Learning Job Stats. *

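Note: where a call blocks instead of taking a listener (the bulk, refresh, and get calls in the persisters above, and JobStatsCollector.doCollect in the hunk that follows), the patch wraps the call in a try-with-resources block around stashWithOrigin instead of wrapping a listener. A minimal sketch of that shape, assuming a caller-supplied bulk request; the class and method names here are invented for illustration.

    import org.elasticsearch.action.bulk.BulkRequest;
    import org.elasticsearch.action.bulk.BulkResponse;
    import org.elasticsearch.client.Client;
    import org.elasticsearch.common.util.concurrent.ThreadContext;

    import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN;
    import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin;

    class OriginBlockingCallSketch {
        // The ML origin only exists inside the try block; closing the StoredContext restores whatever was there before.
        static BulkResponse bulkWithMlOrigin(Client client, BulkRequest bulkRequest) {
            try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) {
                return client.bulk(bulkRequest).actionGet();
            }
        }
    }

Only the origin transient is set here; the SecurityActionFilter change further down is what maps that origin back to the appropriate internal user when the request is authorized.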
@@ -37,18 +41,20 @@ public class JobStatsCollector extends Collector { */ public static final Setting JOB_STATS_TIMEOUT = collectionTimeoutSetting("ml.job.stats.timeout"); + private final ThreadContext threadContext; private final MachineLearningClient client; public JobStatsCollector(final Settings settings, final ClusterService clusterService, - final XPackLicenseState licenseState, final InternalClient client) { - this(settings, clusterService, licenseState, new XPackClient(client).machineLearning()); + final XPackLicenseState licenseState, final Client client) { + this(settings, clusterService, licenseState, new XPackClient(client).machineLearning(), client.threadPool().getThreadContext()); } JobStatsCollector(final Settings settings, final ClusterService clusterService, - final XPackLicenseState licenseState, final MachineLearningClient client) { + final XPackLicenseState licenseState, final MachineLearningClient client, final ThreadContext threadContext) { super(settings, JobStatsMonitoringDoc.TYPE, clusterService, JOB_STATS_TIMEOUT, licenseState); this.client = client; + this.threadContext = threadContext; } @Override @@ -62,16 +68,18 @@ public class JobStatsCollector extends Collector { @Override protected List doCollect(final MonitoringDoc.Node node, final long interval) throws Exception { // fetch details about all jobs - final GetJobsStatsAction.Response jobs = - client.getJobsStats(new GetJobsStatsAction.Request(MetaData.ALL)) - .actionGet(getCollectionTimeout()); + try (ThreadContext.StoredContext ignore = stashWithOrigin(threadContext, MONITORING_ORIGIN)) { + final GetJobsStatsAction.Response jobs = + client.getJobsStats(new GetJobsStatsAction.Request(MetaData.ALL)) + .actionGet(getCollectionTimeout()); - final long timestamp = timestamp(); - final String clusterUuid = clusterUUID(); + final long timestamp = timestamp(); + final String clusterUuid = clusterUUID(); - return jobs.getResponse().results().stream() - .map(jobStats -> new JobStatsMonitoringDoc(clusterUuid, timestamp, interval, node, jobStats)) - .collect(Collectors.toList()); + return jobs.getResponse().results().stream() + .map(jobStats -> new JobStatsMonitoringDoc(clusterUuid, timestamp, interval, node, jobStats)) + .collect(Collectors.toList()); + } } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalBulk.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalBulk.java index 016ff4fe7c6..f925f4df816 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalBulk.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalBulk.java @@ -9,7 +9,9 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; @@ -18,12 +20,14 @@ import org.elasticsearch.xpack.monitoring.exporter.ExportBulk; import org.elasticsearch.xpack.monitoring.exporter.ExportException; import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc; import org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils; -import org.elasticsearch.xpack.security.InternalClient; import 
org.joda.time.format.DateTimeFormatter; import java.util.Arrays; import java.util.Collection; +import static org.elasticsearch.xpack.ClientHelper.MONITORING_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + /** * LocalBulk exports monitoring data in the local cluster using bulk requests. Its usage is not thread safe since the * {@link LocalBulk#add(Collection)}, {@link LocalBulk#flush(org.elasticsearch.action.ActionListener)} and @@ -32,14 +36,14 @@ import java.util.Collection; public class LocalBulk extends ExportBulk { private final Logger logger; - private final InternalClient client; + private final Client client; private final DateTimeFormatter formatter; private final boolean usePipeline; private BulkRequestBuilder requestBuilder; - LocalBulk(String name, Logger logger, InternalClient client, DateTimeFormatter dateTimeFormatter, boolean usePipeline) { + LocalBulk(String name, Logger logger, Client client, DateTimeFormatter dateTimeFormatter, boolean usePipeline) { super(name, client.threadPool().getThreadContext()); this.logger = logger; this.client = client; @@ -101,13 +105,15 @@ public class LocalBulk extends ExportBulk { } else { try { logger.trace("exporter [{}] - exporting {} documents", name, requestBuilder.numberOfActions()); - requestBuilder.execute(ActionListener.wrap(bulkResponse -> { - if (bulkResponse.hasFailures()) { - throwExportException(bulkResponse.getItems(), listener); - } else { - listener.onResponse(null); - } - }, e -> listener.onFailure(new ExportException("failed to flush export bulk [{}]", e, name)))); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), MONITORING_ORIGIN, requestBuilder.request(), + ActionListener.wrap(bulkResponse -> { + if (bulkResponse.hasFailures()) { + throwExportException(bulkResponse.getItems(), listener); + } else { + listener.onResponse(null); + } + }, e -> listener.onFailure(new ExportException("failed to flush export bulk [{}]", e, name))), + client::bulk); } finally { requestBuilder = null; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java index 4881977d7d3..42fed39ff4f 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRespo import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.ingest.WritePipelineResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; @@ -31,6 +32,7 @@ import org.elasticsearch.common.inject.internal.Nullable; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.IndexNotFoundException; @@ -44,12 +46,12 @@ import org.elasticsearch.xpack.monitoring.cleaner.CleanerService; import org.elasticsearch.xpack.monitoring.exporter.ClusterAlertsUtil; import 
org.elasticsearch.xpack.monitoring.exporter.Exporter; import org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils; -import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.watcher.client.WatcherClient; import org.elasticsearch.xpack.watcher.transport.actions.delete.DeleteWatchRequest; import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchRequest; import org.elasticsearch.xpack.watcher.transport.actions.get.GetWatchResponse; import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchRequest; +import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse; import org.elasticsearch.xpack.watcher.watch.Watch; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -69,6 +71,9 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import static org.elasticsearch.common.Strings.collectionToCommaDelimitedString; +import static org.elasticsearch.xpack.ClientHelper.MONITORING_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.monitoring.Monitoring.CLEAN_WATCHER_HISTORY; import static org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils.LAST_UPDATED_VERSION; import static org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils.PIPELINE_IDS; @@ -82,7 +87,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle public static final String TYPE = "local"; - private final InternalClient client; + private final Client client; private final ClusterService clusterService; private final XPackLicenseState licenseState; private final CleanerService cleanerService; @@ -94,7 +99,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle private final AtomicBoolean waitedForSetup = new AtomicBoolean(false); private final AtomicBoolean watcherSetup = new AtomicBoolean(false); - public LocalExporter(Exporter.Config config, InternalClient client, CleanerService cleanerService) { + public LocalExporter(Exporter.Config config, Client client, CleanerService cleanerService) { super(config); this.client = client; this.clusterService = config.clusterService(); @@ -306,14 +311,16 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle if (watches != null && watches.allPrimaryShardsActive() == false) { logger.trace("cannot manage cluster alerts because [.watches] index is not allocated"); } else if ((watches == null || indexExists) && watcherSetup.compareAndSet(false, true)) { - installClusterAlerts(indexExists, asyncActions, pendingResponses); + getClusterAlertsInstallationAsyncActions(indexExists, asyncActions, pendingResponses); } } if (asyncActions.size() > 0) { if (installingSomething.compareAndSet(false, true)) { pendingResponses.set(asyncActions.size()); - asyncActions.forEach(Runnable::run); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), MONITORING_ORIGIN)) { + asyncActions.forEach(Runnable::run); + } } else { // let the cluster catch up since requested installations may be ongoing return false; @@ -383,7 +390,8 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle logger.debug("installing ingest pipeline [{}]", pipelineName); - client.admin().cluster().putPipeline(request, listener); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), 
MONITORING_ORIGIN, request, listener, + client.admin().cluster()::putPipeline); } private boolean hasTemplate(final ClusterState clusterState, final String templateName) { @@ -392,14 +400,15 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle return template != null && hasValidVersion(template.getVersion(), LAST_UPDATED_VERSION); } + // FIXME this should use the IndexTemplateMetaDataUpgrader private void putTemplate(String template, String source, ActionListener listener) { logger.debug("installing template [{}]", template); PutIndexTemplateRequest request = new PutIndexTemplateRequest(template).source(source, XContentType.JSON); assert !Thread.currentThread().isInterrupted() : "current thread has been interrupted before putting index template!!!"; - // async call, so we won't block cluster event thread - client.admin().indices().putTemplate(request, listener); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), MONITORING_ORIGIN, request, listener, + client.admin().indices()::putTemplate); } /** @@ -419,7 +428,8 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle * @param asyncActions Asynchronous actions are added to for each Watch. * @param pendingResponses Pending response countdown we use to track completion. */ - private void installClusterAlerts(final boolean indexExists, final List asyncActions, final AtomicInteger pendingResponses) { + private void getClusterAlertsInstallationAsyncActions(final boolean indexExists, final List asyncActions, + final AtomicInteger pendingResponses) { final XPackClient xpackClient = new XPackClient(client); final WatcherClient watcher = xpackClient.watcher(); final boolean canAddWatches = licenseState.isMonitoringClusterAlertsAllowed(); @@ -453,8 +463,10 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle logger.trace("adding monitoring watch [{}]", uniqueWatchId); - watcher.putWatch(new PutWatchRequest(uniqueWatchId, new BytesArray(watch), XContentType.JSON), - new ResponseActionListener<>("watch", uniqueWatchId, pendingResponses, watcherSetup)); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), MONITORING_ORIGIN, + new PutWatchRequest(uniqueWatchId, new BytesArray(watch), XContentType.JSON), + new ResponseActionListener("watch", uniqueWatchId, pendingResponses, watcherSetup), + watcher::putWatch); } /** @@ -531,24 +543,25 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle private void deleteIndices(Set indices) { logger.trace("deleting {} indices: [{}]", indices.size(), collectionToCommaDelimitedString(indices)); - client.admin().indices().delete(new DeleteIndexRequest(indices.toArray(new String[indices.size()])), + final DeleteIndexRequest request = new DeleteIndexRequest(indices.toArray(new String[indices.size()])); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), MONITORING_ORIGIN, request, new ActionListener() { - @Override - public void onResponse(DeleteIndexResponse response) { - if (response.isAcknowledged()) { - logger.debug("{} indices deleted", indices.size()); - } else { - // Probably means that the delete request has timed out, - // the indices will survive until the next clean up. 
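Note: the LocalExporter hunks in this area show that the final argument of the five-argument helper is just a BiConsumer, so admin-client methods such as client.admin().cluster()::putPipeline, client.admin().indices()::putTemplate, or client.admin().indices()::delete slot in directly. A rough sketch under that assumption; the index name below is invented for the example.

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
    import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
    import org.elasticsearch.client.Client;

    import static org.elasticsearch.xpack.ClientHelper.MONITORING_ORIGIN;
    import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin;

    class OriginAdminCallSketch {
        // Deletes an index as the monitoring origin; the admin-client method reference is the BiConsumer.
        static void deleteWithMonitoringOrigin(Client client, ActionListener<DeleteIndexResponse> listener) {
            DeleteIndexRequest request = new DeleteIndexRequest(".monitoring-example"); // hypothetical index name
            executeAsyncWithOrigin(client.threadPool().getThreadContext(), MONITORING_ORIGIN, request, listener,
                    client.admin().indices()::delete);
        }
    }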
- logger.warn("deletion of {} indices wasn't acknowledged", indices.size()); - } - } + @Override + public void onResponse(DeleteIndexResponse response) { + if (response.isAcknowledged()) { + logger.debug("{} indices deleted", indices.size()); + } else { + // Probably means that the delete request has timed out, + // the indices will survive until the next clean up. + logger.warn("deletion of {} indices wasn't acknowledged", indices.size()); + } + } - @Override - public void onFailure(Exception e) { - logger.error("failed to delete indices", e); - } - }); + @Override + public void onFailure(Exception e) { + logger.error("failed to delete indices", e); + } + }, client.admin().indices()::delete); } enum State { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksService.java b/plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksService.java index 8026d6fe2ac..57008808b85 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/persistent/PersistentTasksService.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.persistent; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -21,20 +22,22 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.PersistentTask; -import org.elasticsearch.xpack.security.InternalClient; import java.util.function.Predicate; +import static org.elasticsearch.xpack.ClientHelper.PERSISTENT_TASK_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + /** * This service is used by persistent actions to propagate changes in the action state and notify about completion */ public class PersistentTasksService extends AbstractComponent { - private final InternalClient client; + private final Client client; private final ClusterService clusterService; private final ThreadPool threadPool; - public PersistentTasksService(Settings settings, ClusterService clusterService, ThreadPool threadPool, InternalClient client) { + public PersistentTasksService(Settings settings, ClusterService clusterService, ThreadPool threadPool, Client client) { super(settings); this.client = client; this.clusterService = clusterService; @@ -50,8 +53,8 @@ public class PersistentTasksService extends AbstractComponent { StartPersistentTaskAction.Request createPersistentActionRequest = new StartPersistentTaskAction.Request(taskId, taskName, params); try { - client.execute(StartPersistentTaskAction.INSTANCE, createPersistentActionRequest, ActionListener.wrap( - o -> listener.onResponse((PersistentTask) o.getTask()), listener::onFailure)); + executeAsyncWithOrigin(client, PERSISTENT_TASK_ORIGIN, StartPersistentTaskAction.INSTANCE, createPersistentActionRequest, + ActionListener.wrap(o -> listener.onResponse((PersistentTask) o.getTask()), listener::onFailure)); } catch (Exception e) { listener.onFailure(e); } @@ -64,7 +67,7 @@ public class PersistentTasksService extends AbstractComponent { ActionListener> listener) { 
CompletionPersistentTaskAction.Request restartRequest = new CompletionPersistentTaskAction.Request(taskId, allocationId, failure); try { - client.execute(CompletionPersistentTaskAction.INSTANCE, restartRequest, + executeAsyncWithOrigin(client, PERSISTENT_TASK_ORIGIN, CompletionPersistentTaskAction.INSTANCE, restartRequest, ActionListener.wrap(o -> listener.onResponse(o.getTask()), listener::onFailure)); } catch (Exception e) { listener.onFailure(e); @@ -80,7 +83,8 @@ public class PersistentTasksService extends AbstractComponent { cancelTasksRequest.setTaskId(new TaskId(localNode.getId(), taskId)); cancelTasksRequest.setReason("persistent action was removed"); try { - client.admin().cluster().cancelTasks(cancelTasksRequest, listener); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), PERSISTENT_TASK_ORIGIN, cancelTasksRequest, listener, + client.admin().cluster()::cancelTasks); } catch (Exception e) { listener.onFailure(e); } @@ -96,8 +100,8 @@ public class PersistentTasksService extends AbstractComponent { UpdatePersistentTaskStatusAction.Request updateStatusRequest = new UpdatePersistentTaskStatusAction.Request(taskId, allocationId, status); try { - client.execute(UpdatePersistentTaskStatusAction.INSTANCE, updateStatusRequest, ActionListener.wrap( - o -> listener.onResponse(o.getTask()), listener::onFailure)); + executeAsyncWithOrigin(client, PERSISTENT_TASK_ORIGIN, UpdatePersistentTaskStatusAction.INSTANCE, updateStatusRequest, + ActionListener.wrap(o -> listener.onResponse(o.getTask()), listener::onFailure)); } catch (Exception e) { listener.onFailure(e); } @@ -109,8 +113,8 @@ public class PersistentTasksService extends AbstractComponent { public void cancelPersistentTask(String taskId, ActionListener> listener) { RemovePersistentTaskAction.Request removeRequest = new RemovePersistentTaskAction.Request(taskId); try { - client.execute(RemovePersistentTaskAction.INSTANCE, removeRequest, ActionListener.wrap(o -> listener.onResponse(o.getTask()), - listener::onFailure)); + executeAsyncWithOrigin(client, PERSISTENT_TASK_ORIGIN, RemovePersistentTaskAction.INSTANCE, removeRequest, + ActionListener.wrap(o -> listener.onResponse(o.getTask()), listener::onFailure)); } catch (Exception e) { listener.onFailure(e); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/InternalSecurityClient.java b/plugin/src/main/java/org/elasticsearch/xpack/security/InternalSecurityClient.java deleted file mode 100644 index e9e215615af..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/InternalSecurityClient.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security; - -import org.elasticsearch.client.Client; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.security.user.XPackSecurityUser; - -/** - * A special filter client for internal usage by security to modify the security index. - * - * The {@link XPackSecurityUser} user is added to the execution context before each action is executed. 
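Note: for requests that previously went through client.execute(Action, request, listener), as in the PersistentTasksService calls above and ExpiredTokenRemover below, the overload of executeAsyncWithOrigin that takes the Action instance is the drop-in replacement. A hedged sketch of that form; the pairing of DeleteByQueryAction with the security origin mirrors ExpiredTokenRemover, but the class and method here are illustrative only.

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.client.Client;
    import org.elasticsearch.index.reindex.BulkByScrollResponse;
    import org.elasticsearch.index.reindex.DeleteByQueryAction;
    import org.elasticsearch.index.reindex.DeleteByQueryRequest;

    import static org.elasticsearch.xpack.ClientHelper.SECURITY_ORIGIN;
    import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin;

    class OriginTransportActionSketch {
        // Same effect as client.execute(DeleteByQueryAction.INSTANCE, request, listener),
        // but the request is dispatched with the security origin set in the thread context.
        static void deleteByQueryWithSecurityOrigin(Client client, DeleteByQueryRequest request,
                                                    ActionListener<BulkByScrollResponse> listener) {
            executeAsyncWithOrigin(client, SECURITY_ORIGIN, DeleteByQueryAction.INSTANCE, request, listener);
        }
    }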
- */ -public class InternalSecurityClient extends InternalClient { - - public InternalSecurityClient(Settings settings, ThreadPool threadPool, Client in) { - super(settings, threadPool, in, XPackSecurityUser.INSTANCE); - } -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/InternalClient.java b/plugin/src/main/java/org/elasticsearch/xpack/security/ScrollHelper.java similarity index 57% rename from plugin/src/main/java/org/elasticsearch/xpack/security/InternalClient.java rename to plugin/src/main/java/org/elasticsearch/xpack/security/ScrollHelper.java index dd272c16fd0..971c8600814 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/InternalClient.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/ScrollHelper.java @@ -5,95 +5,27 @@ */ package org.elasticsearch.xpack.security; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.Client; -import org.elasticsearch.client.FilterClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.node.Node; import org.elasticsearch.search.SearchHit; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.XPackSettings; -import org.elasticsearch.xpack.security.authc.Authentication; -import org.elasticsearch.xpack.security.user.User; -import org.elasticsearch.xpack.security.user.XPackUser; -import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.function.Consumer; import java.util.function.Function; -import java.util.function.Supplier; -/** - * A special filter client for internal node communication which adds the internal xpack user to the headers. - * An optionally secured client for internal node communication. - * - * When secured, the XPack user is added to the execution context before each action is executed. - */ -public class InternalClient extends FilterClient { +public final class ScrollHelper { - private final String nodeName; - private final boolean securityEnabled; - private final User user; - - /** - * Constructs an InternalClient. - * If security is enabled the client is secure. Otherwise this client is a passthrough. 
- */ - public InternalClient(Settings settings, ThreadPool threadPool, Client in) { - this(settings, threadPool, in, XPackUser.INSTANCE); - } - - InternalClient(Settings settings, ThreadPool threadPool, Client in, User user) { - super(settings, threadPool, in); - this.nodeName = Node.NODE_NAME_SETTING.get(settings); - this.securityEnabled = XPackSettings.SECURITY_ENABLED.get(settings); - this.user = user; - } - - @Override - protected > void doExecute( - Action action, Request request, ActionListener listener) { - - if (securityEnabled) { - final ThreadContext threadContext = threadPool().getThreadContext(); - final Supplier storedContext = threadContext.newRestorableContext(true); - // we need to preserve the context here otherwise we execute the response with the XPack user which we can cause problems - // since we expect the callback to run with the authenticated user calling the doExecute method - try (ThreadContext.StoredContext ctx = threadContext.stashContext()) { - processContext(threadContext); - super.doExecute(action, request, new ContextPreservingActionListener<>(storedContext, listener)); - } - } else { - super.doExecute(action, request, listener); - } - } - - protected void processContext(ThreadContext threadContext) { - try { - Authentication authentication = new Authentication(user, - new Authentication.RealmRef("__attach", "__attach", nodeName), null); - authentication.writeToContext(threadContext); - } catch (IOException ioe) { - throw new ElasticsearchException("failed to attach internal user to request", ioe); - } - } + private ScrollHelper() {} /** * This method fetches all results for the given search request, parses them using the given hit parser and calls the @@ -114,7 +46,8 @@ public class InternalClient extends FilterClient { }; // This function is MADNESS! But it works, don't think about it too hard... 
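Note: the ScrollHelper hunk that follows keeps the raw client.search call but wraps the listener in a ContextPreservingActionListener built from newRestorableContext(true), so each scroll callback runs under the caller's stored context rather than whatever context the response thread happens to carry. A simplified sketch of just that wrapping, assuming a caller-supplied listener; the class and method names are placeholders.

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.search.SearchRequest;
    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.action.support.ContextPreservingActionListener;
    import org.elasticsearch.client.Client;
    import org.elasticsearch.common.util.concurrent.ThreadContext;

    class ContextPreservingSearchSketch {
        // newRestorableContext(true) captures the current context (keeping response headers);
        // the wrapper restores it around onResponse and onFailure.
        static void searchPreservingContext(Client client, SearchRequest request, ActionListener<SearchResponse> listener) {
            ThreadContext threadContext = client.threadPool().getThreadContext();
            client.search(request, new ContextPreservingActionListener<>(threadContext.newRestorableContext(true), listener));
        }
    }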
// simon edit: just watch this if you got this far https://www.youtube.com/watch?v=W-lF106Dgk8 - client.search(request, new ActionListener() { + client.search(request, new ContextPreservingActionListener<>(client.threadPool().getThreadContext().newRestorableContext(true), + new ActionListener() { private volatile SearchResponse lastResponse = null; @Override @@ -163,6 +96,6 @@ public class InternalClient extends FilterClient { } } } - }); + })); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java b/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java index 94dc96f138d..30597f8fa74 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -308,13 +308,12 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin, Clus return modules; } - public Collection createComponents(Client nodeClient, ThreadPool threadPool, ClusterService clusterService, + public Collection createComponents(Client client, ThreadPool threadPool, ClusterService clusterService, ResourceWatcherService resourceWatcherService, List extensions) throws Exception { if (enabled == false) { return Collections.emptyList(); } - final InternalSecurityClient client = new InternalSecurityClient(settings, threadPool, nodeClient); threadContext.set(threadPool.getThreadContext()); List components = new ArrayList<>(); securityContext.set(new SecurityContext(settings, threadPool.getThreadContext())); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java b/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java index d700c92cdf9..6ba1a6a68d8 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.security; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; @@ -57,7 +58,7 @@ public class SecurityLifecycleService extends AbstractComponent implements Clust private final IndexLifecycleManager securityIndex; public SecurityLifecycleService(Settings settings, ClusterService clusterService, - ThreadPool threadPool, InternalSecurityClient client, + ThreadPool threadPool, Client client, @Nullable IndexAuditTrail indexAuditTrail) { super(settings); this.settings = settings; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java b/plugin/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java index 8a9140baa76..a106a21aa13 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java @@ -71,7 +71,9 @@ public class SecurityActionFilter extends AbstractComponent implements ActionFil } @Override - public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { + public void apply(Task task, String action, Request request, + ActionListener listener, + ActionFilterChain chain) { /* A functional requirement 
- when the license of security is disabled (invalid/expires), security will continue @@ -85,11 +87,11 @@ public class SecurityActionFilter extends AbstractComponent implements ActionFil } if (licenseState.isAuthAllowed()) { - final boolean useSystemUser = AuthorizationUtils.shouldReplaceUserWithSystem(threadContext, action); - final ActionListener contextPreservingListener = + final ActionListener contextPreservingListener = ContextPreservingActionListener.wrapPreservingContext(listener, threadContext); ActionListener authenticatedListener = ActionListener.wrap( (aVoid) -> chain.proceed(task, action, request, contextPreservingListener), contextPreservingListener::onFailure); + final boolean useSystemUser = AuthorizationUtils.shouldReplaceUserWithSystem(threadContext, action); try { if (useSystemUser) { securityContext.executeAsUser(SystemUser.INSTANCE, (original) -> { @@ -99,6 +101,14 @@ public class SecurityActionFilter extends AbstractComponent implements ActionFil listener.onFailure(e); } }, Version.CURRENT); + } else if (AuthorizationUtils.shouldSetUserBasedOnActionOrigin(threadContext)) { + AuthorizationUtils.switchUserBasedOnActionOriginAndExecute(threadContext, securityContext, (original) -> { + try { + applyInternal(action, request, authenticatedListener); + } catch (IOException e) { + listener.onFailure(e); + } + }); } else { try (ThreadContext.StoredContext ignore = threadContext.newStoredContext(true)) { applyInternal(action, request, authenticatedListener); @@ -119,7 +129,8 @@ public class SecurityActionFilter extends AbstractComponent implements ActionFil return Integer.MIN_VALUE; } - private void applyInternal(String action, final ActionRequest request, ActionListener listener) throws IOException { + private void applyInternal(String action, Request request, + ActionListener listener) throws IOException { if (CloseIndexAction.NAME.equals(action) || OpenIndexAction.NAME.equals(action) || DeleteIndexAction.NAME.equals(action)) { IndicesRequest indicesRequest = (IndicesRequest) request; try { @@ -145,7 +156,8 @@ public class SecurityActionFilter extends AbstractComponent implements ActionFil ActionListener.wrap((authc) -> authorizeRequest(authc, securityAction, request, listener), listener::onFailure)); } - void authorizeRequest(Authentication authentication, String securityAction, ActionRequest request, ActionListener listener) { + private void authorizeRequest(Authentication authentication, String securityAction, Request request, + ActionListener listener) { if (authentication == null) { listener.onFailure(new IllegalArgumentException("authentication must be non null for authorization")); } else { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java b/plugin/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java index 1c33b632afa..b97fc782a61 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.security.audit.index; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; @@ -46,7 +45,6 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportMessage; import org.elasticsearch.xpack.XPackPlugin; -import org.elasticsearch.xpack.security.InternalSecurityClient; import org.elasticsearch.xpack.security.audit.AuditLevel; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.authc.AuthenticationToken; @@ -82,6 +80,8 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; +import static org.elasticsearch.xpack.ClientHelper.SECURITY_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.clientWithOrigin; import static org.elasticsearch.xpack.security.Security.setting; import static org.elasticsearch.xpack.security.audit.AuditLevel.ACCESS_DENIED; import static org.elasticsearch.xpack.security.audit.AuditLevel.ACCESS_GRANTED; @@ -175,7 +175,7 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail { return NAME; } - public IndexAuditTrail(Settings settings, InternalSecurityClient client, ThreadPool threadPool, ClusterService clusterService) { + public IndexAuditTrail(Settings settings, Client client, ThreadPool threadPool, ClusterService clusterService) { super(settings); this.threadPool = threadPool; this.clusterService = clusterService; @@ -189,7 +189,7 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail { if (indexToRemoteCluster == false) { // in the absence of client settings for remote indexing, fall back to the client that was passed in. - this.client = client; + this.client = clientWithOrigin(client, SECURITY_ORIGIN); } else { this.client = initializeRemoteClient(settings, logger); } @@ -932,9 +932,7 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail { @Override public void afterBulk(long executionId, BulkRequest request, Throwable failure) { - logger.error( - (Supplier) () -> new ParameterizedMessage( - "failed to bulk index audit events: [{}]", failure.getMessage()), failure); + logger.error(new ParameterizedMessage("failed to bulk index audit events: [{}]", failure.getMessage()), failure); } }).setBulkActions(bulkSize) .setFlushInterval(interval) diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java index 2f9145da1f6..ddf4cecbc16 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java @@ -7,37 +7,38 @@ package org.elasticsearch.xpack.security.authc; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.index.reindex.DeleteByQueryAction; -import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.client.Client; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.reindex.DeleteByQueryAction; +import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool.Names; -import org.elasticsearch.xpack.security.InternalClient; -import 
org.elasticsearch.xpack.security.InternalSecurityClient; import org.elasticsearch.xpack.security.SecurityLifecycleService; import java.time.Instant; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.action.support.TransportActions.isShardNotAvailableException; +import static org.elasticsearch.xpack.ClientHelper.SECURITY_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; /** * Responsible for cleaning the invalidated tokens from the invalidated tokens index. */ final class ExpiredTokenRemover extends AbstractRunnable { - private final InternalSecurityClient client; + private final Client client; private final AtomicBoolean inProgress = new AtomicBoolean(false); private final Logger logger; private final TimeValue timeout; - ExpiredTokenRemover(Settings settings, InternalSecurityClient internalClient) { - this.client = internalClient; + ExpiredTokenRemover(Settings settings, Client client) { + this.client = client; this.logger = Loggers.getLogger(getClass(), settings); this.timeout = TokenService.DELETE_TIMEOUT.get(settings); } @@ -54,13 +55,14 @@ final class ExpiredTokenRemover extends AbstractRunnable { .query(QueryBuilders.boolQuery() .filter(QueryBuilders.termQuery("doc_type", TokenService.DOC_TYPE)) .filter(QueryBuilders.rangeQuery("expiration_time").lte(Instant.now().toEpochMilli()))); - client.execute(DeleteByQueryAction.INSTANCE, dbq, ActionListener.wrap(r -> markComplete(), - e -> { - if (isShardNotAvailableException(e) == false) { - logger.error("failed to delete expired tokens", e); - } - markComplete(); - })); + executeAsyncWithOrigin(client, SECURITY_ORIGIN, DeleteByQueryAction.INSTANCE, dbq, + ActionListener.wrap(r -> markComplete(), + e -> { + if (isShardNotAvailableException(e) == false) { + logger.error("failed to delete expired tokens", e); + } + markComplete(); + })); } void submit(ThreadPool threadPool) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index a907c722df3..0b59231a6a1 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.security.authc; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.IOUtils; @@ -22,6 +21,7 @@ import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.AckedRequest; @@ -36,7 +36,6 @@ import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -49,8 +48,6 @@ import 
org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; -import org.elasticsearch.xpack.security.InternalClient; -import org.elasticsearch.xpack.security.InternalSecurityClient; import org.elasticsearch.xpack.security.SecurityLifecycleService; import javax.crypto.Cipher; @@ -89,6 +86,8 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicLong; import static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; +import static org.elasticsearch.xpack.ClientHelper.SECURITY_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; /** * Service responsible for the creation, validation, and other management of {@link UserToken} @@ -133,7 +132,7 @@ public final class TokenService extends AbstractComponent { private final Clock clock; private final TimeValue expirationDelay; private final TimeValue deleteInterval; - private final InternalSecurityClient internalClient; + private final Client client; private final SecurityLifecycleService lifecycleService; private final ExpiredTokenRemover expiredTokenRemover; private final boolean enabled; @@ -147,9 +146,9 @@ public final class TokenService extends AbstractComponent { * Creates a new token service * @param settings the node settings * @param clock the clock that will be used for comparing timestamps - * @param internalClient the client to use when checking for revocations + * @param client the client to use when checking for revocations */ - public TokenService(Settings settings, Clock clock, InternalSecurityClient internalClient, + public TokenService(Settings settings, Clock clock, Client client, SecurityLifecycleService lifecycleService, ClusterService clusterService) throws GeneralSecurityException { super(settings); byte[] saltArr = new byte[SALT_BYTES]; @@ -158,12 +157,12 @@ public final class TokenService extends AbstractComponent { final SecureString tokenPassphrase = generateTokenKey(); this.clock = clock.withZone(ZoneOffset.UTC); this.expirationDelay = TOKEN_EXPIRATION.get(settings); - this.internalClient = internalClient; + this.client = client; this.lifecycleService = lifecycleService; - this.lastExpirationRunMs = internalClient.threadPool().relativeTimeInMillis(); + this.lastExpirationRunMs = client.threadPool().relativeTimeInMillis(); this.deleteInterval = DELETE_INTERVAL.get(settings); this.enabled = XPackSettings.TOKEN_SERVICE_ENABLED_SETTING.get(settings); - this.expiredTokenRemover = new ExpiredTokenRemover(settings, internalClient); + this.expiredTokenRemover = new ExpiredTokenRemover(settings, client); this.currentVersionBytes = ByteBuffer.allocate(4).putInt(TOKEN_SERVICE_VERSION.id).array(); ensureEncryptionCiphersSupported(); KeyAndCache keyAndCache = new KeyAndCache(new KeyAndTimestamp(tokenPassphrase.clone(), createdTimeStamps.incrementAndGet()), @@ -249,7 +248,7 @@ public final class TokenService extends AbstractComponent { * request(s) that require a key computation will be delayed and there will be * some additional latency. 
*/ - internalClient.threadPool().executor(THREAD_POOL_NAME) + client.threadPool().executor(THREAD_POOL_NAME) .submit(new KeyComputingRunnable(in, iv, version, decodedSalt, listener, keyAndCache)); } } else { @@ -293,26 +292,27 @@ public final class TokenService extends AbstractComponent { } else { final String id = getDocumentId(userToken); lifecycleService.createIndexIfNeededThenExecute(listener, () -> { - internalClient.prepareIndex(SecurityLifecycleService.SECURITY_INDEX_NAME, TYPE, id) - .setOpType(OpType.CREATE) - .setSource("doc_type", DOC_TYPE, "expiration_time", getExpirationTime().toEpochMilli()) - .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) - .execute(new ActionListener() { - @Override - public void onResponse(IndexResponse indexResponse) { - listener.onResponse(indexResponse.getResult() == Result.CREATED); - } - - @Override - public void onFailure(Exception e) { - if (e instanceof VersionConflictEngineException) { - // doc already exists - listener.onResponse(false); - } else { - listener.onFailure(e); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareIndex(SecurityLifecycleService.SECURITY_INDEX_NAME, TYPE, id) + .setOpType(OpType.CREATE) + .setSource("doc_type", DOC_TYPE, "expiration_time", getExpirationTime().toEpochMilli()) + .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL).request(), + new ActionListener() { + @Override + public void onResponse(IndexResponse indexResponse) { + listener.onResponse(indexResponse.getResult() == Result.CREATED); } - } - }); + + @Override + public void onFailure(Exception e) { + if (e instanceof VersionConflictEngineException) { + // doc already exists + listener.onResponse(false); + } else { + listener.onFailure(e); + } + } + }, client::index); }); } }, listener::onFailure)); @@ -345,8 +345,9 @@ public final class TokenService extends AbstractComponent { "the upgrade API is run on the security index")); return; } - internalClient.prepareGet(SecurityLifecycleService.SECURITY_INDEX_NAME, TYPE, getDocumentId(userToken)) - .execute(new ActionListener() { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareGet(SecurityLifecycleService.SECURITY_INDEX_NAME, TYPE, getDocumentId(userToken)).request(), + new ActionListener() { @Override public void onResponse(GetResponse response) { @@ -370,7 +371,7 @@ public final class TokenService extends AbstractComponent { listener.onFailure(e); } } - }); + }, client::get); } else if (lifecycleService.isSecurityIndexExisting()) { // index exists but the index isn't available, do not trust the token logger.warn("could not validate token as the security index is not available"); @@ -391,9 +392,9 @@ public final class TokenService extends AbstractComponent { private void maybeStartTokenRemover() { if (lifecycleService.isSecurityIndexAvailable()) { - if (internalClient.threadPool().relativeTimeInMillis() - lastExpirationRunMs > deleteInterval.getMillis()) { - expiredTokenRemover.submit(internalClient.threadPool()); - lastExpirationRunMs = internalClient.threadPool().relativeTimeInMillis(); + if (client.threadPool().relativeTimeInMillis() - lastExpirationRunMs > deleteInterval.getMillis()) { + expiredTokenRemover.submit(client.threadPool()); + lastExpirationRunMs = client.threadPool().relativeTimeInMillis(); } } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index 
715cfbcec73..f28736efc03 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -6,20 +6,20 @@ package org.elasticsearch.xpack.security.authc.esnative; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.update.UpdateResponse; +import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -28,6 +28,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.engine.DocumentMissingException; @@ -35,8 +36,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.XPackPlugin; -import org.elasticsearch.xpack.security.InternalClient; -import org.elasticsearch.xpack.security.InternalSecurityClient; +import org.elasticsearch.xpack.security.ScrollHelper; import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.action.realm.ClearRealmCacheRequest; import org.elasticsearch.xpack.security.action.realm.ClearRealmCacheResponse; @@ -58,6 +58,11 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.Consumer; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.ClientHelper.SECURITY_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; /** * NativeUsersStore is a store for users that reads from an Elasticsearch index. 
This store is responsible for fetching the full @@ -74,12 +79,12 @@ public class NativeUsersStore extends AbstractComponent { private final Hasher hasher = Hasher.BCRYPT; - private final InternalSecurityClient client; + private final Client client; private final boolean isTribeNode; private volatile SecurityLifecycleService securityLifecycleService; - public NativeUsersStore(Settings settings, InternalSecurityClient client, SecurityLifecycleService securityLifecycleService) { + public NativeUsersStore(Settings settings, Client client, SecurityLifecycleService securityLifecycleService) { super(settings); this.client = client; this.isTribeNode = XPackPlugin.isTribeNode(settings); @@ -129,19 +134,22 @@ public class NativeUsersStore extends AbstractComponent { .map(s -> getIdForUser(USER_DOC_TYPE, s)).toArray(String[]::new); query = QueryBuilders.boolQuery().filter(QueryBuilders.idsQuery(INDEX_TYPE).addIds(users)); } - SearchRequest request = client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) - .setScroll(TimeValue.timeValueSeconds(10L)) - .setQuery(query) - .setSize(1000) - .setFetchSource(true) - .request(); - request.indicesOptions().ignoreUnavailable(); - InternalClient.fetchAllByEntity(client, request, listener, (hit) -> { - UserAndPassword u = transformUser(hit.getId(), hit.getSourceAsMap()); - return u != null ? u.user() : null; - }); + final Supplier supplier = client.threadPool().getThreadContext().newRestorableContext(false); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) { + SearchRequest request = client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) + .setScroll(TimeValue.timeValueSeconds(10L)) + .setQuery(query) + .setSize(1000) + .setFetchSource(true) + .request(); + request.indicesOptions().ignoreUnavailable(); + ScrollHelper.fetchAllByEntity(client, request, new ContextPreservingActionListener<>(supplier, listener), (hit) -> { + UserAndPassword u = transformUser(hit.getId(), hit.getSourceAsMap()); + return u != null ? 
u.user() : null; + }); + } } catch (Exception e) { - logger.error((Supplier) () -> new ParameterizedMessage("unable to retrieve users {}", Arrays.toString(userNames)), e); + logger.error(new ParameterizedMessage("unable to retrieve users {}", Arrays.toString(userNames)), e); listener.onFailure(e); } } @@ -158,33 +166,35 @@ public class NativeUsersStore extends AbstractComponent { return; } try { - GetRequest request = client.prepareGet(SecurityLifecycleService.SECURITY_INDEX_NAME, - INDEX_TYPE, getIdForUser(USER_DOC_TYPE, user)).request(); - client.get(request, new ActionListener() { - @Override - public void onResponse(GetResponse response) { - listener.onResponse(transformUser(response.getId(), response.getSource())); - } + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareGet(SecurityLifecycleService.SECURITY_INDEX_NAME, + INDEX_TYPE, getIdForUser(USER_DOC_TYPE, user)).request(), + new ActionListener() { + @Override + public void onResponse(GetResponse response) { + listener.onResponse(transformUser(response.getId(), response.getSource())); + } - @Override - public void onFailure(Exception t) { - if (t instanceof IndexNotFoundException) { - logger.trace( - (Supplier) () -> new ParameterizedMessage( - "could not retrieve user [{}] because security index does not exist", user), t); - } else { - logger.error((Supplier) () -> new ParameterizedMessage("failed to retrieve user [{}]", user), t); - } - // We don't invoke the onFailure listener here, instead - // we call the response with a null user - listener.onResponse(null); - } - }); + @Override + public void onFailure(Exception t) { + if (t instanceof IndexNotFoundException) { + logger.trace( + (org.apache.logging.log4j.util.Supplier) () -> new ParameterizedMessage( + "could not retrieve user [{}] because security index does not exist", user), t); + } else { + logger.error(new ParameterizedMessage("failed to retrieve user [{}]", user), t); + } + // We don't invoke the onFailure listener here, instead + // we call the response with a null user + listener.onResponse(null); + } + }, client::get); } catch (IndexNotFoundException infe) { - logger.trace("could not retrieve user [{}] because security index does not exist", user); + logger.trace((org.apache.logging.log4j.util.Supplier) + () -> new ParameterizedMessage("could not retrieve user [{}] because security index does not exist", user)); listener.onResponse(null); } catch (Exception e) { - logger.error((Supplier) () -> new ParameterizedMessage("unable to retrieve user [{}]", user), e); + logger.error(new ParameterizedMessage("unable to retrieve user [{}]", user), e); listener.onFailure(e); } } @@ -217,34 +227,36 @@ public class NativeUsersStore extends AbstractComponent { docType = USER_DOC_TYPE; } - securityLifecycleService.createIndexIfNeededThenExecute(listener, () -> - client.prepareUpdate(SecurityLifecycleService.SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(docType, username)) - .setDoc(Requests.INDEX_CONTENT_TYPE, Fields.PASSWORD.getPreferredName(), String.valueOf(request.passwordHash())) - .setRefreshPolicy(request.getRefreshPolicy()) - .execute(new ActionListener() { - @Override - public void onResponse(UpdateResponse updateResponse) { - assert updateResponse.getResult() == DocWriteResponse.Result.UPDATED; - clearRealmCache(request.username(), listener, null); - } - - @Override - public void onFailure(Exception e) { - if (isIndexNotFoundOrDocumentMissing(e)) { - if (docType.equals(RESERVED_USER_TYPE)) { - createReservedUser(username, 
request.passwordHash(), request.getRefreshPolicy(), listener); - } else { - logger.debug((Supplier) () -> - new ParameterizedMessage("failed to change password for user [{}]", request.username()), e); - ValidationException validationException = new ValidationException(); - validationException.addValidationError("user must exist in order to change password"); - listener.onFailure(validationException); - } - } else { - listener.onFailure(e); + securityLifecycleService.createIndexIfNeededThenExecute(listener, () -> { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareUpdate(SecurityLifecycleService.SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(docType, username)) + .setDoc(Requests.INDEX_CONTENT_TYPE, Fields.PASSWORD.getPreferredName(), String.valueOf(request.passwordHash())) + .setRefreshPolicy(request.getRefreshPolicy()).request(), + new ActionListener() { + @Override + public void onResponse(UpdateResponse updateResponse) { + assert updateResponse.getResult() == DocWriteResponse.Result.UPDATED; + clearRealmCache(request.username(), listener, null); } - } - })); + + @Override + public void onFailure(Exception e) { + if (isIndexNotFoundOrDocumentMissing(e)) { + if (docType.equals(RESERVED_USER_TYPE)) { + createReservedUser(username, request.passwordHash(), request.getRefreshPolicy(), listener); + } else { + logger.debug((org.apache.logging.log4j.util.Supplier) () -> + new ParameterizedMessage("failed to change password for user [{}]", request.username()), e); + ValidationException validationException = new ValidationException(); + validationException.addValidationError("user must exist in order to change password"); + listener.onFailure(validationException); + } + } else { + listener.onFailure(e); + } + } + }, client::update); + }); } /** @@ -258,22 +270,26 @@ public class NativeUsersStore extends AbstractComponent { "the upgrade API is run on the security index")); return; } - securityLifecycleService.createIndexIfNeededThenExecute(listener, () -> - client.prepareIndex(SecurityLifecycleService.SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(RESERVED_USER_TYPE, username)) - .setSource(Fields.PASSWORD.getPreferredName(), String.valueOf(passwordHash), Fields.ENABLED.getPreferredName(), true, - Fields.TYPE.getPreferredName(), RESERVED_USER_TYPE) - .setRefreshPolicy(refresh) - .execute(new ActionListener() { - @Override - public void onResponse(IndexResponse indexResponse) { - clearRealmCache(username, listener, null); - } + securityLifecycleService.createIndexIfNeededThenExecute(listener, () -> { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareIndex(SecurityLifecycleService.SECURITY_INDEX_NAME, INDEX_TYPE, + getIdForUser(RESERVED_USER_TYPE, username)) + .setSource(Fields.PASSWORD.getPreferredName(), String.valueOf(passwordHash), + Fields.ENABLED.getPreferredName(), true, + Fields.TYPE.getPreferredName(), RESERVED_USER_TYPE) + .setRefreshPolicy(refresh).request(), + new ActionListener() { + @Override + public void onResponse(IndexResponse indexResponse) { + clearRealmCache(username, listener, null); + } - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - })); + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }, client::index); + }); } /** @@ -304,7 +320,7 @@ public class NativeUsersStore extends AbstractComponent { indexUser(request, listener); } } catch (Exception e) { - logger.error((Supplier) () -> new ParameterizedMessage("unable to put 
user [{}]", request.username()), e); + logger.error(new ParameterizedMessage("unable to put user [{}]", request.username()), e); listener.onFailure(e); } } @@ -316,68 +332,77 @@ public class NativeUsersStore extends AbstractComponent { assert putUserRequest.passwordHash() == null; assert !securityLifecycleService.isSecurityIndexOutOfDate() : "security index should be up to date"; // We must have an existing document - securityLifecycleService.createIndexIfNeededThenExecute(listener, () -> - client.prepareUpdate(SecurityLifecycleService.SECURITY_INDEX_NAME, INDEX_TYPE, - getIdForUser(USER_DOC_TYPE, putUserRequest.username())) - .setDoc(Requests.INDEX_CONTENT_TYPE, - Fields.USERNAME.getPreferredName(), putUserRequest.username(), - Fields.ROLES.getPreferredName(), putUserRequest.roles(), - Fields.FULL_NAME.getPreferredName(), putUserRequest.fullName(), - Fields.EMAIL.getPreferredName(), putUserRequest.email(), - Fields.METADATA.getPreferredName(), putUserRequest.metadata(), - Fields.ENABLED.getPreferredName(), putUserRequest.enabled(), - Fields.TYPE.getPreferredName(), USER_DOC_TYPE) - .setRefreshPolicy(putUserRequest.getRefreshPolicy()) - .execute(new ActionListener() { - @Override - public void onResponse(UpdateResponse updateResponse) { - assert updateResponse.getResult() == DocWriteResponse.Result.UPDATED; - clearRealmCache(putUserRequest.username(), listener, false); - } - - @Override - public void onFailure(Exception e) { - Exception failure = e; - if (isIndexNotFoundOrDocumentMissing(e)) { - // if the index doesn't exist we can never update a user - // if the document doesn't exist, then this update is not valid - logger.debug((Supplier) () -> new ParameterizedMessage("failed to update user document with username [{}]", - putUserRequest.username()), e); - ValidationException validationException = new ValidationException(); - validationException.addValidationError("password must be specified unless you are updating an existing user"); - failure = validationException; + securityLifecycleService.createIndexIfNeededThenExecute(listener, () -> { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareUpdate(SecurityLifecycleService.SECURITY_INDEX_NAME, INDEX_TYPE, + getIdForUser(USER_DOC_TYPE, putUserRequest.username())) + .setDoc(Requests.INDEX_CONTENT_TYPE, + Fields.USERNAME.getPreferredName(), putUserRequest.username(), + Fields.ROLES.getPreferredName(), putUserRequest.roles(), + Fields.FULL_NAME.getPreferredName(), putUserRequest.fullName(), + Fields.EMAIL.getPreferredName(), putUserRequest.email(), + Fields.METADATA.getPreferredName(), putUserRequest.metadata(), + Fields.ENABLED.getPreferredName(), putUserRequest.enabled(), + Fields.TYPE.getPreferredName(), USER_DOC_TYPE) + .setRefreshPolicy(putUserRequest.getRefreshPolicy()) + .request(), + new ActionListener() { + @Override + public void onResponse(UpdateResponse updateResponse) { + assert updateResponse.getResult() == DocWriteResponse.Result.UPDATED; + clearRealmCache(putUserRequest.username(), listener, false); } - listener.onFailure(failure); - } - })); + + @Override + public void onFailure(Exception e) { + Exception failure = e; + if (isIndexNotFoundOrDocumentMissing(e)) { + // if the index doesn't exist we can never update a user + // if the document doesn't exist, then this update is not valid + logger.debug((org.apache.logging.log4j.util.Supplier) + () -> new ParameterizedMessage("failed to update user document with username [{}]", + putUserRequest.username()), e); + 
ValidationException validationException = new ValidationException(); + validationException + .addValidationError("password must be specified unless you are updating an existing user"); + failure = validationException; + } + listener.onFailure(failure); + } + }, client::update); + }); } private void indexUser(final PutUserRequest putUserRequest, final ActionListener listener) { assert putUserRequest.passwordHash() != null; assert !securityLifecycleService.isSecurityIndexOutOfDate() : "security index should be up to date"; - securityLifecycleService.createIndexIfNeededThenExecute(listener, () -> - client.prepareIndex(SecurityLifecycleService.SECURITY_INDEX_NAME, INDEX_TYPE, + securityLifecycleService.createIndexIfNeededThenExecute(listener, () -> { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareIndex(SecurityLifecycleService.SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(USER_DOC_TYPE, putUserRequest.username())) - .setSource(Fields.USERNAME.getPreferredName(), putUserRequest.username(), - Fields.PASSWORD.getPreferredName(), String.valueOf(putUserRequest.passwordHash()), - Fields.ROLES.getPreferredName(), putUserRequest.roles(), - Fields.FULL_NAME.getPreferredName(), putUserRequest.fullName(), - Fields.EMAIL.getPreferredName(), putUserRequest.email(), - Fields.METADATA.getPreferredName(), putUserRequest.metadata(), - Fields.ENABLED.getPreferredName(), putUserRequest.enabled(), - Fields.TYPE.getPreferredName(), USER_DOC_TYPE) - .setRefreshPolicy(putUserRequest.getRefreshPolicy()) - .execute(new ActionListener() { - @Override - public void onResponse(IndexResponse updateResponse) { - clearRealmCache(putUserRequest.username(), listener, updateResponse.getResult() == DocWriteResponse.Result.CREATED); - } + .setSource(Fields.USERNAME.getPreferredName(), putUserRequest.username(), + Fields.PASSWORD.getPreferredName(), String.valueOf(putUserRequest.passwordHash()), + Fields.ROLES.getPreferredName(), putUserRequest.roles(), + Fields.FULL_NAME.getPreferredName(), putUserRequest.fullName(), + Fields.EMAIL.getPreferredName(), putUserRequest.email(), + Fields.METADATA.getPreferredName(), putUserRequest.metadata(), + Fields.ENABLED.getPreferredName(), putUserRequest.enabled(), + Fields.TYPE.getPreferredName(), USER_DOC_TYPE) + .setRefreshPolicy(putUserRequest.getRefreshPolicy()) + .request(), + new ActionListener() { + @Override + public void onResponse(IndexResponse updateResponse) { + clearRealmCache(putUserRequest.username(), listener, + updateResponse.getResult() == DocWriteResponse.Result.CREATED); + } - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - })); + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }, client::index); + }); } /** @@ -411,31 +436,37 @@ public class NativeUsersStore extends AbstractComponent { final ActionListener listener) { assert !securityLifecycleService.isSecurityIndexOutOfDate() : "security index should be up to date"; try { - securityLifecycleService.createIndexIfNeededThenExecute(listener, () -> - client.prepareUpdate(SecurityLifecycleService.SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(USER_DOC_TYPE, username)) - .setDoc(Requests.INDEX_CONTENT_TYPE, Fields.ENABLED.getPreferredName(), enabled) - .setRefreshPolicy(refreshPolicy) - .execute(new ActionListener() { - @Override - public void onResponse(UpdateResponse updateResponse) { - clearRealmCache(username, listener, null); - } - - @Override - public void onFailure(Exception e) { - Exception failure = e; - 
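
Listing users takes the other route shown in this diff: stash the thread context with stashWithOrigin, run the scroll search through ScrollHelper.fetchAllByEntity, and pass it a ContextPreservingActionListener built from newRestorableContext(false) so the caller's context is restored when results arrive. A condensed sketch of that pattern follows, assuming a placeholder index, a match-all query and a trivial hit transform; ScrollHelper is used with the same argument shape as in the hunks above.

import java.util.Collection;
import java.util.function.Supplier;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.ContextPreservingActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.security.ScrollHelper;

import static org.elasticsearch.xpack.ClientHelper.SECURITY_ORIGIN;
import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin;

class ScrollWithOriginSketch {

    void fetchAllIds(Client client, ActionListener<Collection<String>> listener) {
        final ThreadContext threadContext = client.threadPool().getThreadContext();
        // Capture a restorable view of the caller's context before stashing it.
        final Supplier<ThreadContext.StoredContext> restore = threadContext.newRestorableContext(false);
        try (ThreadContext.StoredContext ignore = stashWithOrigin(threadContext, SECURITY_ORIGIN)) {
            SearchRequest request = client.prepareSearch(".security") // placeholder index
                    .setScroll(TimeValue.timeValueSeconds(10L))
                    .setQuery(QueryBuilders.matchAllQuery())
                    .setSize(1000)
                    .setFetchSource(true)
                    .request();
            // The scroll runs with the security origin; the wrapped listener sees the caller's context.
            ScrollHelper.fetchAllByEntity(client, request,
                    new ContextPreservingActionListener<>(restore, listener),
                    SearchHit::getId); // trivial transform; the diff maps hits to users/roles here
        }
    }
}
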
if (isIndexNotFoundOrDocumentMissing(e)) { - // if the index doesn't exist we can never update a user - // if the document doesn't exist, then this update is not valid - logger.debug((Supplier) () -> - new ParameterizedMessage("failed to {} user [{}]", enabled ? "enable" : "disable", username), e); - ValidationException validationException = new ValidationException(); - validationException.addValidationError("only existing users can be " + (enabled ? "enabled" : "disabled")); - failure = validationException; + securityLifecycleService.createIndexIfNeededThenExecute(listener, () -> { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareUpdate(SecurityLifecycleService.SECURITY_INDEX_NAME, INDEX_TYPE, + getIdForUser(USER_DOC_TYPE, username)) + .setDoc(Requests.INDEX_CONTENT_TYPE, Fields.ENABLED.getPreferredName(), enabled) + .setRefreshPolicy(refreshPolicy) + .request(), + new ActionListener() { + @Override + public void onResponse(UpdateResponse updateResponse) { + clearRealmCache(username, listener, null); } - listener.onFailure(failure); - } - })); + + @Override + public void onFailure(Exception e) { + Exception failure = e; + if (isIndexNotFoundOrDocumentMissing(e)) { + // if the index doesn't exist we can never update a user + // if the document doesn't exist, then this update is not valid + logger.debug((org.apache.logging.log4j.util.Supplier) + () -> new ParameterizedMessage("failed to {} user [{}]", + enabled ? "enable" : "disable", username), e); + ValidationException validationException = new ValidationException(); + validationException.addValidationError("only existing users can be " + + (enabled ? "enabled" : "disabled")); + failure = validationException; + } + listener.onFailure(failure); + } + }, client::update); + }); } catch (Exception e) { listener.onFailure(e); } @@ -445,29 +476,33 @@ public class NativeUsersStore extends AbstractComponent { boolean clearCache, final ActionListener listener) { assert !securityLifecycleService.isSecurityIndexOutOfDate() : "security index should be up to date"; try { - securityLifecycleService.createIndexIfNeededThenExecute(listener, () -> - client.prepareUpdate(SecurityLifecycleService.SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(RESERVED_USER_TYPE, username)) - .setDoc(Requests.INDEX_CONTENT_TYPE, Fields.ENABLED.getPreferredName(), enabled) - .setUpsert(XContentType.JSON, - Fields.PASSWORD.getPreferredName(), "", - Fields.ENABLED.getPreferredName(), enabled, - Fields.TYPE.getPreferredName(), RESERVED_USER_TYPE) - .setRefreshPolicy(refreshPolicy) - .execute(new ActionListener() { - @Override - public void onResponse(UpdateResponse updateResponse) { - if (clearCache) { - clearRealmCache(username, listener, null); - } else { - listener.onResponse(null); + securityLifecycleService.createIndexIfNeededThenExecute(listener, () -> { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareUpdate(SecurityLifecycleService.SECURITY_INDEX_NAME, INDEX_TYPE, + getIdForUser(RESERVED_USER_TYPE, username)) + .setDoc(Requests.INDEX_CONTENT_TYPE, Fields.ENABLED.getPreferredName(), enabled) + .setUpsert(XContentType.JSON, + Fields.PASSWORD.getPreferredName(), "", + Fields.ENABLED.getPreferredName(), enabled, + Fields.TYPE.getPreferredName(), RESERVED_USER_TYPE) + .setRefreshPolicy(refreshPolicy) + .request(), + new ActionListener() { + @Override + public void onResponse(UpdateResponse updateResponse) { + if (clearCache) { + clearRealmCache(username, listener, null); + } else 
{ + listener.onResponse(null); + } } - } - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - })); + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }, client::update); + }); } catch (Exception e) { listener.onFailure(e); } @@ -493,7 +528,7 @@ public class NativeUsersStore extends AbstractComponent { INDEX_TYPE, getIdForUser(USER_DOC_TYPE, deleteUserRequest.username())).request(); request.indicesOptions().ignoreUnavailable(); request.setRefreshPolicy(deleteUserRequest.getRefreshPolicy()); - client.delete(request, new ActionListener() { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, new ActionListener() { @Override public void onResponse(DeleteResponse deleteResponse) { clearRealmCache(deleteUserRequest.username(), listener, @@ -504,7 +539,7 @@ public class NativeUsersStore extends AbstractComponent { public void onFailure(Exception e) { listener.onFailure(e); } - }); + }, client::delete); } catch (Exception e) { logger.error("unable to remove user", e); listener.onFailure(e); @@ -539,8 +574,10 @@ public class NativeUsersStore extends AbstractComponent { "the upgrade API is run on the security index")); return; } - client.prepareGet(SecurityLifecycleService.SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(RESERVED_USER_TYPE, username)) - .execute(new ActionListener() { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareGet(SecurityLifecycleService.SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(RESERVED_USER_TYPE, username)) + .request(), + new ActionListener() { @Override public void onResponse(GetResponse getResponse) { if (getResponse.isExists()) { @@ -565,17 +602,15 @@ public class NativeUsersStore extends AbstractComponent { @Override public void onFailure(Exception e) { if (e instanceof IndexNotFoundException) { - logger.trace((Supplier) () -> new ParameterizedMessage( + logger.trace((org.apache.logging.log4j.util.Supplier) () -> new ParameterizedMessage( "could not retrieve built in user [{}] info since security index does not exist", username), e); listener.onResponse(null); } else { - logger.error( - (Supplier) () -> new ParameterizedMessage( - "failed to retrieve built in user [{}] info", username), e); + logger.error(new ParameterizedMessage("failed to retrieve built in user [{}] info", username), e); listener.onFailure(null); } } - }); + }, client::get); } void getAllReservedUserInfo(ActionListener> listener) { @@ -585,22 +620,23 @@ public class NativeUsersStore extends AbstractComponent { "the upgrade API is run on the security index")); return; } - client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) - .setQuery(QueryBuilders.termQuery(Fields.TYPE.getPreferredName(), RESERVED_USER_TYPE)) - .setFetchSource(true) - .execute(new ActionListener() { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) + .setQuery(QueryBuilders.termQuery(Fields.TYPE.getPreferredName(), RESERVED_USER_TYPE)) + .setFetchSource(true).request(), + new ActionListener() { @Override public void onResponse(SearchResponse searchResponse) { Map userInfos = new HashMap<>(); - assert searchResponse.getHits().getTotalHits() <= 10 : "there are more than 10 reserved users we need to change " + - "this to retrieve them all!"; + assert searchResponse.getHits().getTotalHits() <= 10 : + "there are more than 10 reserved users we need to change this to retrieve them 
all!"; for (SearchHit searchHit : searchResponse.getHits().getHits()) { Map sourceMap = searchHit.getSourceAsMap(); String password = (String) sourceMap.get(Fields.PASSWORD.getPreferredName()); Boolean enabled = (Boolean) sourceMap.get(Fields.ENABLED.getPreferredName()); final String id = searchHit.getId(); assert id != null && id.startsWith(RESERVED_USER_TYPE) : - "id [" + id + "] does not start with reserved-user prefix"; + "id [" + id + "] does not start with reserved-user prefix"; final String username = id.substring(RESERVED_USER_TYPE.length() + 1); if (password == null) { listener.onFailure(new IllegalStateException("password hash must not be null!")); @@ -625,27 +661,28 @@ public class NativeUsersStore extends AbstractComponent { listener.onFailure(e); } } - }); + }, client::search); } private void clearRealmCache(String username, ActionListener listener, Response response) { SecurityClient securityClient = new SecurityClient(client); ClearRealmCacheRequest request = securityClient.prepareClearRealmCache() .usernames(username).request(); - securityClient.clearRealmCache(request, new ActionListener() { - @Override - public void onResponse(ClearRealmCacheResponse nodes) { - listener.onResponse(response); - } + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, + new ActionListener() { + @Override + public void onResponse(ClearRealmCacheResponse nodes) { + listener.onResponse(response); + } - @Override - public void onFailure(Exception e) { - logger.error((Supplier) () -> new ParameterizedMessage("unable to clear realm cache for user [{}]", username), e); - ElasticsearchException exception = new ElasticsearchException("clearing the cache for [" + username - + "] failed. please clear the realm cache manually", e); - listener.onFailure(exception); - } - }); + @Override + public void onFailure(Exception e) { + logger.error(new ParameterizedMessage("unable to clear realm cache for user [{}]", username), e); + ElasticsearchException exception = new ElasticsearchException("clearing the cache for [" + username + + "] failed. 
please clear the realm cache manually", e); + listener.onFailure(exception); + } + }, securityClient::clearRealmCache); } @Nullable @@ -668,7 +705,7 @@ public class NativeUsersStore extends AbstractComponent { Map metadata = (Map) sourceMap.get(Fields.METADATA.getPreferredName()); return new UserAndPassword(new User(username, roles, fullName, email, metadata, enabled), password.toCharArray()); } catch (Exception e) { - logger.error((Supplier) () -> new ParameterizedMessage("error in the format of data for user [{}]", username), e); + logger.error(new ParameterizedMessage("error in the format of data for user [{}]", username), e); return null; } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java index 36fdc3e7816..23fd13d8225 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java @@ -5,6 +5,36 @@ */ package org.elasticsearch.xpack.security.authc.support.mapper; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.ContextPreservingActionListener; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.CheckedBiConsumer; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.security.ScrollHelper; +import org.elasticsearch.xpack.security.SecurityLifecycleService; +import org.elasticsearch.xpack.security.action.realm.ClearRealmCacheResponse; +import org.elasticsearch.xpack.security.action.rolemapping.DeleteRoleMappingRequest; +import org.elasticsearch.xpack.security.action.rolemapping.PutRoleMappingRequest; +import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; +import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; +import org.elasticsearch.xpack.security.client.SecurityClient; + import java.io.IOException; import java.util.Arrays; import java.util.Collection; @@ -15,40 +45,16 @@ import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.delete.DeleteResponse; -import 
org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.common.CheckedBiConsumer; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.xpack.XPackPlugin; -import org.elasticsearch.xpack.security.InternalClient; -import org.elasticsearch.xpack.security.InternalSecurityClient; -import org.elasticsearch.xpack.security.SecurityLifecycleService; -import org.elasticsearch.xpack.security.action.rolemapping.DeleteRoleMappingRequest; -import org.elasticsearch.xpack.security.action.rolemapping.PutRoleMappingRequest; -import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; -import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; -import org.elasticsearch.xpack.security.client.SecurityClient; - import static org.elasticsearch.action.DocWriteResponse.Result.CREATED; import static org.elasticsearch.action.DocWriteResponse.Result.DELETED; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.ClientHelper.SECURITY_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; /** @@ -71,12 +77,12 @@ public class NativeRoleMappingStore extends AbstractComponent implements UserRol private static final String SECURITY_GENERIC_TYPE = "doc"; - private final InternalSecurityClient client; + private final Client client; private final boolean isTribeNode; private final SecurityLifecycleService securityLifecycleService; private final List realmsToRefresh = new CopyOnWriteArrayList<>(); - public NativeRoleMappingStore(Settings settings, InternalSecurityClient client, SecurityLifecycleService securityLifecycleService) { + public NativeRoleMappingStore(Settings settings, Client client, SecurityLifecycleService securityLifecycleService) { super(settings); this.client = client; this.isTribeNode = XPackPlugin.isTribeNode(settings); @@ -104,22 +110,26 @@ public class NativeRoleMappingStore extends AbstractComponent implements UserRol return; } final QueryBuilder query = QueryBuilders.termQuery(DOC_TYPE_FIELD, DOC_TYPE_ROLE_MAPPING); - SearchRequest request = client.prepareSearch(SECURITY_INDEX_NAME) - .setScroll(TimeValue.timeValueSeconds(10L)) - .setTypes(SECURITY_GENERIC_TYPE) - .setQuery(query) - .setSize(1000) - .setFetchSource(true) - .request(); - request.indicesOptions().ignoreUnavailable(); - InternalClient.fetchAllByEntity(client, request, ActionListener.wrap((Collection mappings) -> - listener.onResponse(mappings.stream().filter(Objects::nonNull).collect(Collectors.toList())), - ex -> { - logger.error(new ParameterizedMessage("failed to load role mappings from index [{}] skipping all mappings.", - SECURITY_INDEX_NAME), ex); - listener.onResponse(Collections.emptyList()); - }), - doc 
-> buildMapping(getNameFromId(doc.getId()), doc.getSourceRef())); + final Supplier supplier = client.threadPool().getThreadContext().newRestorableContext(false); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) { + SearchRequest request = client.prepareSearch(SECURITY_INDEX_NAME) + .setScroll(TimeValue.timeValueSeconds(10L)) + .setTypes(SECURITY_GENERIC_TYPE) + .setQuery(query) + .setSize(1000) + .setFetchSource(true) + .request(); + request.indicesOptions().ignoreUnavailable(); + ScrollHelper.fetchAllByEntity(client, request, + new ContextPreservingActionListener<>(supplier, ActionListener.wrap((Collection mappings) -> + listener.onResponse(mappings.stream().filter(Objects::nonNull).collect(Collectors.toList())), + ex -> { + logger.error(new ParameterizedMessage("failed to load role mappings from index [{}] skipping all mappings.", + SECURITY_INDEX_NAME), ex); + listener.onResponse(Collections.emptyList()); + })), + doc -> buildMapping(getNameFromId(doc.getId()), doc.getSourceRef())); + } } private ExpressionRoleMapping buildMapping(String id, BytesReference source) { @@ -179,22 +189,24 @@ public class NativeRoleMappingStore extends AbstractComponent implements UserRol listener.onFailure(e); return; } - client.prepareIndex(SECURITY_INDEX_NAME, SECURITY_GENERIC_TYPE, getIdForName(mapping.getName())) - .setSource(xContentBuilder) - .setRefreshPolicy(request.getRefreshPolicy()) - .execute(new ActionListener() { - @Override - public void onResponse(IndexResponse indexResponse) { - boolean created = indexResponse.getResult() == CREATED; - listener.onResponse(created); - } + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareIndex(SECURITY_INDEX_NAME, SECURITY_GENERIC_TYPE, getIdForName(mapping.getName())) + .setSource(xContentBuilder) + .setRefreshPolicy(request.getRefreshPolicy()) + .request(), + new ActionListener() { + @Override + public void onResponse(IndexResponse indexResponse) { + boolean created = indexResponse.getResult() == CREATED; + listener.onResponse(created); + } - @Override - public void onFailure(Exception e) { - logger.error(new ParameterizedMessage("failed to put role-mapping [{}]", mapping.getName()), e); - listener.onFailure(e); - } - }); + @Override + public void onFailure(Exception e) { + logger.error(new ParameterizedMessage("failed to put role-mapping [{}]", mapping.getName()), e); + listener.onFailure(e); + } + }, client::index); }); } @@ -205,9 +217,11 @@ public class NativeRoleMappingStore extends AbstractComponent implements UserRol "the upgrade API is run on the security index")); return; } - client.prepareDelete(SECURITY_INDEX_NAME, SECURITY_GENERIC_TYPE, getIdForName(request.getName())) - .setRefreshPolicy(request.getRefreshPolicy()) - .execute(new ActionListener() { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareDelete(SECURITY_INDEX_NAME, SECURITY_GENERIC_TYPE, getIdForName(request.getName())) + .setRefreshPolicy(request.getRefreshPolicy()) + .request(), + new ActionListener() { @Override public void onResponse(DeleteResponse deleteResponse) { @@ -221,7 +235,7 @@ public class NativeRoleMappingStore extends AbstractComponent implements UserRol listener.onFailure(e); } - }); + }, client::delete); } /** @@ -293,17 +307,20 @@ public class NativeRoleMappingStore extends AbstractComponent implements UserRol private void refreshRealms(ActionListener listener, Result result) { String[] realmNames = 
this.realmsToRefresh.toArray(new String[realmsToRefresh.size()]); - new SecurityClient(this.client).prepareClearRealmCache().realms(realmNames).execute(ActionListener.wrap( - response -> { - logger.debug((Supplier) () -> new ParameterizedMessage( - "Cleared cached in realms [{}] due to role mapping change", Arrays.toString(realmNames))); - listener.onResponse(result); - }, - ex -> { - logger.warn("Failed to clear cache for realms [{}]", Arrays.toString(realmNames)); - listener.onFailure(ex); - }) - ); + final SecurityClient securityClient = new SecurityClient(client); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + securityClient.prepareClearRealmCache().realms(realmNames).request(), + ActionListener.wrap( + response -> { + logger.debug((org.apache.logging.log4j.util.Supplier) () -> new ParameterizedMessage( + "Cleared cached in realms [{}] due to role mapping change", Arrays.toString(realmNames))); + listener.onResponse(result); + }, + ex -> { + logger.warn("Failed to clear cache for realms [{}]", Arrays.toString(realmNames)); + listener.onFailure(ex); + }), + securityClient::clearRealmCache); } @Override diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java index ef43da20eb0..8a2c4f72a18 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java @@ -5,17 +5,30 @@ */ package org.elasticsearch.xpack.security.authz; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.xpack.ClientHelper; +import org.elasticsearch.xpack.security.SecurityContext; import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authz.permission.Role; import org.elasticsearch.xpack.security.support.Automatons; import org.elasticsearch.xpack.security.user.SystemUser; +import org.elasticsearch.xpack.security.user.XPackSecurityUser; +import org.elasticsearch.xpack.security.user.XPackUser; import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.function.Predicate; +import static org.elasticsearch.xpack.ClientHelper.DEPRECATION_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.MONITORING_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.PERSISTENT_TASK_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.SECURITY_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.WATCHER_ORIGIN; + public final class AuthorizationUtils { private static final Predicate INTERNAL_PREDICATE = Automatons.predicate("internal:*"); @@ -38,26 +51,71 @@ public final class AuthorizationUtils { * @return true if the system user should be used to execute a request */ public static boolean shouldReplaceUserWithSystem(ThreadContext threadContext, String action) { + // the action must be internal OR the thread context must be a system context. 
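
On the receiving side, SecurityActionFilter (earlier in this diff) only switches users when the incoming context carries an action.origin transient and no authentication. The following is a hypothetical caller-side sketch of that branch; threadContext, securityContext and proceed stand in for the real filter plumbing, while the AuthorizationUtils methods are the ones added below.

import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.xpack.security.SecurityContext;
import org.elasticsearch.xpack.security.authz.AuthorizationUtils;

class OriginSwitchSketch {

    void maybeSwitchUser(ThreadContext threadContext, SecurityContext securityContext, Runnable proceed) {
        // True only when action.origin is set and there is no authentication in the context.
        if (AuthorizationUtils.shouldSetUserBasedOnActionOrigin(threadContext)) {
            // SECURITY_ORIGIN maps to XPackSecurityUser; the other known origins map to XPackUser.
            AuthorizationUtils.switchUserBasedOnActionOriginAndExecute(threadContext, securityContext,
                    original -> proceed.run());
        } else {
            proceed.run();
        }
    }
}
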
if (threadContext.isSystemContext() == false && isInternalAction(action) == false) { return false; } + // there is no authentication object AND we are executing in a system context OR an internal action + // AND there Authentication authentication = threadContext.getTransient(Authentication.AUTHENTICATION_KEY); - if (authentication == null) { + if (authentication == null && threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME) == null) { return true; } - // we have a internal action being executed by a user that is not the system user, lets verify that there is a - // originating action that is not a internal action + // we have a internal action being executed by a user other than the system user, lets verify that there is a + // originating action that is not a internal action. We verify that there must be a originating action as an + // internal action should never be called by user code from a client final String originatingAction = threadContext.getTransient(AuthorizationService.ORIGINATING_ACTION_KEY); if (originatingAction != null && isInternalAction(originatingAction) == false) { return true; } - // either there was no originating action or it was a internal action, we should not replace under these circumstances + // either there was no originating action or the originating action was an internal action, + // we should not replace under these circumstances return false; } + /** + * Returns true if the thread context contains the origin of the action and does not have any authentication + */ + public static boolean shouldSetUserBasedOnActionOrigin(ThreadContext context) { + final String actionOrigin = context.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME); + final Authentication authentication = context.getTransient(Authentication.AUTHENTICATION_KEY); + return actionOrigin != null && authentication == null; + } + + /** + * Stashes the current context and executes the consumer as the proper user based on the origin of the action. + * + * This method knows nothing about listeners so it is important that callers ensure their listeners preserve their + * context and restore it appropriately. 
+ */ + public static void switchUserBasedOnActionOriginAndExecute(ThreadContext threadContext, SecurityContext securityContext, + Consumer consumer) { + final String actionOrigin = threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME); + if (actionOrigin == null) { + assert false : "cannot switch user if there is no action origin"; + throw new IllegalStateException("cannot switch user if there is no action origin"); + } + + switch (actionOrigin) { + case SECURITY_ORIGIN: + securityContext.executeAsUser(XPackSecurityUser.INSTANCE, consumer, Version.CURRENT); + break; + case WATCHER_ORIGIN: + case ML_ORIGIN: + case MONITORING_ORIGIN: + case DEPRECATION_ORIGIN: + case PERSISTENT_TASK_ORIGIN: + securityContext.executeAsUser(XPackUser.INSTANCE, consumer, Version.CURRENT); + break; + default: + assert false : "action.origin [" + actionOrigin + "] is unknown!"; + throw new IllegalStateException("action.origin [" + actionOrigin + "] should always be a known value"); + } + } + private static boolean isInternalAction(String action) { return INTERNAL_PREDICATE.test(action); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index dec218428e4..f6ac62e4fbb 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -7,19 +7,19 @@ package org.elasticsearch.xpack.security.authz.store; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.MultiSearchResponse.Item; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.TransportActions; +import org.elasticsearch.client.Client; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractComponent; @@ -27,6 +27,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; @@ -37,8 +38,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.XPackPlugin; -import org.elasticsearch.xpack.security.InternalClient; -import org.elasticsearch.xpack.security.InternalSecurityClient; +import org.elasticsearch.xpack.security.ScrollHelper; import 
org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.action.role.ClearRolesCacheRequest; import org.elasticsearch.xpack.security.action.role.ClearRolesCacheResponse; @@ -57,9 +57,13 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.function.Supplier; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.existsQuery; +import static org.elasticsearch.xpack.ClientHelper.SECURITY_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.security.Security.setting; import static org.elasticsearch.xpack.security.authz.RoleDescriptor.ROLE_TYPE; @@ -80,14 +84,14 @@ public class NativeRolesStore extends AbstractComponent { TimeValue.timeValueMinutes(20), Property.NodeScope, Property.Deprecated); private static final String ROLE_DOC_TYPE = "doc"; - private final InternalSecurityClient client; + private final Client client; private final XPackLicenseState licenseState; private final boolean isTribeNode; private SecurityClient securityClient; private final SecurityLifecycleService securityLifecycleService; - public NativeRolesStore(Settings settings, InternalSecurityClient client, XPackLicenseState licenseState, + public NativeRolesStore(Settings settings, Client client, XPackLicenseState licenseState, SecurityLifecycleService securityLifecycleService) { super(settings); this.client = client; @@ -118,15 +122,18 @@ public class NativeRolesStore extends AbstractComponent { final String[] roleNames = Arrays.stream(names).map(s -> getIdForUser(s)).toArray(String[]::new); query = QueryBuilders.boolQuery().filter(QueryBuilders.idsQuery(ROLE_DOC_TYPE).addIds(roleNames)); } - SearchRequest request = client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) - .setScroll(TimeValue.timeValueSeconds(10L)) - .setQuery(query) - .setSize(1000) - .setFetchSource(true) - .request(); - request.indicesOptions().ignoreUnavailable(); - InternalClient.fetchAllByEntity(client, request, listener, - (hit) -> transformRole(hit.getId(), hit.getSourceRef(), logger, licenseState)); + final Supplier supplier = client.threadPool().getThreadContext().newRestorableContext(false); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) { + SearchRequest request = client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) + .setScroll(TimeValue.timeValueSeconds(10L)) + .setQuery(query) + .setSize(1000) + .setFetchSource(true) + .request(); + request.indicesOptions().ignoreUnavailable(); + ScrollHelper.fetchAllByEntity(client, request, new ContextPreservingActionListener<>(supplier, listener), + (hit) -> transformRole(hit.getId(), hit.getSourceRef(), logger, licenseState)); + } } catch (Exception e) { logger.error(new ParameterizedMessage("unable to retrieve roles {}", Arrays.toString(names)), e); listener.onFailure(e); @@ -153,18 +160,20 @@ public class NativeRolesStore extends AbstractComponent { DeleteRequest request = client.prepareDelete(SecurityLifecycleService.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(deleteRoleRequest.name())).request(); request.setRefreshPolicy(deleteRoleRequest.getRefreshPolicy()); - client.delete(request, new ActionListener() { - @Override - public void onResponse(DeleteResponse deleteResponse) { - 
clearRoleCache(deleteRoleRequest.name(), listener, deleteResponse.getResult() == DocWriteResponse.Result.DELETED); - } + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, + new ActionListener() { + @Override + public void onResponse(DeleteResponse deleteResponse) { + clearRoleCache(deleteRoleRequest.name(), listener, + deleteResponse.getResult() == DocWriteResponse.Result.DELETED); + } - @Override - public void onFailure(Exception e) { - logger.error("failed to delete role from the index", e); - listener.onFailure(e); - } - }); + @Override + public void onFailure(Exception e) { + logger.error("failed to delete role from the index", e); + listener.onFailure(e); + } + }, client::delete); } catch (IndexNotFoundException e) { logger.trace("security index does not exist", e); listener.onResponse(false); @@ -206,25 +215,27 @@ public class NativeRolesStore extends AbstractComponent { listener.onFailure(e); return; } - client.prepareIndex(SecurityLifecycleService.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(role.getName())) - .setSource(xContentBuilder) - .setRefreshPolicy(request.getRefreshPolicy()) - .execute(new ActionListener() { - @Override - public void onResponse(IndexResponse indexResponse) { - final boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED; - clearRoleCache(role.getName(), listener, created); - } + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareIndex(SecurityLifecycleService.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(role.getName())) + .setSource(xContentBuilder) + .setRefreshPolicy(request.getRefreshPolicy()) + .request(), + new ActionListener() { + @Override + public void onResponse(IndexResponse indexResponse) { + final boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED; + clearRoleCache(role.getName(), listener, created); + } - @Override - public void onFailure(Exception e) { - logger.error((Supplier) () -> new ParameterizedMessage("failed to put role [{}]", request.name()), e); - listener.onFailure(e); - } - }); + @Override + public void onFailure(Exception e) { + logger.error(new ParameterizedMessage("failed to put role [{}]", request.name()), e); + listener.onFailure(e); + } + }, client::index); }); } catch (Exception e) { - logger.error((Supplier) () -> new ParameterizedMessage("unable to put role [{}]", request.name()), e); + logger.error(new ParameterizedMessage("unable to put role [{}]", request.name()), e); listener.onFailure(e); } } @@ -243,27 +254,29 @@ public class NativeRolesStore extends AbstractComponent { "the upgrade API is run on the security index")); return; } - client.prepareMultiSearch() - .add(client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) - .setQuery(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE)) - .setSize(0)) - .add(client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) - .setQuery(QueryBuilders.boolQuery() - .must(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE)) - .must(QueryBuilders.boolQuery() - .should(existsQuery("indices.field_security.grant")) - .should(existsQuery("indices.field_security.except")) - // for backwardscompat with 2.x - .should(existsQuery("indices.fields")))) - .setSize(0) - .setTerminateAfter(1)) - .add(client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) - .setQuery(QueryBuilders.boolQuery() - 
.must(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE)) - .filter(existsQuery("indices.query"))) - .setSize(0) - .setTerminateAfter(1)) - .execute(new ActionListener() { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareMultiSearch() + .add(client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) + .setQuery(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE)) + .setSize(0)) + .add(client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) + .setQuery(QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE)) + .must(QueryBuilders.boolQuery() + .should(existsQuery("indices.field_security.grant")) + .should(existsQuery("indices.field_security.except")) + // for backwardscompat with 2.x + .should(existsQuery("indices.fields")))) + .setSize(0) + .setTerminateAfter(1)) + .add(client.prepareSearch(SecurityLifecycleService.SECURITY_INDEX_NAME) + .setQuery(QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE)) + .filter(existsQuery("indices.query"))) + .setSize(0) + .setTerminateAfter(1)) + .request(), + new ActionListener() { @Override public void onResponse(MultiSearchResponse items) { Item[] responses = items.getResponses(); @@ -291,7 +304,7 @@ public class NativeRolesStore extends AbstractComponent { public void onFailure(Exception e) { listener.onFailure(e); } - }); + }, client::multiSearch); } } @@ -310,11 +323,11 @@ public class NativeRolesStore extends AbstractComponent { public void onFailure(Exception e) { // if the index or the shard is not there / available we just claim the role is not there if (TransportActions.isShardNotAvailableException(e)) { - logger.warn((Supplier) () -> new ParameterizedMessage("failed to load role [{}] index not available", - roleId), e); + logger.warn((org.apache.logging.log4j.util.Supplier) () -> + new ParameterizedMessage("failed to load role [{}] index not available", roleId), e); roleActionListener.onResponse(null); } else { - logger.error((Supplier) () -> new ParameterizedMessage("failed to load role [{}]", roleId), e); + logger.error(new ParameterizedMessage("failed to load role [{}]", roleId), e); roleActionListener.onFailure(e); } } @@ -329,13 +342,16 @@ public class NativeRolesStore extends AbstractComponent { "the upgrade API is run on the security index")); return; } + try { - GetRequest request = client.prepareGet(SecurityLifecycleService.SECURITY_INDEX_NAME, - ROLE_DOC_TYPE, getIdForUser(role)).request(); - client.get(request, listener); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareGet(SecurityLifecycleService.SECURITY_INDEX_NAME, + ROLE_DOC_TYPE, getIdForUser(role)).request(), + listener, + client::get); } catch (IndexNotFoundException e) { logger.trace( - (Supplier) () -> new ParameterizedMessage( + (org.apache.logging.log4j.util.Supplier) () -> new ParameterizedMessage( "unable to retrieve role [{}] since security index does not exist", role), e); listener.onResponse(new GetResponse( new GetResult(SecurityLifecycleService.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, @@ -348,20 +364,21 @@ public class NativeRolesStore extends AbstractComponent { private void clearRoleCache(final String role, ActionListener listener, Response response) { ClearRolesCacheRequest request = new ClearRolesCacheRequest().names(role); - securityClient.clearRolesCache(request, new 
ActionListener() { - @Override - public void onResponse(ClearRolesCacheResponse nodes) { - listener.onResponse(response); - } + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, + new ActionListener() { + @Override + public void onResponse(ClearRolesCacheResponse nodes) { + listener.onResponse(response); + } - @Override - public void onFailure(Exception e) { - logger.error((Supplier) () -> new ParameterizedMessage("unable to clear cache for role [{}]", role), e); - ElasticsearchException exception = new ElasticsearchException("clearing the cache for [" + role - + "] failed. please clear the role cache manually", e); - listener.onFailure(exception); - } - }); + @Override + public void onFailure(Exception e) { + logger.error(new ParameterizedMessage("unable to clear cache for role [{}]", role), e); + ElasticsearchException exception = new ElasticsearchException("clearing the cache for [" + role + + "] failed. please clear the role cache manually", e); + listener.onFailure(exception); + } + }, securityClient::clearRolesCache); } @Nullable @@ -407,7 +424,7 @@ public class NativeRolesStore extends AbstractComponent { } } catch (Exception e) { - logger.error((Supplier) () -> new ParameterizedMessage("error in the format of data for role [{}]", name), e); + logger.error(new ParameterizedMessage("error in the format of data for role [{}]", name), e); return null; } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/support/IndexLifecycleManager.java b/plugin/src/main/java/org/elasticsearch/xpack/security/support/IndexLifecycleManager.java index 11992ace209..1e49038839f 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/support/IndexLifecycleManager.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/support/IndexLifecycleManager.java @@ -5,16 +5,6 @@ */ package org.elasticsearch.xpack.security.support; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.function.BiConsumer; -import java.util.function.Predicate; -import java.util.regex.Pattern; -import java.util.stream.Collectors; - import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; @@ -26,6 +16,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterIndexHealth; @@ -37,11 +28,22 @@ import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.xpack.security.InternalSecurityClient; import org.elasticsearch.xpack.template.TemplateUtils; import org.elasticsearch.xpack.upgrade.IndexUpgradeCheck; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.BiConsumer; +import java.util.function.Predicate; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + 
import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_FORMAT_SETTING; +import static org.elasticsearch.xpack.ClientHelper.SECURITY_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; /** @@ -57,14 +59,14 @@ public class IndexLifecycleManager extends AbstractComponent { private final String indexName; private final String templateName; - private final InternalSecurityClient client; + private final Client client; private final List> indexHealthChangeListeners = new CopyOnWriteArrayList<>(); private final List> indexOutOfDateListeners = new CopyOnWriteArrayList<>(); private volatile State indexState = new State(false, false, false, false, null); - public IndexLifecycleManager(Settings settings, InternalSecurityClient client, String indexName, String templateName) { + public IndexLifecycleManager(Settings settings, Client client, String indexName, String templateName) { super(settings); this.client = client; this.indexName = indexName; @@ -291,28 +293,29 @@ public class IndexLifecycleManager extends AbstractComponent { } else { CreateIndexRequest request = new CreateIndexRequest(INTERNAL_SECURITY_INDEX); request.alias(new Alias(SECURITY_INDEX_NAME)); - client.admin().indices().create(request, new ActionListener() { - @Override - public void onResponse(CreateIndexResponse createIndexResponse) { - if (createIndexResponse.isAcknowledged()) { - andThen.run(); - } else { - listener.onFailure(new ElasticsearchException("Failed to create security index")); - } - } + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, + new ActionListener() { + @Override + public void onResponse(CreateIndexResponse createIndexResponse) { + if (createIndexResponse.isAcknowledged()) { + andThen.run(); + } else { + listener.onFailure(new ElasticsearchException("Failed to create security index")); + } + } - @Override - public void onFailure(Exception e) { - final Throwable cause = ExceptionsHelper.unwrapCause(e); - if (cause instanceof ResourceAlreadyExistsException) { - // the index already exists - it was probably just created so this - // node hasn't yet received the cluster state update with the index - andThen.run(); - } else { - listener.onFailure(e); - } - } - }); + @Override + public void onFailure(Exception e) { + final Throwable cause = ExceptionsHelper.unwrapCause(e); + if (cause instanceof ResourceAlreadyExistsException) { + // the index already exists - it was probably just created so this + // node hasn't yet received the cluster state update with the index + andThen.run(); + } else { + listener.onFailure(e); + } + } + }, client.admin().indices()::create); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java b/plugin/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java index 980df9c1185..b83dfcb7ed8 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java @@ -36,9 +36,7 @@ import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.AuthorizationUtils; import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4Transport; 
-import org.elasticsearch.xpack.security.user.KibanaUser; import org.elasticsearch.xpack.security.user.SystemUser; -import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.xpack.ssl.SSLService; import java.util.Collections; @@ -111,14 +109,11 @@ public class SecurityServerTransportInterceptor extends AbstractComponent implem securityContext.executeAsUser(SystemUser.INSTANCE, (original) -> sendWithUser(connection, action, request, options, new ContextRestoreResponseHandler<>(threadPool.getThreadContext().wrapRestorable(original) , handler), sender), minVersion); - } else if (reservedRealmEnabled && connection.getVersion().before(Version.V_5_2_0) && - KibanaUser.NAME.equals(securityContext.getUser().principal())) { - final User kibanaUser = securityContext.getUser(); - final User bwcKibanaUser = new User(kibanaUser.principal(), new String[] { "kibana" }, kibanaUser.fullName(), - kibanaUser.email(), kibanaUser.metadata(), kibanaUser.enabled()); - securityContext.executeAsUser(bwcKibanaUser, (original) -> sendWithUser(connection, action, request, options, - new ContextRestoreResponseHandler<>(threadPool.getThreadContext().wrapRestorable(original), - handler), sender), connection.getVersion()); + } else if (AuthorizationUtils.shouldSetUserBasedOnActionOrigin(threadPool.getThreadContext())) { + AuthorizationUtils.switchUserBasedOnActionOriginAndExecute(threadPool.getThreadContext(), securityContext, + (original) -> sendWithUser(connection, action, request, options, + new ContextRestoreResponseHandler<>(threadPool.getThreadContext().wrapRestorable(original) + , handler), sender)); } else if (securityContext.getAuthentication() != null && securityContext.getAuthentication().getVersion().equals(minVersion) == false) { // re-write the authentication since we want the authentication version to match the version of the connection diff --git a/plugin/src/main/java/org/elasticsearch/xpack/template/TemplateUtils.java b/plugin/src/main/java/org/elasticsearch/xpack/template/TemplateUtils.java index b2d5fa660a3..87ef0e5f653 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/template/TemplateUtils.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/template/TemplateUtils.java @@ -17,13 +17,15 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.NotXContentException; import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; -import java.nio.charset.StandardCharsets; import java.util.Map; import java.util.function.Predicate; import java.util.regex.Pattern; @@ -37,6 +39,20 @@ public class TemplateUtils { private TemplateUtils() {} + /** + * Loads a JSON template as a resource and puts it into the provided map + */ + public static void loadTemplateIntoMap(String resource, Map map, String templateName, String version, + String versionProperty, Logger logger) { + final String template = loadTemplate(resource, version, versionProperty); + try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, template)) { + map.put(templateName, 
IndexTemplateMetaData.Builder.fromXContent(parser, templateName)); + } catch (IOException e) { + // TODO: should we handle this with a thrown exception? + logger.error("Error loading template [{}] as part of metadata upgrading", templateName); + } + } + /** * Loads a built-in template and returns its source. */ @@ -89,6 +105,20 @@ public class TemplateUtils { .replaceAll(version); } + /** + * Checks if a versioned template exists, and if it exists checks if the version is greater than or equal to the current version. + * @param templateName Name of the index template + * @param state Cluster state + */ + public static boolean checkTemplateExistsAndVersionIsGTECurrentVersion(String templateName, ClusterState state) { + IndexTemplateMetaData templateMetaData = state.metaData().templates().get(templateName); + if (templateMetaData == null) { + return false; + } + + return templateMetaData.version() != null && templateMetaData.version() >= Version.CURRENT.id; + } + /** * Checks if a versioned template exists, and if it exists checks if it is up-to-date with current version. * @param versionKey The property in the mapping's _meta field which stores the version info diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheckFactory.java b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheckFactory.java index 775e205f255..804e1590251 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheckFactory.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheckFactory.java @@ -5,8 +5,8 @@ */ package org.elasticsearch.xpack.upgrade; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.xpack.security.InternalClient; import java.util.Collection; import java.util.Collections; @@ -30,6 +30,6 @@ public interface IndexUpgradeCheckFactory { *

* This method is called from {@link org.elasticsearch.plugins.Plugin#createComponents} method. */ - IndexUpgradeCheck createCheck(InternalClient internalClient, ClusterService clusterService); + IndexUpgradeCheck createCheck(Client client, ClusterService clusterService); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java index 627082f8bc5..ca61ba416ea 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java @@ -24,8 +24,6 @@ import org.elasticsearch.rest.RestHandler; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xpack.security.InternalClient; -import org.elasticsearch.xpack.security.InternalSecurityClient; import org.elasticsearch.xpack.upgrade.actions.IndexUpgradeAction; import org.elasticsearch.xpack.upgrade.actions.IndexUpgradeInfoAction; import org.elasticsearch.xpack.upgrade.rest.RestIndexUpgradeAction; @@ -48,7 +46,7 @@ public class Upgrade implements ActionPlugin { private static final int EXPECTED_INDEX_FORMAT_VERSION = 6; private final Settings settings; - private final List> upgradeCheckFactories; + private final List> upgradeCheckFactories; public Upgrade(Settings settings) { this.settings = settings; @@ -58,10 +56,9 @@ public class Upgrade implements ActionPlugin { public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, ResourceWatcherService resourceWatcherService, ScriptService scriptService, NamedXContentRegistry xContentRegistry) { - final InternalSecurityClient internalSecurityClient = new InternalSecurityClient(settings, threadPool, client); List upgradeChecks = new ArrayList<>(upgradeCheckFactories.size()); - for (BiFunction checkFactory : upgradeCheckFactories) { - upgradeChecks.add(checkFactory.apply(internalSecurityClient, clusterService)); + for (BiFunction checkFactory : upgradeCheckFactories) { + upgradeChecks.add(checkFactory.apply(client, clusterService)); } return Collections.singletonList(new IndexUpgradeService(settings, Collections.unmodifiableList(upgradeChecks))); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index f5d93725bc8..e28a09f978d 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.bootstrap.BootstrapCheck; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; @@ -50,7 +51,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.XPackFeatureSet; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; -import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.ssl.SSLService; import org.elasticsearch.xpack.watcher.actions.ActionFactory; import org.elasticsearch.xpack.watcher.actions.ActionRegistry; @@ -70,7 +70,6 @@ import 
org.elasticsearch.xpack.watcher.actions.slack.SlackAction; import org.elasticsearch.xpack.watcher.actions.slack.SlackActionFactory; import org.elasticsearch.xpack.watcher.actions.webhook.WebhookAction; import org.elasticsearch.xpack.watcher.actions.webhook.WebhookActionFactory; -import org.elasticsearch.xpack.watcher.client.WatcherClient; import org.elasticsearch.xpack.watcher.common.http.HttpClient; import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate; import org.elasticsearch.xpack.watcher.common.http.HttpSettings; @@ -245,7 +244,7 @@ public class Watcher implements ActionPlugin { } } - public Collection createComponents(Clock clock, ScriptService scriptService, InternalClient internalClient, + public Collection createComponents(Clock clock, ScriptService scriptService, Client client, XPackLicenseState licenseState, ThreadPool threadPool, ClusterService clusterService, NamedXContentRegistry xContentRegistry, SSLService sslService) { @@ -260,6 +259,8 @@ public class Watcher implements ActionPlugin { throw new UncheckedIOException(e); } + new WatcherIndexTemplateRegistry(settings, clusterService, threadPool, client); + // http client Map httpAuthFactories = new HashMap<>(); httpAuthFactories.put(BasicAuth.TYPE, new BasicAuthFactory(cryptoService)); @@ -295,14 +296,14 @@ public class Watcher implements ActionPlugin { final ConditionRegistry conditionRegistry = new ConditionRegistry(Collections.unmodifiableMap(parsers), clock); final Map transformFactories = new HashMap<>(); transformFactories.put(ScriptTransform.TYPE, new ScriptTransformFactory(settings, scriptService)); - transformFactories.put(SearchTransform.TYPE, new SearchTransformFactory(settings, internalClient, xContentRegistry, scriptService)); + transformFactories.put(SearchTransform.TYPE, new SearchTransformFactory(settings, client, xContentRegistry, scriptService)); final TransformRegistry transformRegistry = new TransformRegistry(settings, Collections.unmodifiableMap(transformFactories)); // actions final Map actionFactoryMap = new HashMap<>(); actionFactoryMap.put(EmailAction.TYPE, new EmailActionFactory(settings, emailService, templateEngine, emailAttachmentsParser)); actionFactoryMap.put(WebhookAction.TYPE, new WebhookActionFactory(settings, httpClient, httpTemplateParser, templateEngine)); - actionFactoryMap.put(IndexAction.TYPE, new IndexActionFactory(settings, internalClient)); + actionFactoryMap.put(IndexAction.TYPE, new IndexActionFactory(settings, client)); actionFactoryMap.put(LoggingAction.TYPE, new LoggingActionFactory(settings, templateEngine)); actionFactoryMap.put(HipChatAction.TYPE, new HipChatActionFactory(settings, templateEngine, hipChatService)); actionFactoryMap.put(JiraAction.TYPE, new JiraActionFactory(settings, templateEngine, jiraService)); @@ -313,16 +314,14 @@ public class Watcher implements ActionPlugin { // inputs final Map inputFactories = new HashMap<>(); inputFactories.put(SearchInput.TYPE, - new SearchInputFactory(settings, internalClient, xContentRegistry, scriptService)); + new SearchInputFactory(settings, client, xContentRegistry, scriptService)); inputFactories.put(SimpleInput.TYPE, new SimpleInputFactory(settings)); inputFactories.put(HttpInput.TYPE, new HttpInputFactory(settings, httpClient, templateEngine, httpTemplateParser)); inputFactories.put(NoneInput.TYPE, new NoneInputFactory(settings)); final InputRegistry inputRegistry = new InputRegistry(settings, inputFactories); inputFactories.put(ChainInput.TYPE, new ChainInputFactory(settings, inputRegistry)); - final 
WatcherClient watcherClient = new WatcherClient(internalClient); - - final HistoryStore historyStore = new HistoryStore(settings, internalClient); + final HistoryStore historyStore = new HistoryStore(settings, client); // schedulers final Set scheduleParsers = new HashSet<>(); @@ -344,7 +343,7 @@ public class Watcher implements ActionPlugin { final TriggerService triggerService = new TriggerService(settings, triggerEngines); final TriggeredWatch.Parser triggeredWatchParser = new TriggeredWatch.Parser(settings, triggerService); - final TriggeredWatchStore triggeredWatchStore = new TriggeredWatchStore(settings, internalClient, triggeredWatchParser); + final TriggeredWatchStore triggeredWatchStore = new TriggeredWatchStore(settings, client, triggeredWatchParser); final WatcherSearchTemplateService watcherSearchTemplateService = new WatcherSearchTemplateService(settings, scriptService, xContentRegistry); @@ -352,16 +351,13 @@ public class Watcher implements ActionPlugin { final Watch.Parser watchParser = new Watch.Parser(settings, triggerService, registry, inputRegistry, cryptoService, clock); final ExecutionService executionService = new ExecutionService(settings, historyStore, triggeredWatchStore, watchExecutor, - clock, watchParser, clusterService, internalClient); + clock, watchParser, clusterService, client); final Consumer> triggerEngineListener = getTriggerEngineListener(executionService); triggerService.register(triggerEngineListener); - final WatcherIndexTemplateRegistry watcherIndexTemplateRegistry = new WatcherIndexTemplateRegistry(settings, clusterService, - threadPool, internalClient); - WatcherService watcherService = new WatcherService(settings, triggerService, triggeredWatchStore, executionService, - watchParser, internalClient); + watchParser, client); final WatcherLifeCycleService watcherLifeCycleService = new WatcherLifeCycleService(settings, threadPool, clusterService, watcherService); @@ -369,10 +365,9 @@ public class Watcher implements ActionPlugin { listener = new WatcherIndexingListener(settings, watchParser, clock, triggerService); clusterService.addListener(listener); - return Arrays.asList(registry, watcherClient, inputRegistry, historyStore, triggerService, triggeredWatchParser, + return Arrays.asList(registry, inputRegistry, historyStore, triggerService, triggeredWatchParser, watcherLifeCycleService, executionService, triggerEngineListener, watcherService, watchParser, - configuredTriggerEngine, triggeredWatchStore, watcherSearchTemplateService, watcherIndexTemplateRegistry, - slackService, pagerDutyService, hipChatService); + configuredTriggerEngine, triggeredWatchStore, watcherSearchTemplateService, slackService, pagerDutyService, hipChatService); } protected TriggerEngine getTriggerEngine(Clock clock, ScheduleRegistry scheduleRegistry) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java index 74f7efbe840..407457a3b36 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java @@ -26,11 +26,11 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; import 
org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortBuilders; -import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.watcher.execution.ExecutionService; import org.elasticsearch.xpack.watcher.execution.TriggeredWatch; import org.elasticsearch.xpack.watcher.execution.TriggeredWatchStore; @@ -51,6 +51,8 @@ import java.util.stream.Collectors; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.elasticsearch.xpack.ClientHelper.WATCHER_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.watcher.support.Exceptions.illegalState; import static org.elasticsearch.xpack.watcher.watch.Watch.INDEX; @@ -69,7 +71,7 @@ public class WatcherService extends AbstractComponent { private final TimeValue defaultSearchTimeout; public WatcherService(Settings settings, TriggerService triggerService, TriggeredWatchStore triggeredWatchStore, - ExecutionService executionService, Watch.Parser parser, InternalClient client) { + ExecutionService executionService, Watch.Parser parser, Client client) { super(settings); this.triggerService = triggerService; this.triggeredWatchStore = triggeredWatchStore; @@ -200,35 +202,36 @@ public class WatcherService extends AbstractComponent { return Collections.emptyList(); } - RefreshResponse refreshResponse = client.admin().indices().refresh(new RefreshRequest(INDEX)) - .actionGet(TimeValue.timeValueSeconds(5)); - if (refreshResponse.getSuccessfulShards() < indexMetaData.getNumberOfShards()) { - throw illegalState("not all required shards have been refreshed"); - } - - // find out local shards - String watchIndexName = indexMetaData.getIndex().getName(); - RoutingNode routingNode = clusterState.getRoutingNodes().node(clusterState.nodes().getLocalNodeId()); - // yes, this can happen, if the state is not recovered - if (routingNode == null) { - return Collections.emptyList(); - } - List localShards = routingNode.shardsWithState(watchIndexName, RELOCATING, STARTED); - - // find out all allocation ids - List watchIndexShardRoutings = clusterState.getRoutingTable().allShards(watchIndexName); - + SearchResponse response = null; List watches = new ArrayList<>(); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { + RefreshResponse refreshResponse = client.admin().indices().refresh(new RefreshRequest(INDEX)) + .actionGet(TimeValue.timeValueSeconds(5)); + if (refreshResponse.getSuccessfulShards() < indexMetaData.getNumberOfShards()) { + throw illegalState("not all required shards have been refreshed"); + } + + // find out local shards + String watchIndexName = indexMetaData.getIndex().getName(); + RoutingNode routingNode = clusterState.getRoutingNodes().node(clusterState.nodes().getLocalNodeId()); + // yes, this can happen, if the state is not recovered + if (routingNode == null) { + return Collections.emptyList(); + } + List localShards = routingNode.shardsWithState(watchIndexName, RELOCATING, STARTED); + + // find out all allocation ids + List watchIndexShardRoutings = clusterState.getRoutingTable().allShards(watchIndexName); + + SearchRequest searchRequest = new SearchRequest(INDEX) + .scroll(scrollTimeout) + .preference(Preference.ONLY_LOCAL.toString()) + .source(new SearchSourceBuilder() + .size(scrollSize) + 
.sort(SortBuilders.fieldSort("_doc")) + .version(true)); + response = client.search(searchRequest).actionGet(defaultSearchTimeout); - SearchRequest searchRequest = new SearchRequest(INDEX) - .scroll(scrollTimeout) - .preference(Preference.ONLY_LOCAL.toString()) - .source(new SearchSourceBuilder() - .size(scrollSize) - .sort(SortBuilders.fieldSort("_doc")) - .version(true)); - SearchResponse response = client.search(searchRequest).actionGet(defaultSearchTimeout); - try { if (response.getTotalShards() != response.getSuccessfulShards()) { throw new ElasticsearchException("Partial response while loading watches"); } @@ -283,9 +286,13 @@ public class WatcherService extends AbstractComponent { response = client.searchScroll(request).actionGet(defaultSearchTimeout); } } finally { - ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); - clearScrollRequest.addScrollId(response.getScrollId()); - client.clearScroll(clearScrollRequest).actionGet(scrollTimeout); + if (response != null) { + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { + ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); + clearScrollRequest.addScrollId(response.getScrollId()); + client.clearScroll(clearScrollRequest).actionGet(scrollTimeout); + } + } } logger.debug("Loaded [{}] watches for execution", watches.size()); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/index/ExecutableIndexAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/index/ExecutableIndexAction.java index 9836096e1d1..3a9ebd9bdd2 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/index/ExecutableIndexAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/actions/index/ExecutableIndexAction.java @@ -13,9 +13,9 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.watcher.actions.Action; @@ -35,6 +35,8 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Stream; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.ClientHelper.WATCHER_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.watcher.support.Exceptions.illegalState; public class ExecutableIndexAction extends ExecutableAction { @@ -103,7 +105,9 @@ public class ExecutableIndexAction extends ExecutableAction { XContentType.JSON)); } - response = client.index(indexRequest).get(indexDefaultTimeout.millis(), TimeUnit.MILLISECONDS); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { + response = client.index(indexRequest).get(indexDefaultTimeout.millis(), TimeUnit.MILLISECONDS); + } try (XContentBuilder builder = jsonBuilder()) { indexResponseToXContent(builder, response); bytesReference = builder.bytes(); @@ -136,21 +140,23 @@ public class ExecutableIndexAction extends ExecutableAction { } bulkRequest.add(indexRequest); } - BulkResponse 
bulkResponse = client.bulk(bulkRequest).get(bulkDefaultTimeout.millis(), TimeUnit.MILLISECONDS); - try (XContentBuilder jsonBuilder = jsonBuilder().startArray()) { - for (BulkItemResponse item : bulkResponse) { - itemResponseToXContent(jsonBuilder, item); - } - jsonBuilder.endArray(); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { + BulkResponse bulkResponse = client.bulk(bulkRequest).get(bulkDefaultTimeout.millis(), TimeUnit.MILLISECONDS); + try (XContentBuilder jsonBuilder = jsonBuilder().startArray()) { + for (BulkItemResponse item : bulkResponse) { + itemResponseToXContent(jsonBuilder, item); + } + jsonBuilder.endArray(); - // different error states, depending on how successful the bulk operation was - long failures = Stream.of(bulkResponse.getItems()).filter(BulkItemResponse::isFailed).count(); - if (failures == 0) { - return new IndexAction.Result(Status.SUCCESS, new XContentSource(jsonBuilder.bytes(), XContentType.JSON)); - } else if (failures == bulkResponse.getItems().length) { - return new IndexAction.Result(Status.FAILURE, new XContentSource(jsonBuilder.bytes(), XContentType.JSON)); - } else { - return new IndexAction.Result(Status.PARTIAL_FAILURE, new XContentSource(jsonBuilder.bytes(), XContentType.JSON)); + // different error states, depending on how successful the bulk operation was + long failures = Stream.of(bulkResponse.getItems()).filter(BulkItemResponse::isFailed).count(); + if (failures == 0) { + return new IndexAction.Result(Status.SUCCESS, new XContentSource(jsonBuilder.bytes(), XContentType.JSON)); + } else if (failures == bulkResponse.getItems().length) { + return new IndexAction.Result(Status.FAILURE, new XContentSource(jsonBuilder.bytes(), XContentType.JSON)); + } else { + return new IndexAction.Result(Status.PARTIAL_FAILURE, new XContentSource(jsonBuilder.bytes(), XContentType.JSON)); + } } } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java index 370edaa402c..4d4d601aac7 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; @@ -59,6 +60,8 @@ import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import static org.elasticsearch.xpack.ClientHelper.WATCHER_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; import static org.joda.time.DateTimeZone.UTC; public class ExecutionService extends AbstractComponent { @@ -355,11 +358,12 @@ public class ExecutionService extends AbstractComponent { UpdateRequest updateRequest = new UpdateRequest(Watch.INDEX, Watch.DOC_TYPE, watch.id()); updateRequest.doc(source); updateRequest.version(watch.version()); - try { + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { 
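+            // the watcher origin is only set while the stored context is open; closing it restores the caller's original context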
client.update(updateRequest).actionGet(indexDefaultTimeout); } catch (DocumentMissingException e) { // do not rethrow this exception, otherwise the watch history will contain an exception // even though the execution might have been fine + // TODO should we really just drop this exception on the floor? } } @@ -505,10 +509,12 @@ public class ExecutionService extends AbstractComponent { * @return The GetResponse of calling the get API of this watch */ private GetResponse getWatch(String id) { - GetRequest getRequest = new GetRequest(Watch.INDEX, Watch.DOC_TYPE, id).preference(Preference.LOCAL.type()).realtime(true); - PlainActionFuture future = PlainActionFuture.newFuture(); - client.get(getRequest, future); - return future.actionGet(); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { + GetRequest getRequest = new GetRequest(Watch.INDEX, Watch.DOC_TYPE, id).preference(Preference.LOCAL.type()).realtime(true); + PlainActionFuture future = PlainActionFuture.newFuture(); + client.get(getRequest, future); + return future.actionGet(); + } } public Map usageStats() { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java index 85c55271a1a..1fab51640c9 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.routing.Preference; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -45,6 +46,9 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.ClientHelper.WATCHER_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.watcher.support.Exceptions.illegalState; public class TriggeredWatchStore extends AbstractComponent { @@ -107,7 +111,8 @@ public class TriggeredWatchStore extends AbstractComponent { } ensureStarted(); - client.bulk(createBulkRequest(triggeredWatches, DOC_TYPE), listener); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, createBulkRequest(triggeredWatches, DOC_TYPE), + listener, client::bulk); } public BulkResponse putAll(final List triggeredWatches) throws IOException { @@ -140,7 +145,9 @@ public class TriggeredWatchStore extends AbstractComponent { public void delete(Wid wid) { ensureStarted(); DeleteRequest request = new DeleteRequest(INDEX_NAME, DOC_TYPE, wid.value()); - client.delete(request); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { + client.delete(request); // FIXME shouldn't we wait before saying the delete was successful + } logger.trace("successfully deleted triggered watch with id [{}]", wid); } @@ -170,7 +177,7 @@ public class TriggeredWatchStore extends AbstractComponent { return Collections.emptyList(); } - try 
{ + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { client.admin().indices().refresh(new RefreshRequest(TriggeredWatchStore.INDEX_NAME)).actionGet(TimeValue.timeValueSeconds(5)); } catch (IndexNotFoundException e) { return Collections.emptyList(); @@ -187,9 +194,10 @@ public class TriggeredWatchStore extends AbstractComponent { .sort(SortBuilders.fieldSort("_doc")) .version(true)); - SearchResponse response = client.search(searchRequest).actionGet(defaultSearchTimeout); - logger.debug("trying to find triggered watches for ids {}: found [{}] docs", ids, response.getHits().getTotalHits()); - try { + SearchResponse response = null; + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { + response = client.search(searchRequest).actionGet(defaultSearchTimeout); + logger.debug("trying to find triggered watches for ids {}: found [{}] docs", ids, response.getHits().getTotalHits()); while (response.getHits().getHits().length != 0) { for (SearchHit hit : response.getHits()) { Wid wid = new Wid(hit.getId()); @@ -203,9 +211,13 @@ public class TriggeredWatchStore extends AbstractComponent { response = client.searchScroll(request).actionGet(defaultSearchTimeout); } } finally { - ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); - clearScrollRequest.addScrollId(response.getScrollId()); - client.clearScroll(clearScrollRequest).actionGet(scrollTimeout); + if (response != null) { + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { + ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); + clearScrollRequest.addScrollId(response.getScrollId()); + client.clearScroll(clearScrollRequest).actionGet(scrollTimeout); + } + } } return triggeredWatches; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/history/HistoryStore.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/history/HistoryStore.java index 73d93d5320e..ef98b439f3e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/history/HistoryStore.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/history/HistoryStore.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.VersionConflictEngineException; @@ -34,6 +35,8 @@ import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; +import static org.elasticsearch.xpack.ClientHelper.WATCHER_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.watcher.support.Exceptions.ioException; public class HistoryStore extends AbstractComponent { @@ -79,7 +82,8 @@ public class HistoryStore extends AbstractComponent { } String index = getHistoryIndexNameForTime(watchRecord.triggerEvent().triggeredTime()); putUpdateLock.lock(); - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + try (XContentBuilder builder = XContentFactory.jsonBuilder(); + ThreadContext.StoredContext ignore = 
stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { watchRecord.toXContent(builder, WatcherParams.builder().hideSecrets(true).build()); IndexRequest request = new IndexRequest(index, DOC_TYPE, watchRecord.id().value()) @@ -105,7 +109,8 @@ public class HistoryStore extends AbstractComponent { String index = getHistoryIndexNameForTime(watchRecord.triggerEvent().triggeredTime()); putUpdateLock.lock(); try { - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + try (XContentBuilder builder = XContentFactory.jsonBuilder(); + ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { watchRecord.toXContent(builder, WatcherParams.builder().hideSecrets(true).build()); IndexRequest request = new IndexRequest(index, DOC_TYPE, watchRecord.id().value()) @@ -116,7 +121,8 @@ public class HistoryStore extends AbstractComponent { } catch (VersionConflictEngineException vcee) { watchRecord = new WatchRecord.MessageWatchRecord(watchRecord, ExecutionState.EXECUTED_MULTIPLE_TIMES, "watch record [{ " + watchRecord.id() + " }] has been stored before, previous state [" + watchRecord.state() + "]"); - try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) { + try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder(); + ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { IndexRequest request = new IndexRequest(index, DOC_TYPE, watchRecord.id().value()) .source(xContentBuilder.value(watchRecord)); client.index(request).get(30, TimeUnit.SECONDS); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java index 056463d3401..8d2de53a539 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java @@ -12,6 +12,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -27,6 +28,8 @@ import org.elasticsearch.xpack.watcher.watch.Payload; import java.util.Map; +import static org.elasticsearch.xpack.ClientHelper.WATCHER_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.watcher.input.search.SearchInput.TYPE; /** @@ -67,7 +70,10 @@ public class ExecutableSearchInput extends ExecutableInput templateCreationsInProgress = new ConcurrentHashMap<>(); - public WatcherIndexTemplateRegistry(Settings settings, ClusterService clusterService, ThreadPool threadPool, InternalClient client) { + public WatcherIndexTemplateRegistry(Settings settings, ClusterService clusterService, ThreadPool threadPool, Client client) { super(settings); this.client = client; this.threadPool = threadPool; @@ -112,21 +115,22 @@ public class WatcherIndexTemplateRegistry extends AbstractComponent implements C PutIndexTemplateRequest request = new PutIndexTemplateRequest(templateName).source(config.load(), XContentType.JSON); 
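+            // the put template request below is executed asynchronously under the watcher origin; failures are only logged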
request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); - client.admin().indices().putTemplate(request, new ActionListener() { - @Override - public void onResponse(PutIndexTemplateResponse response) { - creationCheck.set(false); - if (response.isAcknowledged() == false) { - logger.error("Error adding watcher template [{}], request was not acknowledged", templateName); - } - } + executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, request, + new ActionListener() { + @Override + public void onResponse(PutIndexTemplateResponse response) { + creationCheck.set(false); + if (response.isAcknowledged() == false) { + logger.error("Error adding watcher template [{}], request was not acknowledged", templateName); + } + } - @Override - public void onFailure(Exception e) { - creationCheck.set(false); - logger.error(new ParameterizedMessage("Error adding watcher template [{}]", templateName), e); - } - }); + @Override + public void onFailure(Exception e) { + creationCheck.set(false); + logger.error(new ParameterizedMessage("Error adding watcher template [{}]", templateName), e); + } + }, client.admin().indices()::putTemplate); }); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java index a0a8e17d7db..e9218831a1e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java @@ -8,9 +8,11 @@ package org.elasticsearch.xpack.watcher.transport.actions.ack; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.Preference; @@ -22,7 +24,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.watcher.actions.ActionWrapper; import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction; import org.elasticsearch.xpack.watcher.watch.Watch; @@ -33,6 +34,8 @@ import java.util.Arrays; import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.ClientHelper.WATCHER_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; import static org.joda.time.DateTimeZone.UTC; public class TransportAckWatchAction extends WatcherTransportAction { @@ -44,7 +47,7 @@ public class TransportAckWatchAction extends WatcherTransportAction { - if (response.isExists() == false) { - listener.onFailure(new ResourceNotFoundException("Watch with id [{}] does not exist", request.getWatchId())); - } else { - DateTime now = new DateTime(clock.millis(), UTC); - Watch watch = parser.parseWithSecrets(request.getWatchId(), 
true, response.getSourceAsBytesRef(), now, XContentType.JSON); - watch.version(response.getVersion()); - watch.status().version(response.getVersion()); - String[] actionIds = request.getActionIds(); - if (actionIds == null || actionIds.length == 0) { - actionIds = new String[]{Watch.ALL_ACTIONS_ID}; - } + executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, getRequest, + ActionListener.wrap((response) -> { + if (response.isExists() == false) { + listener.onFailure(new ResourceNotFoundException("Watch with id [{}] does not exist", request.getWatchId())); + } else { + DateTime now = new DateTime(clock.millis(), UTC); + Watch watch = + parser.parseWithSecrets(request.getWatchId(), true, response.getSourceAsBytesRef(), now, XContentType.JSON); + watch.version(response.getVersion()); + watch.status().version(response.getVersion()); + String[] actionIds = request.getActionIds(); + if (actionIds == null || actionIds.length == 0) { + actionIds = new String[]{Watch.ALL_ACTIONS_ID}; + } - // exit early in case nothing changes - boolean isChanged = watch.ack(now, actionIds); - if (isChanged == false) { - listener.onResponse(new AckWatchResponse(watch.status())); - return; - } + // exit early in case nothing changes + boolean isChanged = watch.ack(now, actionIds); + if (isChanged == false) { + listener.onResponse(new AckWatchResponse(watch.status())); + return; + } - UpdateRequest updateRequest = new UpdateRequest(Watch.INDEX, Watch.DOC_TYPE, request.getWatchId()); - // this may reject this action, but prevents concurrent updates from a watch execution - updateRequest.version(response.getVersion()); - updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - XContentBuilder builder = jsonBuilder(); - builder.startObject() - .startObject(Watch.Field.STATUS.getPreferredName()) - .startObject("actions"); + UpdateRequest updateRequest = new UpdateRequest(Watch.INDEX, Watch.DOC_TYPE, request.getWatchId()); + // this may reject this action, but prevents concurrent updates from a watch execution + updateRequest.version(response.getVersion()); + updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + XContentBuilder builder = jsonBuilder(); + builder.startObject() + .startObject(Watch.Field.STATUS.getPreferredName()) + .startObject("actions"); - List actionIdsAsList = Arrays.asList(actionIds); - boolean updateAll = actionIdsAsList.contains("_all"); - for (ActionWrapper actionWrapper : watch.actions()) { - if (updateAll || actionIdsAsList.contains(actionWrapper.id())) { - builder.startObject(actionWrapper.id()) - .field("ack", watch.status().actionStatus(actionWrapper.id()).ackStatus(), ToXContent.EMPTY_PARAMS) - .endObject(); + List actionIdsAsList = Arrays.asList(actionIds); + boolean updateAll = actionIdsAsList.contains("_all"); + for (ActionWrapper actionWrapper : watch.actions()) { + if (updateAll || actionIdsAsList.contains(actionWrapper.id())) { + builder.startObject(actionWrapper.id()) + .field("ack", watch.status().actionStatus(actionWrapper.id()).ackStatus(), ToXContent.EMPTY_PARAMS) + .endObject(); + } + } + + builder.endObject().endObject().endObject(); + updateRequest.doc(builder); + + executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, updateRequest, + ActionListener.wrap( + (updateResponse) -> listener.onResponse(new AckWatchResponse(watch.status())), + listener::onFailure), client::update); } - } - - builder.endObject().endObject().endObject(); - updateRequest.doc(builder); - - client.update(updateRequest, 
ActionListener.wrap( - (updateResponse) -> listener.onResponse(new AckWatchResponse(watch.status())), - listener::onFailure)); - } - }, listener::onFailure)); + }, listener::onFailure), client::get); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/TransportActivateWatchAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/TransportActivateWatchAction.java index 614fe5e3254..1f543643356 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/TransportActivateWatchAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/TransportActivateWatchAction.java @@ -8,9 +8,11 @@ package org.elasticsearch.xpack.watcher.transport.actions.activate; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.Preference; @@ -21,7 +23,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction; import org.elasticsearch.xpack.watcher.watch.Watch; import org.elasticsearch.xpack.watcher.watch.WatchStatus; @@ -31,6 +32,8 @@ import java.io.IOException; import java.time.Clock; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.ClientHelper.WATCHER_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.watcher.support.WatcherDateTimeUtils.writeDate; import static org.joda.time.DateTimeZone.UTC; @@ -46,7 +49,7 @@ public class TransportActivateWatchAction extends WatcherTransportAction { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, updateRequest, + ActionListener.wrap(updateResponse -> { GetRequest getRequest = new GetRequest(Watch.INDEX, Watch.DOC_TYPE, request.getWatchId()) .preference(Preference.LOCAL.type()).realtime(true); - client.get(getRequest, ActionListener.wrap(getResponse -> { - if (getResponse.isExists()) { - Watch watch = parser.parseWithSecrets(request.getWatchId(), true, getResponse.getSourceAsBytesRef(), now, - XContentType.JSON); - watch.version(getResponse.getVersion()); - watch.status().version(getResponse.getVersion()); - listener.onResponse(new ActivateWatchResponse(watch.status())); - } else { - listener.onFailure(new ResourceNotFoundException("Watch with id [{}] does not exist", request.getWatchId())); - } - }, listener::onFailure)); - }, listener::onFailure)); + + executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, getRequest, + ActionListener.wrap(getResponse -> { + if (getResponse.isExists()) { + Watch watch = parser.parseWithSecrets(request.getWatchId(), true, getResponse.getSourceAsBytesRef(), now, + XContentType.JSON); + 
watch.version(getResponse.getVersion()); + watch.status().version(getResponse.getVersion()); + listener.onResponse(new ActivateWatchResponse(watch.status())); + } else { + listener.onFailure(new ResourceNotFoundException("Watch with id [{}] does not exist", + request.getWatchId())); + } + }, listener::onFailure), client::get); + }, listener::onFailure), client::update); } catch (IOException e) { listener.onFailure(e); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java index 776ce86432c..2287393f886 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/delete/TransportDeleteWatchAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.watcher.transport.actions.delete; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.WriteRequest; @@ -17,9 +18,11 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.watcher.watch.Watch; +import static org.elasticsearch.xpack.ClientHelper.WATCHER_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + /** * Performs the delete operation. This inherits directly from HandledTransportAction, because deletion should always work * independently from the license check in WatcherTransportAction! 
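
A minimal sketch of the asynchronous variant used by the transport actions below (delete/get/update/index): executeAsyncWithOrigin stashes the origin before handing the request to a method reference on the client, and wraps the listener so it runs under the caller's original thread context. Illustrative only; the class and method names (OriginAsyncSketch, deleteWatchDoc) and the parameter list are hypothetical.

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.delete.DeleteRequest;
    import org.elasticsearch.action.delete.DeleteResponse;
    import org.elasticsearch.client.Client;

    import static org.elasticsearch.xpack.ClientHelper.WATCHER_ORIGIN;
    import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin;

    class OriginAsyncSketch {

        // Deletes a document on behalf of watcher: the origin transient is set before
        // client::delete runs, and the listener observes the original context again.
        static void deleteWatchDoc(Client client, String index, String type, String id,
                                   ActionListener<DeleteResponse> listener) {
            DeleteRequest request = new DeleteRequest(index, type, id);
            executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN,
                    request, listener, client::delete);
        }
    }
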
@@ -31,7 +34,7 @@ public class TransportDeleteWatchAction extends HandledTransportAction listener) { DeleteRequest deleteRequest = new DeleteRequest(Watch.INDEX, Watch.DOC_TYPE, request.getId()); deleteRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - client.delete(deleteRequest, ActionListener.wrap(deleteResponse -> { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, deleteRequest, + ActionListener.wrap(deleteResponse -> { boolean deleted = deleteResponse.getResult() == DocWriteResponse.Result.DELETED; DeleteWatchResponse response = new DeleteWatchResponse(deleteResponse.getId(), deleteResponse.getVersion(), deleted); listener.onResponse(response); - }, - listener::onFailure)); + }, listener::onFailure), client::delete); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java index 5cf27b46f65..ad7eb0e009c 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java @@ -6,10 +6,10 @@ package org.elasticsearch.xpack.watcher.transport.actions.execute; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -23,7 +23,6 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.XPackPlugin; -import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; import org.elasticsearch.xpack.watcher.execution.ActionExecutionMode; import org.elasticsearch.xpack.watcher.execution.ExecutionService; @@ -43,6 +42,8 @@ import java.io.IOException; import java.time.Clock; import java.util.Map; +import static org.elasticsearch.xpack.ClientHelper.WATCHER_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; import static org.joda.time.DateTimeZone.UTC; /** @@ -60,7 +61,7 @@ public class TransportExecuteWatchAction extends WatcherTransportAction { - if (response.isExists()) { - Watch watch = watchParser.parse(request.getId(), true, response.getSourceAsBytesRef(), request.getXContentType()); - watch.version(response.getVersion()); - watch.status().version(response.getVersion()); - executeWatch(request, listener, watch, true); - } else { - listener.onFailure(new ResourceNotFoundException("Watch with id [{}] does not exist", request.getId())); - } - }, listener::onFailure)); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, getRequest, + ActionListener.wrap(response -> { + if (response.isExists()) { + Watch watch = + watchParser.parse(request.getId(), true, response.getSourceAsBytesRef(), request.getXContentType()); + watch.version(response.getVersion()); + watch.status().version(response.getVersion()); + executeWatch(request, listener, watch, true); 
+ } else { + listener.onFailure(new ResourceNotFoundException("Watch with id [{}] does not exist", request.getId())); + } + }, listener::onFailure), client::get); } else if (request.getWatchSource() != null) { try { assert !request.isRecordExecution(); @@ -93,7 +96,7 @@ public class TransportExecuteWatchAction extends WatcherTransportAction) () -> new ParameterizedMessage("failed to parse [{}]", request.getId()), e); + logger.error(new ParameterizedMessage("failed to parse [{}]", request.getId()), e); listener.onFailure(e); } } else { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java index 9252158ccd0..71c2fc7968b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.watcher.transport.actions.get; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -19,7 +20,6 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.watcher.support.xcontent.WatcherParams; import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction; import org.elasticsearch.xpack.watcher.watch.Watch; @@ -28,6 +28,8 @@ import org.joda.time.DateTime; import java.time.Clock; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.ClientHelper.WATCHER_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; import static org.joda.time.DateTimeZone.UTC; public class TransportGetWatchAction extends WatcherTransportAction { @@ -39,7 +41,7 @@ public class TransportGetWatchAction extends WatcherTransportAction { - if (getResponse.isExists()) { - try (XContentBuilder builder = jsonBuilder()) { - // When we return the watch via the Get Watch REST API, we want to return the watch as was specified in the put api, - // we don't include the status in the watch source itself, but as a separate top level field, so that - // it indicates the the status is managed by watcher itself. - DateTime now = new DateTime(clock.millis(), UTC); - Watch watch = parser.parseWithSecrets(request.getId(), true, getResponse.getSourceAsBytesRef(), now, XContentType.JSON); - watch.toXContent(builder, WatcherParams.builder() - .hideSecrets(true) - .put(Watch.INCLUDE_STATUS_KEY, false) - .build()); - watch.version(getResponse.getVersion()); - watch.status().version(getResponse.getVersion()); - listener.onResponse(new GetWatchResponse(watch.id(), watch.status(), builder.bytes(), XContentType.JSON)); - } - } else { - listener.onResponse(new GetWatchResponse(request.getId())); - } - }, e -> { - // special case. 
This API should not care if the index is missing or not, it should respond with the watch not being found - if (e instanceof IndexNotFoundException) { - listener.onResponse(new GetWatchResponse(request.getId())); - } else { - listener.onFailure(e); - } - })); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, getRequest, + ActionListener.wrap(getResponse -> { + if (getResponse.isExists()) { + try (XContentBuilder builder = jsonBuilder()) { + // When we return the watch via the Get Watch REST API, we want to return the watch as was specified in + // the put api, we don't include the status in the watch source itself, but as a separate top level field, + // so that it indicates the the status is managed by watcher itself. + DateTime now = new DateTime(clock.millis(), UTC); + Watch watch = parser.parseWithSecrets(request.getId(), true, getResponse.getSourceAsBytesRef(), now, + XContentType.JSON); + watch.toXContent(builder, WatcherParams.builder() + .hideSecrets(true) + .put(Watch.INCLUDE_STATUS_KEY, false) + .build()); + watch.version(getResponse.getVersion()); + watch.status().version(getResponse.getVersion()); + listener.onResponse(new GetWatchResponse(watch.id(), watch.status(), builder.bytes(), XContentType.JSON)); + } + } else { + listener.onResponse(new GetWatchResponse(request.getId())); + } + }, e -> { + // special case. This API should not care if the index is missing or not, + // it should respond with the watch not being found + if (e instanceof IndexNotFoundException) { + listener.onResponse(new GetWatchResponse(request.getId())); + } else { + listener.onFailure(e); + } + }), client::get); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java index c6b4d04db86..422199c6b98 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java @@ -8,8 +8,10 @@ package org.elasticsearch.xpack.watcher.transport.actions.put; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; @@ -19,7 +21,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.watcher.support.xcontent.WatcherParams; import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction; import org.elasticsearch.xpack.watcher.watch.Payload; @@ -29,18 +30,20 @@ import org.joda.time.DateTime; import java.time.Clock; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.ClientHelper.WATCHER_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; import static 
org.joda.time.DateTimeZone.UTC; public class TransportPutWatchAction extends WatcherTransportAction { private final Clock clock; private final Watch.Parser parser; - private final InternalClient client; + private final Client client; @Inject public TransportPutWatchAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Clock clock, XPackLicenseState licenseState, - Watch.Parser parser, InternalClient client) { + Watch.Parser parser, Client client) { super(settings, PutWatchAction.NAME, transportService, threadPool, actionFilters, indexNameExpressionResolver, licenseState, PutWatchRequest::new); this.clock = clock; @@ -64,10 +67,12 @@ public class TransportPutWatchAction extends WatcherTransportAction { - boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED; - listener.onResponse(new PutWatchResponse(indexResponse.getId(), indexResponse.getVersion(), created)); - }, listener::onFailure)); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, indexRequest, + ActionListener.wrap(indexResponse -> { + boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED; + listener.onResponse(new PutWatchResponse(indexResponse.getId(), indexResponse.getVersion(), created)); + }, listener::onFailure), + client::index); } } catch (Exception e) { listener.onFailure(e); diff --git a/plugin/src/main/resources/logstash-index-template.json b/plugin/src/main/resources/logstash-index-template.json index d70b5b4f5f4..462e355ff88 100644 --- a/plugin/src/main/resources/logstash-index-template.json +++ b/plugin/src/main/resources/logstash-index-template.json @@ -1,5 +1,5 @@ { - "index_patterns" : ".logstash", + "index_patterns" : [ ".logstash" ], "settings": { "index": { "number_of_shards": 1, diff --git a/plugin/src/main/resources/monitoring-alerts.json b/plugin/src/main/resources/monitoring-alerts.json index e59bb08e275..cf4289ff647 100644 --- a/plugin/src/main/resources/monitoring-alerts.json +++ b/plugin/src/main/resources/monitoring-alerts.json @@ -1,5 +1,5 @@ { - "index_patterns": ".monitoring-alerts-${monitoring.template.version}", + "index_patterns": [ ".monitoring-alerts-${monitoring.template.version}" ], "version": 7000001, "settings": { "index": { diff --git a/plugin/src/main/resources/monitoring-beats.json b/plugin/src/main/resources/monitoring-beats.json index e4e137e4902..db72cbdff15 100644 --- a/plugin/src/main/resources/monitoring-beats.json +++ b/plugin/src/main/resources/monitoring-beats.json @@ -1,5 +1,5 @@ { - "index_patterns": ".monitoring-beats-${monitoring.template.version}-*", + "index_patterns": [ ".monitoring-beats-${monitoring.template.version}-*" ], "version": 7000001, "settings": { "index.number_of_shards": 1, diff --git a/plugin/src/main/resources/monitoring-es.json b/plugin/src/main/resources/monitoring-es.json index da31d95f8ca..926edf535ec 100644 --- a/plugin/src/main/resources/monitoring-es.json +++ b/plugin/src/main/resources/monitoring-es.json @@ -1,5 +1,5 @@ { - "index_patterns": ".monitoring-es-${monitoring.template.version}-*", + "index_patterns": [ ".monitoring-es-${monitoring.template.version}-*" ], "version": 7000001, "settings": { "index.number_of_shards": 1, diff --git a/plugin/src/main/resources/monitoring-kibana.json b/plugin/src/main/resources/monitoring-kibana.json index b05f8e2db87..b7e6dfb2d01 100644 --- a/plugin/src/main/resources/monitoring-kibana.json +++ 
b/plugin/src/main/resources/monitoring-kibana.json @@ -1,5 +1,5 @@ { - "index_patterns": ".monitoring-kibana-${monitoring.template.version}-*", + "index_patterns": [ ".monitoring-kibana-${monitoring.template.version}-*" ], "version": 7000001, "settings": { "index.number_of_shards": 1, diff --git a/plugin/src/main/resources/monitoring-logstash.json b/plugin/src/main/resources/monitoring-logstash.json index 68c292f5351..be250d150a7 100644 --- a/plugin/src/main/resources/monitoring-logstash.json +++ b/plugin/src/main/resources/monitoring-logstash.json @@ -1,5 +1,5 @@ { - "index_patterns": ".monitoring-logstash-${monitoring.template.version}-*", + "index_patterns": [ ".monitoring-logstash-${monitoring.template.version}-*" ], "version": 7000001, "settings": { "index.number_of_shards": 1, diff --git a/plugin/src/main/resources/triggered-watches.json b/plugin/src/main/resources/triggered-watches.json index 035bcc8674b..3c89d83a69e 100644 --- a/plugin/src/main/resources/triggered-watches.json +++ b/plugin/src/main/resources/triggered-watches.json @@ -1,5 +1,5 @@ { - "index_patterns": ".triggered_watches*", + "index_patterns": [ ".triggered_watches*" ], "order": 2147483647, "settings": { "index.number_of_shards": 1, diff --git a/plugin/src/main/resources/watch-history.json b/plugin/src/main/resources/watch-history.json index 262c58f0984..af4b221b93e 100644 --- a/plugin/src/main/resources/watch-history.json +++ b/plugin/src/main/resources/watch-history.json @@ -1,5 +1,5 @@ { - "index_patterns": ".watcher-history-${xpack.watcher.template.version}*", + "index_patterns": [ ".watcher-history-${xpack.watcher.template.version}*" ], "order": 2147483647, "settings": { "xpack.watcher.template.version": "${xpack.watcher.template.version}", diff --git a/plugin/src/main/resources/watches.json b/plugin/src/main/resources/watches.json index babd4a85112..278c6275e69 100644 --- a/plugin/src/main/resources/watches.json +++ b/plugin/src/main/resources/watches.json @@ -1,5 +1,5 @@ { - "index_patterns": ".watches*", + "index_patterns": [ ".watches*" ], "order": 2147483647, "settings": { "index.number_of_shards": 1, diff --git a/plugin/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/plugin/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java index de4681568ab..30f458d5644 100644 --- a/plugin/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.Index; import org.elasticsearch.plugins.Plugin; @@ -32,9 +33,8 @@ import org.elasticsearch.xpack.XPackClient; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.security.InternalClient; -import org.elasticsearch.xpack.security.InternalSecurityClient; import org.elasticsearch.xpack.security.Security; +import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.security.client.SecurityClient; import org.junit.AfterClass; import org.junit.Before; @@ -419,18 +419,6 @@ public abstract class SecurityIntegTestCase extends 
ESIntegTestCase { return client -> (client instanceof NodeClient) ? client.filterWithHeader(headers) : client; } - protected InternalClient internalClient() { - return internalCluster().getInstance(InternalClient.class); - } - - protected InternalSecurityClient internalSecurityClient() { - return internalSecurityClient(client()); - } - - protected InternalSecurityClient internalSecurityClient(Client client) { - return new InternalSecurityClient(client.settings(), client.threadPool(), client); - } - protected SecurityClient securityClient() { return securityClient(client()); } @@ -493,15 +481,17 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase { } protected void deleteSecurityIndex() { - final InternalSecurityClient securityClient = internalSecurityClient(); + final Client client = client().filterWithHeader(Collections.singletonMap("Authorization", + UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, + SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING))); GetIndexRequest getIndexRequest = new GetIndexRequest(); getIndexRequest.indices(SECURITY_INDEX_NAME); getIndexRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); - GetIndexResponse getIndexResponse = securityClient.admin().indices().getIndex(getIndexRequest).actionGet(); + GetIndexResponse getIndexResponse = client.admin().indices().getIndex(getIndexRequest).actionGet(); if (getIndexResponse.getIndices().length > 0) { - // this is a hack to clean up the .security index since only the XPack user can delete it + // this is a hack to clean up the .security index since only a superuser can delete it DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(getIndexResponse.getIndices()); - securityClient.admin().indices().delete(deleteIndexRequest).actionGet(); + client.admin().indices().delete(deleteIndexRequest).actionGet(); } } diff --git a/plugin/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/plugin/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java index 02bf772fa4c..e9104ea7e61 100644 --- a/plugin/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java +++ b/plugin/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java @@ -58,17 +58,20 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas public static final SecureString TEST_PASSWORD_SECURE_STRING = new SecureString("x-pack-test-password".toCharArray()); public static final String TEST_PASSWORD_HASHED = new String(Hasher.BCRYPT.hash(new SecureString(TEST_PASSWORD.toCharArray()))); public static final String TEST_ROLE = "user"; + public static final String TEST_SUPERUSER = "test_superuser"; public static final String DEFAULT_TRANSPORT_CLIENT_ROLE = "transport_client"; public static final String DEFAULT_TRANSPORT_CLIENT_USER_NAME = "test_trans_client_user"; public static final String CONFIG_STANDARD_USER = TEST_USER_NAME + ":" + TEST_PASSWORD_HASHED + "\n" + - DEFAULT_TRANSPORT_CLIENT_USER_NAME + ":" + TEST_PASSWORD_HASHED + "\n"; + DEFAULT_TRANSPORT_CLIENT_USER_NAME + ":" + TEST_PASSWORD_HASHED + "\n" + + TEST_SUPERUSER + ":" + TEST_PASSWORD_HASHED + "\n"; public static final String CONFIG_STANDARD_USER_ROLES = TEST_ROLE + ":" + TEST_USER_NAME + "," + DEFAULT_TRANSPORT_CLIENT_USER_NAME + "\n" + - DEFAULT_TRANSPORT_CLIENT_ROLE + ":" + DEFAULT_TRANSPORT_CLIENT_USER_NAME+ "\n"; + DEFAULT_TRANSPORT_CLIENT_ROLE + ":" + DEFAULT_TRANSPORT_CLIENT_USER_NAME + "\n" + + "superuser:" + TEST_SUPERUSER + "\n"; public static final String 
CONFIG_ROLE_ALLOW_ALL = TEST_ROLE + ":\n" + diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ClientHelperTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ClientHelperTests.java new file mode 100644 index 00000000000..b9212a00216 --- /dev/null +++ b/plugin/src/test/java/org/elasticsearch/xpack/ClientHelperTests.java @@ -0,0 +1,141 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.concurrent.CountDownLatch; + +import static org.mockito.Matchers.anyObject; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class ClientHelperTests extends ESTestCase { + + public void testStashContext() { + final String origin = randomAlphaOfLengthBetween(4, 16); + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + + final boolean setOtherValues = randomBoolean(); + if (setOtherValues) { + threadContext.putTransient("foo", "bar"); + threadContext.putHeader("foo", "bar"); + } + + assertNull(threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME)); + ThreadContext.StoredContext storedContext = ClientHelper.stashWithOrigin(threadContext, origin); + assertEquals(origin, threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME)); + assertNull(threadContext.getTransient("foo")); + assertNull(threadContext.getTransient("bar")); + + storedContext.close(); + assertNull(threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME)); + + if (setOtherValues) { + assertEquals("bar", threadContext.getTransient("foo")); + assertEquals("bar", threadContext.getHeader("foo")); + } + } + + public void testExecuteAsyncWrapsListener() throws Exception { + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + final String headerName = randomAlphaOfLengthBetween(4, 16); + final String headerValue = randomAlphaOfLengthBetween(4, 16); + final String origin = randomAlphaOfLengthBetween(4, 16); + final CountDownLatch latch = new CountDownLatch(2); + final ActionListener listener = ActionListener.wrap(v -> { + assertNull(threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME)); + assertEquals(headerValue, threadContext.getHeader(headerName)); + latch.countDown(); + }, e -> fail(e.getMessage())); + + final ClusterHealthRequest request = new ClusterHealthRequest(); + threadContext.putHeader(headerName, headerValue); + + ClientHelper.executeAsyncWithOrigin(threadContext, origin, request, listener, (req, listener1) -> { + assertSame(request, req); + assertEquals(origin, threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME)); + assertNull(threadContext.getHeader(headerName)); + latch.countDown(); + listener1.onResponse(null); + }); + + latch.await(); + } + + public void 
testExecuteWithClient() throws Exception { + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + final Client client = mock(Client.class); + final ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(threadContext); + + final String headerName = randomAlphaOfLengthBetween(4, 16); + final String headerValue = randomAlphaOfLengthBetween(4, 16); + final String origin = randomAlphaOfLengthBetween(4, 16); + final CountDownLatch latch = new CountDownLatch(2); + final ActionListener listener = ActionListener.wrap(v -> { + assertNull(threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME)); + assertEquals(headerValue, threadContext.getHeader(headerName)); + latch.countDown(); + }, e -> fail(e.getMessage())); + + doAnswer(invocationOnMock -> { + assertEquals(origin, threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME)); + assertNull(threadContext.getHeader(headerName)); + latch.countDown(); + ((ActionListener)invocationOnMock.getArguments()[2]).onResponse(null); + return null; + }).when(client).execute(anyObject(), anyObject(), anyObject()); + + threadContext.putHeader(headerName, headerValue); + ClientHelper.executeAsyncWithOrigin(client, origin, ClusterHealthAction.INSTANCE, new ClusterHealthRequest(), listener); + + latch.await(); + } + + public void testClientWithOrigin() throws Exception { + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + final Client client = mock(Client.class); + final ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(threadContext); + when(client.settings()).thenReturn(Settings.EMPTY); + + final String headerName = randomAlphaOfLengthBetween(4, 16); + final String headerValue = randomAlphaOfLengthBetween(4, 16); + final String origin = randomAlphaOfLengthBetween(4, 16); + final CountDownLatch latch = new CountDownLatch(2); + final ActionListener listener = ActionListener.wrap(v -> { + assertNull(threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME)); + assertEquals(headerValue, threadContext.getHeader(headerName)); + latch.countDown(); + }, e -> fail(e.getMessage())); + + + doAnswer(invocationOnMock -> { + assertEquals(origin, threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME)); + assertNull(threadContext.getHeader(headerName)); + latch.countDown(); + ((ActionListener)invocationOnMock.getArguments()[2]).onResponse(null); + return null; + }).when(client).execute(anyObject(), anyObject(), anyObject()); + + threadContext.putHeader(headerName, headerValue); + Client clientWithOrigin = ClientHelper.clientWithOrigin(client, origin); + clientWithOrigin.execute(null, null, listener); + latch.await(); + } +} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/logstash/LogstashTemplateRegistryTests.java b/plugin/src/test/java/org/elasticsearch/xpack/logstash/LogstashTemplateRegistryTests.java deleted file mode 100644 index 44b2a79fa5e..00000000000 --- a/plugin/src/test/java/org/elasticsearch/xpack/logstash/LogstashTemplateRegistryTests.java +++ /dev/null @@ -1,245 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.logstash; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.MockTransportClient; -import org.elasticsearch.xpack.security.InternalClient; -import org.elasticsearch.xpack.template.TemplateUtils; -import org.junit.After; -import org.junit.Before; - -import java.io.IOException; -import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.ExecutorService; - -import static org.elasticsearch.mock.orig.Mockito.times; -import static org.elasticsearch.xpack.logstash.LogstashTemplateRegistry.LOGSTASH_INDEX_NAME; -import static org.elasticsearch.xpack.logstash.LogstashTemplateRegistry.LOGSTASH_TEMPLATE_NAME; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -public class LogstashTemplateRegistryTests extends ESTestCase { - private static final int NUM_LOGSTASH_INDEXES = 1; // .logstash - - private InternalClient client; - private ExecutorService executorService; - private TransportClient transportClient; - private ThreadPool threadPool; - private ClusterService clusterService; - private LogstashTemplateRegistry logstashTemplateRegistry; - private static final ClusterState EMPTY_CLUSTER_STATE = - new ClusterState.Builder(new ClusterName("test-cluster")).build(); - CopyOnWriteArrayList listeners; - - @Before - public void setup() { - executorService = mock(ExecutorService.class); - threadPool = mock(ThreadPool.class); - clusterService = mock(ClusterService.class); - - final ExecutorService executorService = EsExecutors.newDirectExecutorService(); - when(threadPool.executor(ThreadPool.Names.GENERIC)).thenReturn(executorService); - - transportClient = new MockTransportClient(Settings.EMPTY); - class TestInternalClient extends InternalClient { - TestInternalClient(Client transportClient) { - super(Settings.EMPTY, null, transportClient); - } - - @Override - protected > - void doExecute(Action action, Request request, - ActionListener listener) { - listeners.add(listener); - } - } - client = new TestInternalClient(transportClient); - listeners = new 
CopyOnWriteArrayList<>(); - logstashTemplateRegistry = new LogstashTemplateRegistry(Settings.EMPTY, clusterService, client); - } - - @After - public void stop() throws InterruptedException { - if (transportClient != null) { - transportClient.close(); - } - } - - public void testAddsListener() throws Exception { - LogstashTemplateRegistry templateRegistry = new LogstashTemplateRegistry(Settings.EMPTY, clusterService, client); - verify(clusterService, times(1)).addListener(templateRegistry); - } - - public void testAddTemplatesIfMissing() throws IOException { - ClusterState.Builder clusterStateBuilder = createClusterStateWithTemplate( - "/" + LOGSTASH_TEMPLATE_NAME + ".json" - ); - logstashTemplateRegistry.clusterChanged(new ClusterChangedEvent("test-event", - clusterStateBuilder.build(), EMPTY_CLUSTER_STATE)); - assertThat(logstashTemplateRegistry.isTemplateUpToDate(), equalTo(true)); - assertThat(listeners, hasSize(0)); - } - - public void testWrongVersionIndexTemplate_isIdentifiedAsNotUpToDate() throws IOException { - String templateString = "/wrong-version-" + LOGSTASH_TEMPLATE_NAME + ".json"; - ClusterState.Builder clusterStateBuilder = createClusterStateWithTemplate(templateString); - - logstashTemplateRegistry.clusterChanged(new ClusterChangedEvent("test-event", - clusterStateBuilder.build(), EMPTY_CLUSTER_STATE)); - assertThat(logstashTemplateRegistry.isTemplateUpToDate(), equalTo(false)); - assertThat(listeners, hasSize(NUM_LOGSTASH_INDEXES)); - } - - public void testWrongVersionIndexTemplate_isUpdated() throws IOException { - String templateString = "/wrong-version-" + LOGSTASH_TEMPLATE_NAME + ".json"; - ClusterState.Builder clusterStateBuilder = createClusterStateWithTemplate(templateString); - - final ClusterState clusterState = clusterStateBuilder.build(); - logstashTemplateRegistry.clusterChanged(new ClusterChangedEvent("test-event", - clusterState, EMPTY_CLUSTER_STATE)); - assertThat(logstashTemplateRegistry.isTemplateUpToDate(), equalTo(false)); - assertThat(listeners, hasSize(NUM_LOGSTASH_INDEXES)); - assertThat("Expected pending template creation", logstashTemplateRegistry.isTemplateCreationPending(), is(true)); - - // if we do it again this should not send an update - ActionListener listener = listeners.get(0); - listeners.clear(); - logstashTemplateRegistry.clusterChanged(new ClusterChangedEvent("test-event", - clusterState, EMPTY_CLUSTER_STATE)); - assertThat(logstashTemplateRegistry.isTemplateUpToDate(), equalTo(false)); - assertThat(listeners, hasSize(0)); - assertThat("Expected pending template creation", logstashTemplateRegistry.isTemplateCreationPending(), is(true)); - - // if we now simulate an error... - listener.onFailure(new Exception()); - assertThat(logstashTemplateRegistry.isTemplateUpToDate(), equalTo(false)); - assertFalse(logstashTemplateRegistry.isTemplateCreationPending()); - - // ... 
we should be able to send a new update - logstashTemplateRegistry.clusterChanged(new ClusterChangedEvent("test-event", - clusterState, EMPTY_CLUSTER_STATE)); - assertThat(logstashTemplateRegistry.isTemplateUpToDate(), equalTo(false)); - assertThat(listeners, hasSize(1)); - assertThat("Expected pending template creation", logstashTemplateRegistry.isTemplateCreationPending(), is(true)); - - // now check what happens if we get back an unacknowledged response - listeners.get(0).onResponse(new TestPutIndexTemplateResponse()); - assertThat(logstashTemplateRegistry.isTemplateUpToDate(), equalTo(false)); - assertThat("Didn't expect pending template creation", logstashTemplateRegistry.isTemplateCreationPending(), is(false)); - - // and now let's see what happens if we get back a response - listeners.clear(); - logstashTemplateRegistry.clusterChanged(new ClusterChangedEvent("test-event", - clusterState, EMPTY_CLUSTER_STATE)); - assertThat(logstashTemplateRegistry.isTemplateUpToDate(), equalTo(false)); - assertThat("Expected pending template creation", logstashTemplateRegistry.isTemplateCreationPending(), is(true)); - assertThat(listeners, hasSize(1)); - listeners.get(0).onResponse(new TestPutIndexTemplateResponse(true)); - assertThat(logstashTemplateRegistry.isTemplateUpToDate(), equalTo(true)); - assertThat("Didn't expect pending template creation", logstashTemplateRegistry.isTemplateCreationPending(), is(false)); - } - - private static ClusterState.Builder createClusterStateWithTemplate(String logstashTemplateString) throws IOException { - MetaData.Builder metaDataBuilder = new MetaData.Builder(); - - IndexTemplateMetaData.Builder logstashTemplateBuilder = - getIndexTemplateMetaData(LOGSTASH_TEMPLATE_NAME, logstashTemplateString); - metaDataBuilder.put(logstashTemplateBuilder); - // add the correct mapping no matter what the template - String logstashMappingString = "/" + LOGSTASH_TEMPLATE_NAME + ".json"; - IndexMetaData.Builder logstashIndexMeta = - createIndexMetadata(LOGSTASH_INDEX_NAME, logstashMappingString); - metaDataBuilder.put(logstashIndexMeta); - - return ClusterState.builder(state()).metaData(metaDataBuilder.build()); - } - - private static IndexTemplateMetaData.Builder getIndexTemplateMetaData( - String templateName, String templateString) throws IOException { - - String template = TemplateUtils.loadTemplate(templateString, Version.CURRENT.toString(), - LogstashTemplateRegistry.TEMPLATE_VERSION_PATTERN); - PutIndexTemplateRequest request = new PutIndexTemplateRequest(); - request.source(template, XContentType.JSON); - IndexTemplateMetaData.Builder templateBuilder = - IndexTemplateMetaData.builder(templateName); - for (Map.Entry entry : request.mappings().entrySet()) { - templateBuilder.putMapping(entry.getKey(), entry.getValue()); - } - return templateBuilder; - } - - private static IndexMetaData.Builder createIndexMetadata( - String indexName, String templateString) throws IOException { - String template = TemplateUtils.loadTemplate(templateString, Version.CURRENT.toString(), - LogstashTemplateRegistry.TEMPLATE_VERSION_PATTERN); - PutIndexTemplateRequest request = new PutIndexTemplateRequest(); - request.source(template, XContentType.JSON); - IndexMetaData.Builder indexMetaData = IndexMetaData.builder(indexName); - indexMetaData.settings(Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .build()); - - for (Map.Entry entry : 
request.mappings().entrySet()) { - indexMetaData.putMapping(entry.getKey(), entry.getValue()); - } - return indexMetaData; - } - - // cluster state where local node is master - private static ClusterState state() { - DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); - discoBuilder.masterNodeId("1"); - discoBuilder.localNodeId("1"); - ClusterState.Builder state = ClusterState.builder(new ClusterName("test-cluster")); - state.nodes(discoBuilder); - state.metaData(MetaData.builder().generateClusterUuidIfNeeded()); - return state.build(); - } - - private static class TestPutIndexTemplateResponse extends PutIndexTemplateResponse { - TestPutIndexTemplateResponse(boolean acknowledged) { - super(acknowledged); - } - - TestPutIndexTemplateResponse() { - super(); - } - } -} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTemplateRegistryTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTemplateRegistryTests.java deleted file mode 100644 index 6488d150692..00000000000 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTemplateRegistryTests.java +++ /dev/null @@ -1,307 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml; - -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.UnassignedInfo; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; -import org.elasticsearch.xpack.ml.job.persistence.ElasticsearchMappings; -import org.elasticsearch.xpack.ml.job.persistence.MockClientBuilder; -import org.elasticsearch.xpack.ml.notifications.AuditMessage; -import org.elasticsearch.xpack.ml.notifications.Auditor; -import org.junit.Before; -import org.mockito.ArgumentCaptor; - -import java.net.InetAddress; -import java.util.Collections; -import java.util.concurrent.ExecutorService; - -import static org.elasticsearch.mock.orig.Mockito.doAnswer; -import static org.elasticsearch.mock.orig.Mockito.times; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -public class MachineLearningTemplateRegistryTests extends ESTestCase { - private static final String CLUSTER_NAME = "clusterMcClusterFace"; - - 
private ClusterService clusterService; - private ExecutorService executorService; - private Client client; - private ThreadPool threadPool; - - @Before - public void setUpMocks() { - threadPool = mock(ThreadPool.class); - executorService = mock(ExecutorService.class); - clusterService = mock(ClusterService.class); - client = mock(Client.class); - - doAnswer(invocation -> { - ((Runnable) invocation.getArguments()[0]).run(); - return null; - }).when(executorService).execute(any(Runnable.class)); - when(threadPool.executor(ThreadPool.Names.GENERIC)).thenReturn(executorService); - } - - public void testAddsListener() throws Exception { - MachineLearningTemplateRegistry templateRegistry = - new MachineLearningTemplateRegistry(Settings.EMPTY, clusterService, client, threadPool); - - verify(clusterService, times(1)).addListener(templateRegistry); - } - - public void testAddTemplatesIfMissing() throws Exception { - MockClientBuilder clientBuilder = new MockClientBuilder(CLUSTER_NAME); - ArgumentCaptor captor = ArgumentCaptor.forClass(PutIndexTemplateRequest.class); - clientBuilder.putTemplate(captor); - - MachineLearningTemplateRegistry templateRegistry = - new MachineLearningTemplateRegistry(Settings.EMPTY, clusterService, clientBuilder.build(), threadPool); - - ClusterState cs = ClusterState.builder(new ClusterName("_name")) - .nodes(DiscoveryNodes.builder() - .add(new DiscoveryNode("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT)) - .localNodeId("_node_id") - .masterNodeId("_node_id")) - .metaData(MetaData.builder()) - .build(); - templateRegistry.clusterChanged(new ClusterChangedEvent("_source", cs, cs)); - - verify(threadPool, times(4)).executor(anyString()); - assertFalse(templateRegistry.putMlNotificationsIndexTemplateCheck.get()); - assertFalse(templateRegistry.putMlMetaIndexTemplateCheck.get()); - assertFalse(templateRegistry.putMlNotificationsIndexTemplateCheck.get()); - assertFalse(templateRegistry.putResultsIndexTemplateCheck.get()); - } - - public void testAddTemplatesIfMissing_alreadyInitialized() throws Exception { - MockClientBuilder clientBuilder = new MockClientBuilder(CLUSTER_NAME); - ArgumentCaptor captor = ArgumentCaptor.forClass(PutIndexTemplateRequest.class); - clientBuilder.putTemplate(captor); - - MachineLearningTemplateRegistry templateRegistry = - new MachineLearningTemplateRegistry(Settings.EMPTY, clusterService, clientBuilder.build(), threadPool); - - ClusterState cs = ClusterState.builder(new ClusterName("_name")) - .nodes(DiscoveryNodes.builder() - .add(new DiscoveryNode("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT)) - .localNodeId("_node_id") - .masterNodeId("_node_id")) - .metaData(MetaData.builder() - .put(IndexMetaData.builder(Auditor.NOTIFICATIONS_INDEX).settings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - )) - .put(IndexMetaData.builder(MlMetaIndex.INDEX_NAME).settings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - )) - .put(IndexMetaData.builder(AnomalyDetectorsIndex.jobStateIndexName()).settings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - )) - 
.put(IndexTemplateMetaData.builder(Auditor.NOTIFICATIONS_INDEX).version(Version.CURRENT.id).build()) - .put(IndexTemplateMetaData.builder(MlMetaIndex.INDEX_NAME).version(Version.CURRENT.id).build()) - .put(IndexTemplateMetaData.builder(AnomalyDetectorsIndex.jobStateIndexName()).version(Version.CURRENT.id).build()) - .put(IndexTemplateMetaData.builder( - AnomalyDetectorsIndex.jobResultsIndexPrefix()).version(Version.CURRENT.id).build()) - .putCustom(MlMetadata.TYPE, new MlMetadata.Builder().build())) - .build(); - templateRegistry.clusterChanged(new ClusterChangedEvent("_source", cs, cs)); - - verify(threadPool, times(0)).executor(anyString()); - assertFalse(templateRegistry.putMlNotificationsIndexTemplateCheck.get()); - assertFalse(templateRegistry.putMlMetaIndexTemplateCheck.get()); - assertFalse(templateRegistry.putStateIndexTemplateCheck.get()); - assertFalse(templateRegistry.putResultsIndexTemplateCheck.get()); - } - - public void testMlResultsIndexSettings() { - MachineLearningTemplateRegistry templateRegistry = - new MachineLearningTemplateRegistry(createSettings(), clusterService, client, threadPool); - Settings settings = templateRegistry.mlResultsIndexSettings().build(); - - assertEquals(3, settings.size()); - assertThat(settings.get("index.number_of_shards"), is(nullValue())); - assertEquals("async", settings.get("index.translog.durability")); - assertEquals("all_field_values", settings.get("index.query.default_field")); - assertEquals("2s", settings.get("index.unassigned.node_left.delayed_timeout")); - } - - public void testMlAuditIndexSettings() { - MachineLearningTemplateRegistry templateRegistry = - new MachineLearningTemplateRegistry(createSettings(), clusterService, client, threadPool); - Settings settings = templateRegistry.mlNotificationIndexSettings().build(); - - assertEquals(2, settings.size()); - assertEquals("1", settings.get("index.number_of_shards")); - assertEquals("2s", settings.get("index.unassigned.node_left.delayed_timeout")); - } - - public void testMlStateIndexSettings() { - MachineLearningTemplateRegistry templateRegistry = - new MachineLearningTemplateRegistry(createSettings(), clusterService, client, threadPool); - Settings settings = templateRegistry.mlStateIndexSettings().build(); - - assertEquals(2, settings.size()); - assertEquals("async", settings.get("index.translog.durability")); - assertEquals("2s", settings.get("index.unassigned.node_left.delayed_timeout")); - } - - public void testPutNotificationIndexTemplate() { - MockClientBuilder clientBuilder = new MockClientBuilder(CLUSTER_NAME); - ArgumentCaptor captor = ArgumentCaptor.forClass(PutIndexTemplateRequest.class); - clientBuilder.putTemplate(captor); - - MachineLearningTemplateRegistry templateRegistry = - new MachineLearningTemplateRegistry(createSettings(), clusterService, clientBuilder.build(), threadPool); - - templateRegistry.putNotificationMessageIndexTemplate((result, error) -> { - assertTrue(result); - PutIndexTemplateRequest request = captor.getValue(); - assertNotNull(request); - assertEquals(templateRegistry.mlNotificationIndexSettings().build(), request.settings()); - assertTrue(request.mappings().containsKey(AuditMessage.TYPE.getPreferredName())); - assertEquals(1, request.mappings().size()); - assertEquals(Collections.singletonList(Auditor.NOTIFICATIONS_INDEX), request.patterns()); - assertEquals(new Integer(Version.CURRENT.id), request.version()); - }); - } - - public void testPutMetaIndexTemplate() { - MockClientBuilder clientBuilder = new MockClientBuilder(CLUSTER_NAME); - 
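Taken together, the assertions in these template tests imply the shape of each request the registry sends: one versioned template per index, a single mapping, and the settings from the matching builder. A hedged sketch for the notifications template (notificationMappingSource is a placeholder, not a field from this change):

    PutIndexTemplateRequest request = new PutIndexTemplateRequest(Auditor.NOTIFICATIONS_INDEX)
            .patterns(Collections.singletonList(Auditor.NOTIFICATIONS_INDEX)) // exact name; only the results template pattern carries a '*' suffix
            .settings(mlNotificationIndexSettings().build())
            .mapping(AuditMessage.TYPE.getPreferredName(), notificationMappingSource, XContentType.JSON)
            .version(Version.CURRENT.id);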
ArgumentCaptor captor = ArgumentCaptor.forClass(PutIndexTemplateRequest.class); - clientBuilder.putTemplate(captor); - - MachineLearningTemplateRegistry templateRegistry = - new MachineLearningTemplateRegistry(createSettings(), clusterService, clientBuilder.build(), threadPool); - - templateRegistry.putMetaIndexTemplate((result, error) -> { - assertTrue(result); - PutIndexTemplateRequest request = captor.getValue(); - assertNotNull(request); - assertEquals(templateRegistry.mlNotificationIndexSettings().build(), request.settings()); - assertEquals(1, request.mappings().size()); - assertThat(request.mappings().containsKey("doc"), is(true)); - assertEquals(Collections.singletonList(MlMetaIndex.INDEX_NAME), request.patterns()); - assertEquals(new Integer(Version.CURRENT.id), request.version()); - }); - } - - public void testPutJobStateIndexTemplate() { - MockClientBuilder clientBuilder = new MockClientBuilder(CLUSTER_NAME); - ArgumentCaptor captor = ArgumentCaptor.forClass(PutIndexTemplateRequest.class); - clientBuilder.putTemplate(captor); - - MachineLearningTemplateRegistry templateRegistry = - new MachineLearningTemplateRegistry(createSettings(), clusterService, clientBuilder.build(), threadPool); - - templateRegistry.putJobStateIndexTemplate((result, error) -> { - assertTrue(result); - PutIndexTemplateRequest request = captor.getValue(); - assertNotNull(request); - assertEquals(templateRegistry.mlStateIndexSettings().build(), request.settings()); - assertTrue(request.mappings().containsKey(ElasticsearchMappings.DOC_TYPE)); - assertEquals(1, request.mappings().size()); - assertEquals(Collections.singletonList(AnomalyDetectorsIndex.jobStateIndexName()), request.patterns()); - assertEquals(new Integer(Version.CURRENT.id), request.version()); - }); - } - - public void testPutJobResultsIndexTemplate() { - MockClientBuilder clientBuilder = new MockClientBuilder(CLUSTER_NAME); - ArgumentCaptor captor = ArgumentCaptor.forClass(PutIndexTemplateRequest.class); - clientBuilder.putTemplate(captor); - - MachineLearningTemplateRegistry templateRegistry = - new MachineLearningTemplateRegistry(createSettings(), clusterService, clientBuilder.build(), threadPool); - - templateRegistry.putJobResultsIndexTemplate((result, error) -> { - assertTrue(result); - PutIndexTemplateRequest request = captor.getValue(); - assertNotNull(request); - assertEquals(templateRegistry.mlResultsIndexSettings().build(), request.settings()); - assertTrue(request.mappings().containsKey("doc")); - assertEquals(1, request.mappings().size()); - assertEquals(Collections.singletonList(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*"), request.patterns()); - assertEquals(new Integer(Version.CURRENT.id), request.version()); - }); - } - - public void testTemplateIsPresentAndUpToDate() { - // missing template - MetaData metaData = MetaData.builder().build(); - assertFalse(MachineLearningTemplateRegistry.templateIsPresentAndUpToDate(Auditor.NOTIFICATIONS_INDEX, metaData)); - - // old version of template - IndexTemplateMetaData templateMetaData = IndexTemplateMetaData.builder(Auditor.NOTIFICATIONS_INDEX) - .version(Version.CURRENT.id - 1).build(); - metaData = MetaData.builder().put(templateMetaData).build(); - assertFalse(MachineLearningTemplateRegistry.templateIsPresentAndUpToDate(Auditor.NOTIFICATIONS_INDEX, metaData)); - - // latest template - templateMetaData = IndexTemplateMetaData.builder(Auditor.NOTIFICATIONS_INDEX) - .version(Version.CURRENT.id).build(); - metaData = MetaData.builder().put(templateMetaData).build(); - 
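The assertions just below pin down the contract of the version check: a missing template and a stale version both fail, and only the current version passes. A minimal sketch consistent with those three cases (the actual implementation may differ):

    static boolean templateIsPresentAndUpToDate(String templateName, MetaData metaData) {
        IndexTemplateMetaData template = metaData.templates().get(templateName);
        // Present and carrying at least the current version id; a null version counts as stale.
        return template != null && template.version() != null && template.version() >= Version.CURRENT.id;
    }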
assertTrue(MachineLearningTemplateRegistry.templateIsPresentAndUpToDate(Auditor.NOTIFICATIONS_INDEX, metaData)); - } - - public void testAllTemplatesInstalled() { - MetaData metaData = MetaData.builder() - .put(IndexTemplateMetaData.builder(Auditor.NOTIFICATIONS_INDEX).version(Version.CURRENT.id).build()) - .put(IndexTemplateMetaData.builder(MlMetaIndex.INDEX_NAME).version(Version.CURRENT.id).build()) - .put(IndexTemplateMetaData.builder(AnomalyDetectorsIndex.jobStateIndexName()).version(Version.CURRENT.id).build()) - .put(IndexTemplateMetaData.builder( - AnomalyDetectorsIndex.jobResultsIndexPrefix()).version(Version.CURRENT.id).build()).build(); - - assertTrue(MachineLearningTemplateRegistry.allTemplatesInstalled(metaData)); - } - - public void testAllTemplatesInstalled_OneMissing() { - MetaData.Builder metaDataBuilder = MetaData.builder(); - - String missing = randomFrom(MachineLearningTemplateRegistry.TEMPLATE_NAMES); - for (String templateName : MachineLearningTemplateRegistry.TEMPLATE_NAMES) { - if (templateName.equals(missing)) { - continue; - } - metaDataBuilder.put(IndexTemplateMetaData.builder(templateName).version(Version.CURRENT.id).build()); - } - assertFalse(MachineLearningTemplateRegistry.allTemplatesInstalled(metaDataBuilder.build())); - } - - private Settings createSettings() { - return Settings.builder() - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueSeconds(2)) - .put(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), 1001L) - .build(); - } -} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/MlDailyManagementServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/MlDailyManagementServiceTests.java index 907929114a1..2c6f208c9b5 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/MlDailyManagementServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/MlDailyManagementServiceTests.java @@ -22,6 +22,7 @@ import static org.elasticsearch.mock.orig.Mockito.verify; import static org.mockito.Matchers.any; import static org.mockito.Matchers.same; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class MlDailyManagementServiceTests extends ESTestCase { @@ -32,6 +33,7 @@ public class MlDailyManagementServiceTests extends ESTestCase { public void setUpTests() { threadPool = new TestThreadPool("MlDailyManagementServiceTests"); client = mock(Client.class); + when(client.threadPool()).thenReturn(threadPool); } @After diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/CloseJobActionRequestTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/CloseJobActionRequestTests.java index ddbb0e3f51c..ff21c6e192b 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/CloseJobActionRequestTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/CloseJobActionRequestTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -31,7 +32,6 @@ import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; import 
org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.Assignment; import org.elasticsearch.xpack.persistent.PersistentTasksService; -import org.elasticsearch.xpack.security.InternalClient; import java.util.ArrayList; import java.util.Arrays; @@ -292,7 +292,7 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa CloseJobAction.TransportAction transportAction = new CloseJobAction.TransportAction(Settings.EMPTY, mock(TransportService.class), mock(ThreadPool.class), mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), - clusterService, mock(InternalClient.class), mock(Auditor.class), mock(PersistentTasksService.class)); + clusterService, mock(Client.class), mock(Auditor.class), mock(PersistentTasksService.class)); AtomicBoolean gotResponse = new AtomicBoolean(false); CloseJobAction.Request request = new Request("foo"); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java index 6588bb2a5d6..c5a7b23c6e4 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java @@ -8,8 +8,11 @@ package org.elasticsearch.xpack.ml.datafeed; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.mock.orig.Mockito; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ml.action.util.QueryPage; import org.elasticsearch.xpack.ml.job.config.DataDescription; import org.elasticsearch.xpack.ml.job.config.Job; @@ -45,6 +48,10 @@ public class DatafeedJobBuilderTests extends ESTestCase { @Before public void init() { client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + when(client.settings()).thenReturn(Settings.EMPTY); auditor = mock(Auditor.class); jobProvider = mock(JobProvider.class); taskHandler = mock(Consumer.class); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java index c5ef5bfda38..f95b17b7cbd 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java @@ -8,9 +8,12 @@ package org.elasticsearch.xpack.ml.datafeed; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.mock.orig.Mockito; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ml.action.FlushJobAction; import org.elasticsearch.xpack.ml.action.PostDataAction; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; @@ -66,6 +69,9 @@ public class DatafeedJobTests extends ESTestCase { dataExtractor = 
mock(DataExtractor.class); when(dataExtractorFactory.newExtractor(anyLong(), anyLong())).thenReturn(dataExtractor); client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); dataDescription = new DataDescription.Builder(); dataDescription.setFormat(DataDescription.DataFormat.XCONTENT); postDataFuture = mock(ActionFuture.class); @@ -225,6 +231,9 @@ public class DatafeedJobTests extends ESTestCase { public void testPostAnalysisProblem() throws Exception { client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); when(client.execute(same(FlushJobAction.INSTANCE), any())).thenReturn(flushJobFuture); when(client.execute(same(PostDataAction.INSTANCE), any())).thenThrow(new RuntimeException()); @@ -248,6 +257,9 @@ public class DatafeedJobTests extends ESTestCase { public void testPostAnalysisProblemIsConflict() throws Exception { client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); when(client.execute(same(FlushJobAction.INSTANCE), any())).thenReturn(flushJobFuture); when(client.execute(same(PostDataAction.INSTANCE), any())).thenThrow(ExceptionsHelper.conflictStatusException("conflict")); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java index a5dd9f41524..1f09aafb522 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java @@ -10,10 +10,13 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilities; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ml.datafeed.ChunkingConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedManagerTests; @@ -44,6 +47,9 @@ public class DataExtractorFactoryTests extends ESTestCase { @Before public void setUpTests() { client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); fieldsCapabilities = mock(FieldCapabilitiesResponse.class); givenAggregatableField("time", "date"); givenAggregatableField("field", "keyword"); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java 
b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java index 09253aac823..0cb83c7cb7c 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java @@ -5,21 +5,18 @@ */ package org.elasticsearch.xpack.ml.integration; -import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; -import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.UnassignedInfo; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.index.reindex.ReindexPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.XPackSingleNodeTestCase; -import org.elasticsearch.xpack.ml.MachineLearningTemplateRegistry; +import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.action.DeleteJobAction; import org.elasticsearch.xpack.ml.action.PutJobAction; import org.elasticsearch.xpack.ml.action.util.QueryPage; @@ -64,11 +61,8 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicReference; -import static org.mockito.Matchers.any; -import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -256,22 +250,11 @@ public class AutodetectResultProcessorIT extends XPackSingleNodeTestCase { } private void putIndexTemplates() throws Exception { - ThreadPool threadPool = mock(ThreadPool.class); - ExecutorService executorService = mock(ExecutorService.class); - doAnswer(invocation -> { - ((Runnable) invocation.getArguments()[0]).run(); - return null; - }).when(executorService).execute(any(Runnable.class)); - when(threadPool.executor(ThreadPool.Names.GENERIC)).thenReturn(executorService); - - new MachineLearningTemplateRegistry(Settings.EMPTY, mock(ClusterService.class), client(), threadPool) - .addTemplatesIfMissing(client().admin().cluster().state(new ClusterStateRequest().all()).actionGet().getState()); - // block until the templates are installed assertBusy(() -> { - MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); + ClusterState state = client().admin().cluster().prepareState().get().getState(); assertTrue("Timed out waiting for the ML templates to be installed", - MachineLearningTemplateRegistry.allTemplatesInstalled(metaData)); + MachineLearning.allTemplatesInstalled(state)); }); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java index 62d1c04b274..0e781b4dbf1 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.job.persistence; import 
org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.Version; -import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; @@ -30,6 +29,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.QueryBuilder; @@ -37,6 +37,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.action.util.QueryPage; import org.elasticsearch.xpack.ml.job.config.Job; @@ -870,6 +871,9 @@ public class JobProviderTests extends ESTestCase { private Client getMockedClient(Consumer queryBuilderConsumer, SearchResponse response) { Client client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); doAnswer(invocationOnMock -> { MultiSearchRequest multiSearchRequest = (MultiSearchRequest) invocationOnMock.getArguments()[0]; queryBuilderConsumer.accept(multiSearchRequest.requests().get(0).source().query()); @@ -891,20 +895,4 @@ public class JobProviderTests extends ESTestCase { }).when(client).search(any(), any()); return client; } - - private Client getMockedClient(GetResponse response) { - Client client = mock(Client.class); - @SuppressWarnings("unchecked") - ActionFuture actionFuture = mock(ActionFuture.class); - when(client.get(any())).thenReturn(actionFuture); - when(actionFuture.actionGet()).thenReturn(response); - - doAnswer(invocationOnMock -> { - @SuppressWarnings("unchecked") - ActionListener actionListener = (ActionListener) invocationOnMock.getArguments()[1]; - actionListener.onResponse(response); - return null; - }).when(client).get(any(), any()); - return client; - } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersisterTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersisterTests.java index 584c9023b95..0e500c999b5 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersisterTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersisterTests.java @@ -53,6 +53,7 @@ public class JobRenormalizedResultsPersisterTests extends ESTestCase { } verify(client, times(1)).bulk(any()); + verify(client, times(1)).threadPool(); verifyNoMoreInteractions(client); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java index eb79512cb66..59bf73f955d 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java +++ 
b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java @@ -12,7 +12,9 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.ml.job.results.Bucket; import org.elasticsearch.xpack.ml.job.results.BucketInfluencer; @@ -189,12 +191,16 @@ public class JobResultsPersisterTests extends ESTestCase { } verify(client, times(1)).bulk(any()); + verify(client, times(1)).threadPool(); verifyNoMoreInteractions(client); } @SuppressWarnings({"unchecked", "rawtypes"}) private Client mockClient(ArgumentCaptor captor) { Client client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); ActionFuture future = mock(ActionFuture.class); when(future.actionGet()).thenReturn(new BulkResponse(new BulkItemResponse[0], 0L)); when(client.bulk(captor.capture())).thenReturn(future); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java index a689adfcca3..289d784cb99 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; @@ -43,10 +44,12 @@ import org.elasticsearch.client.ClusterAdminClient; import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ml.action.DeleteJobAction; import org.mockito.ArgumentCaptor; import org.mockito.invocation.InvocationOnMock; @@ -89,6 +92,9 @@ public class MockClientBuilder { when(adminClient.indices()).thenReturn(indicesAdminClient); Settings settings = Settings.builder().put("cluster.name", clusterName).build(); when(client.settings()).thenReturn(settings); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); } @SuppressWarnings({ "unchecked" }) @@ -302,11 +308,11 @@ public class MockClientBuilder { @Override 
public Void answer(InvocationOnMock invocationOnMock) throws Throwable { ActionListener listener = - (ActionListener) invocationOnMock.getArguments()[0]; + (ActionListener) invocationOnMock.getArguments()[1]; listener.onResponse(mock(IndicesAliasesResponse.class)); return null; } - }).when(aliasesRequestBuilder).execute(any()); + }).when(indicesAdminClient).aliases(any(IndicesAliasesRequest.class), any(ActionListener.class)); return this; } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java index c1a8066d408..804573f11e1 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java @@ -9,7 +9,10 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ml.action.UpdateJobAction; import org.elasticsearch.xpack.ml.job.config.JobUpdate; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; @@ -66,6 +69,9 @@ public class AutoDetectResultProcessorTests extends ESTestCase { @Before public void setUpMocks() { client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); renormalizer = mock(Renormalizer.class); persister = mock(JobResultsPersister.class); jobProvider = mock(JobProvider.class); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/StateProcessorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/StateProcessorTests.java index 70cfb6f8262..31b96d8393d 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/StateProcessorTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/StateProcessorTests.java @@ -12,8 +12,10 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.mock.orig.Mockito; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Before; import org.mockito.ArgumentCaptor; @@ -61,6 +63,9 @@ public class StateProcessorTests extends ESTestCase { ActionFuture bulkResponseFuture = mock(ActionFuture.class); stateProcessor = spy(new StateProcessor(Settings.EMPTY, client)); when(client.bulk(any(BulkRequest.class))).thenReturn(bulkResponseFuture); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); } @After @@ -80,6 +85,7 @@ public class StateProcessorTests extends ESTestCase { 
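Almost every unit test touched by this change gains the same three lines of stubbing, because the code under test now obtains the ThreadContext through client.threadPool() before issuing requests. A hypothetical helper (name and class are illustrative only, not part of the change) that captures the repeated pattern:

    import org.elasticsearch.client.Client;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.util.concurrent.ThreadContext;
    import org.elasticsearch.threadpool.ThreadPool;

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    final class MockClients {
        private MockClients() {}

        // A mocked Client whose thread pool hands back an empty ThreadContext, which is what
        // the production code now looks up before executing a request.
        static Client clientWithEmptyThreadContext() {
            Client client = mock(Client.class);
            ThreadPool threadPool = mock(ThreadPool.class);
            when(client.threadPool()).thenReturn(threadPool);
            when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
            return client;
        }
    }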
assertEquals(threeStates[1], capturedBytes.get(1).utf8ToString()); assertEquals(threeStates[2], capturedBytes.get(2).utf8ToString()); verify(client, times(3)).bulk(any(BulkRequest.class)); + verify(client, times(3)).threadPool(); } public void testStateReadGivenConsecutiveZeroBytes() throws IOException { @@ -122,5 +128,6 @@ public class StateProcessorTests extends ESTestCase { stateProcessor.process("_id", stream); verify(stateProcessor, times(NUM_LARGE_DOCS)).persist(eq("_id"), any()); verify(client, times(NUM_LARGE_DOCS)).bulk(any(BulkRequest.class)); + verify(client, times(NUM_LARGE_DOCS)).threadPool(); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java index 8e8eb7fdea8..18128f6d6d0 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java @@ -10,11 +10,14 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.mock.orig.Mockito; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.config.JobTests; @@ -55,6 +58,19 @@ public class ExpiredResultsRemoverTests extends ESTestCase { clusterState = mock(ClusterState.class); when(clusterService.state()).thenReturn(clusterState); client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + doAnswer(new Answer() { + @Override + public Void answer(InvocationOnMock invocationOnMock) throws Throwable { + capturedDeleteByQueryRequests.add((DeleteByQueryRequest) invocationOnMock.getArguments()[1]); + ActionListener listener = + (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(null); + return null; + } + }).when(client).execute(same(DeleteByQueryAction.INSTANCE), any(), any()); listener = mock(ActionListener.class); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/notifications/AuditorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/notifications/AuditorTests.java index 941ba279a19..b14f5fbb1f4 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/notifications/AuditorTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/notifications/AuditorTests.java @@ -10,11 +10,14 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import 
org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; import org.mockito.ArgumentCaptor; @@ -33,6 +36,9 @@ public class AuditorTests extends ESTestCase { @Before public void setUpMocks() { client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); clusterService = mock(ClusterService.class); DiscoveryNode dNode = mock(DiscoveryNode.class); when(dNode.getName()).thenReturn("this_node_has_a_name"); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java index c79e30de2db..f454a6e6da0 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.analysis.common.CommonAnalysisPlugin; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -26,7 +27,6 @@ import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.MachineLearningTemplateRegistry; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.action.CloseJobAction; import org.elasticsearch.xpack.ml.action.DeleteDatafeedAction; @@ -113,9 +113,9 @@ public abstract class BaseMlIntegTestCase extends ESIntegTestCase { @Before public void ensureTemplatesArePresent() throws Exception { assertBusy(() -> { - MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); + ClusterState state = client().admin().cluster().prepareState().get().getState(); assertTrue("Timed out waiting for the ML templates to be installed", - MachineLearningTemplateRegistry.allTemplatesInstalled(metaData)); + MachineLearning.allTemplatesInstalled(state)); }); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java index 63319671632..8325fcf2f7e 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.monitoring.exporter.Exporter; import org.elasticsearch.xpack.monitoring.exporter.Exporters; import org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase; -import org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; @@ -23,6 +22,7 @@ import org.joda.time.format.DateTimeFormatter; 
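The integration tests now block on MachineLearning.allTemplatesInstalled(ClusterState) instead of the registry's MetaData-based check. Its body is not part of these hunks; an assumed sketch, reusing the per-template check sketched earlier and the four template names the deleted registry tests enumerate:

    public static boolean allTemplatesInstalled(ClusterState clusterState) {
        MetaData metaData = clusterState.getMetaData();
        return Stream.of(Auditor.NOTIFICATIONS_INDEX, MlMetaIndex.INDEX_NAME,
                        AnomalyDetectorsIndex.jobStateIndexName(), AnomalyDetectorsIndex.jobResultsIndexPrefix())
                .allMatch(name -> templateIsPresentAndUpToDate(name, metaData));
    }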
import java.util.Locale; import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; +import static org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry.INDEX_TEMPLATE_VERSION; @ClusterScope(scope = TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0.0) public abstract class AbstractIndicesCleanerTestCase extends MonitoringIntegTestCase { @@ -196,7 +196,7 @@ public abstract class AbstractIndicesCleanerTestCase extends MonitoringIntegTest * Creates a watcher history index from the current version. */ protected void createWatcherHistoryIndex(final DateTime creationDate) { - createWatcherHistoryIndex(creationDate, WatcherIndexTemplateRegistry.INDEX_TEMPLATE_VERSION); + createWatcherHistoryIndex(creationDate, INDEX_TEMPLATE_VERSION); } /** diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/BaseCollectorTestCase.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/BaseCollectorTestCase.java index 67ea2eeaf02..07e2cc7751f 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/BaseCollectorTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/BaseCollectorTestCase.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.monitoring.collector; import org.elasticsearch.Version; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; @@ -17,11 +18,12 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.monitoring.Monitoring; -import org.elasticsearch.xpack.security.InternalClient; import java.util.function.Function; @@ -36,7 +38,7 @@ public abstract class BaseCollectorTestCase extends ESTestCase { protected DiscoveryNodes nodes; protected MetaData metaData; protected XPackLicenseState licenseState; - protected InternalClient client; + protected Client client; protected Settings settings; @Override @@ -48,7 +50,10 @@ public abstract class BaseCollectorTestCase extends ESTestCase { nodes = mock(DiscoveryNodes.class); metaData = mock(MetaData.class); licenseState = mock(XPackLicenseState.class); - client = mock(InternalClient.class); + client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); settings = Settings.EMPTY; } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollectorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollectorTests.java index a52d14ec19b..0909741ffd4 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollectorTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollectorTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.ml.action.GetJobsStatsAction.Request; import org.elasticsearch.xpack.ml.action.GetJobsStatsAction.Response; @@ -119,6 +120,7 @@ public class JobStatsCollectorTests extends BaseCollectorTestCase { final String clusterUuid = randomAlphaOfLength(5); final MonitoringDoc.Node node = randomMonitoringNode(random()); final MachineLearningClient client = mock(MachineLearningClient.class); + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(1, 120)); withCollectionTimeout(JobStatsCollector.JOB_STATS_TIMEOUT, timeout); @@ -127,7 +129,7 @@ public class JobStatsCollectorTests extends BaseCollectorTestCase { when(clusterState.metaData()).thenReturn(metaData); when(metaData.clusterUUID()).thenReturn(clusterUuid); - final JobStatsCollector collector = new JobStatsCollector(Settings.EMPTY, clusterService, licenseState, client); + final JobStatsCollector collector = new JobStatsCollector(Settings.EMPTY, clusterService, licenseState, client, threadContext); assertEquals(timeout, collector.getCollectionTimeout()); final List jobStats = mockJobStats(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/exporter/ExportersTests.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/exporter/ExportersTests.java index 92012f527f5..242814e49ed 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/exporter/ExportersTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/exporter/ExportersTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.monitoring.MonitoredSystem; import org.elasticsearch.xpack.monitoring.MonitoringService; import org.elasticsearch.xpack.monitoring.cleaner.CleanerService; import org.elasticsearch.xpack.monitoring.exporter.local.LocalExporter; -import org.elasticsearch.xpack.security.InternalClient; import org.junit.Before; import java.io.IOException; @@ -43,7 +42,6 @@ import static java.util.Collections.singleton; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -70,7 +68,6 @@ public class ExportersTests extends ESTestCase { threadContext = new ThreadContext(Settings.EMPTY); when(client.threadPool()).thenReturn(threadPool); when(threadPool.getThreadContext()).thenReturn(threadContext); - InternalClient internalClient = new InternalClient(Settings.EMPTY, threadPool, client); clusterService = mock(ClusterService.class); // default state.version() will be 0, which is "valid" state = mock(ClusterState.class); @@ -80,7 +77,7 @@ public class ExportersTests extends ESTestCase { when(clusterService.state()).thenReturn(state); // we always need to have the local exporter as it serves as the default one - factories.put(LocalExporter.TYPE, config -> new LocalExporter(config, internalClient, mock(CleanerService.class))); + factories.put(LocalExporter.TYPE, config -> new LocalExporter(config, client, mock(CleanerService.class))); exporters = new Exporters(Settings.EMPTY, factories, clusterService, licenseState, threadContext); } diff --git 
a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTestCase.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTestCase.java index b4373a43646..63ba263eb66 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTestCase.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.monitoring.MonitoringService; import org.elasticsearch.xpack.monitoring.cleaner.CleanerService; import org.elasticsearch.xpack.monitoring.exporter.Exporter; import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase; -import org.elasticsearch.xpack.security.InternalClient; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -103,7 +102,7 @@ public abstract class LocalExporterIntegTestCase extends MonitoringIntegTestCase final CleanerService cleanerService = new CleanerService(settings, clusterService().getClusterSettings(), THREADPOOL, licenseState); - return new LocalExporter(config, new InternalClient(settings, THREADPOOL, client()), cleanerService); + return new LocalExporter(config, client(), cleanerService); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/security/MonitoringInternalClientTests.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/security/MonitoringInternalClientTests.java deleted file mode 100644 index fe78753ba30..00000000000 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/security/MonitoringInternalClientTests.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.monitoring.security; - -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.xpack.monitoring.MonitoringService; -import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase; -import org.elasticsearch.xpack.security.InternalClient; - -import java.util.stream.Collectors; - -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; - -public class MonitoringInternalClientTests extends MonitoringIntegTestCase { - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put(MonitoringService.INTERVAL.getKey(), "-1") - .build(); - } - - public void testAllowedAccess() { - InternalClient internalClient = internalCluster().getInstance(InternalClient.class); - - assertAccessIsAllowed(internalClient.admin().cluster().prepareHealth()); - assertAccessIsAllowed(internalClient.admin().cluster().prepareClusterStats()); - assertAccessIsAllowed(internalClient.admin().cluster().prepareState()); - assertAccessIsAllowed(internalClient.admin().cluster().prepareNodesInfo()); - assertAccessIsAllowed(internalClient.admin().cluster().prepareNodesStats()); - assertAccessIsAllowed(internalClient.admin().cluster().prepareNodesHotThreads()); - - assertAccessIsAllowed(internalClient.admin().indices().prepareGetSettings()); - assertAccessIsAllowed(internalClient.admin().indices().prepareSegments()); - assertAccessIsAllowed(internalClient.admin().indices().prepareRecoveries()); - assertAccessIsAllowed(internalClient.admin().indices().prepareStats()); - - assertAccessIsAllowed(internalClient.admin().indices().prepareDelete(MONITORING_INDICES_PREFIX + "*")); - assertAccessIsAllowed(internalClient.admin().indices().prepareCreate(MONITORING_INDICES_PREFIX + "test")); - - assertAccessIsAllowed(internalClient.admin().indices().preparePutTemplate("foo") - .setSource(new BytesArray(randomTemplateSource()), XContentType.JSON)); - assertAccessIsAllowed(internalClient.admin().indices().prepareGetTemplates("foo")); - } - - public void testAllowAllAccess() { - InternalClient internalClient = internalCluster().getInstance(InternalClient.class); - assertAcked(internalClient.admin().indices().preparePutTemplate("foo") - .setSource(new BytesArray(randomTemplateSource()), XContentType.JSON).get()); - - assertAccessIsAllowed(internalClient.admin().indices().prepareDeleteTemplate("foo")); - assertAccessIsAllowed(internalClient.admin().cluster().prepareGetRepositories()); - } - - private static void assertAccessIsAllowed(ActionRequestBuilder request) { - request.get(); - } - - /** - * @return the source of a random monitoring template - */ - private String randomTemplateSource() { - return randomFrom(monitoringTemplates().stream().map(Tuple::v2).collect(Collectors.toList())); - } -} - diff --git a/plugin/src/test/java/org/elasticsearch/xpack/persistent/TestPersistentTasksPlugin.java b/plugin/src/test/java/org/elasticsearch/xpack/persistent/TestPersistentTasksPlugin.java index 980ac6484fc..b331e8bdc14 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/persistent/TestPersistentTasksPlugin.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/persistent/TestPersistentTasksPlugin.java @@ -50,7 +50,6 @@ import org.elasticsearch.transport.TransportService; 
import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.Assignment; import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.PersistentTask; -import org.elasticsearch.xpack.security.InternalClient; import java.io.IOException; import java.util.ArrayList; @@ -91,8 +90,7 @@ public class TestPersistentTasksPlugin extends Plugin implements ActionPlugin { ResourceWatcherService resourceWatcherService, ScriptService scriptService, NamedXContentRegistry xContentRegistry, Environment environment, NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { - InternalClient internalClient = new InternalClient(Settings.EMPTY, threadPool, client); - PersistentTasksService persistentTasksService = new PersistentTasksService(Settings.EMPTY, clusterService, threadPool, internalClient); + PersistentTasksService persistentTasksService = new PersistentTasksService(Settings.EMPTY, clusterService, threadPool, client); TestPersistentTasksExecutor testPersistentAction = new TestPersistentTasksExecutor(Settings.EMPTY, clusterService); PersistentTasksExecutorRegistry persistentTasksExecutorRegistry = new PersistentTasksExecutorRegistry(Settings.EMPTY, Collections.singletonList(testPersistentAction)); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/InternalClientTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/InternalClientTests.java deleted file mode 100644 index 3cb9e6d64e3..00000000000 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/InternalClientTests.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
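The InternalClientTests file removed here verified that a caller's thread context survives a round trip through the client. The ThreadContext behaviour that guarantee rests on can be exercised in isolation (a minimal, self-contained sketch):

    ThreadContext context = new ThreadContext(Settings.EMPTY);
    context.putTransient("foo", "bar");
    try (ThreadContext.StoredContext ignore = context.stashContext()) {
        assertNull(context.getTransient("foo"));          // a stashed scope starts from a clean context
        context.putTransient("foo", "boom");              // visible only inside the stashed scope
        assertEquals("boom", context.getTransient("foo"));
    }
    assertEquals("bar", context.getTransient("foo"));     // closing the StoredContext restores the original value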
- */ -package org.elasticsearch.xpack.security; - -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.client.FilterClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; - -import java.io.IOException; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; - -public class InternalClientTests extends ESTestCase { - private ThreadPool threadPool; - - @Override - public void setUp() throws Exception { - super.setUp(); - threadPool = new TestThreadPool(InternalClientTests.class.getName()); - } - - @Override - public void tearDown() throws Exception { - super.tearDown(); - ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); - } - - public void testContextIsPreserved() throws IOException, InterruptedException { - FilterClient dummy = new FilterClient(Settings.EMPTY, threadPool, null) { - @Override - protected > void doExecute(Action - action, Request request, - ActionListener listener) { - threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> listener.onResponse(null)); - } - }; - - InternalClient client = new InternalClient(Settings.EMPTY, threadPool, dummy) { - @Override - protected void processContext(ThreadContext threadContext) { - threadContext.putTransient("foo", "boom"); - } - }; - try (ThreadContext.StoredContext ctx = threadPool.getThreadContext().stashContext()) { - threadPool.getThreadContext().putTransient("foo", "bar"); - client.prepareSearch("boom").get(); - assertEquals("bar", threadPool.getThreadContext().getTransient("foo")); - } - - try (ThreadContext.StoredContext ctx = threadPool.getThreadContext().stashContext()) { - threadPool.getThreadContext().putTransient("foo", "bar"); - CountDownLatch latch = new CountDownLatch(1); - client.prepareSearch("boom").execute(new ActionListener() { - @Override - public void onResponse(SearchResponse searchResponse) { - try { - assertEquals("bar", threadPool.getThreadContext().getTransient("foo")); - } finally { - latch.countDown(); - } - } - - @Override - public void onFailure(Exception e) { - try { - throw new AssertionError(e); - } finally { - latch.countDown(); - } - - } - }); - latch.await(); - assertEquals("bar", threadPool.getThreadContext().getTransient("foo")); - } - } - - public void testContextIsPreservedOnError() throws IOException, InterruptedException { - FilterClient dummy = new FilterClient(Settings.EMPTY, threadPool, null) { - @Override - protected > void doExecute(Action - action, Request request, - ActionListener listener) { - threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> listener.onFailure(new Exception("boom bam bang"))); - } - }; - - InternalClient client = new InternalClient(Settings.EMPTY, threadPool, dummy) { - @Override - protected void processContext(ThreadContext threadContext) { - threadContext.putTransient("foo", "boom"); - } - }; - try (ThreadContext.StoredContext ctx = threadPool.getThreadContext().stashContext()) { - threadPool.getThreadContext().putTransient("foo", "bar"); - try { - client.prepareSearch("boom").get(); - } catch (Exception ex) { - assertEquals("boom bam bang", 
ex.getCause().getCause().getMessage()); - - } - assertEquals("bar", threadPool.getThreadContext().getTransient("foo")); - } - - try (ThreadContext.StoredContext ctx = threadPool.getThreadContext().stashContext()) { - threadPool.getThreadContext().putTransient("foo", "bar"); - CountDownLatch latch = new CountDownLatch(1); - client.prepareSearch("boom").execute(new ActionListener() { - @Override - public void onResponse(SearchResponse searchResponse) { - try { - throw new AssertionError("exception expected"); - } finally { - latch.countDown(); - } - - } - - @Override - public void onFailure(Exception e) { - try { - assertEquals("boom bam bang", e.getMessage()); - assertEquals("bar", threadPool.getThreadContext().getTransient("foo")); - } finally { - latch.countDown(); - } - - } - }); - latch.await(); - assertEquals("bar", threadPool.getThreadContext().getTransient("foo")); - } - } -} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/InternalClientIntegTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java similarity index 86% rename from plugin/src/test/java/org/elasticsearch/xpack/security/InternalClientIntegTests.java rename to plugin/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java index 2c5418308b9..14e33faad19 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/InternalClientIntegTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java @@ -12,12 +12,15 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.mockito.stubbing.Answer; import java.util.ArrayList; @@ -31,9 +34,10 @@ import static org.mockito.Matchers.anyObject; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; -public class InternalClientIntegTests extends ESSingleNodeTestCase { +public class ScrollHelperIntegTests extends ESSingleNodeTestCase { public void testFetchAllEntities() throws ExecutionException, InterruptedException { Client client = client(); @@ -50,7 +54,7 @@ public class InternalClientIntegTests extends ESSingleNodeTestCase { .request(); request.indicesOptions().ignoreUnavailable(); PlainActionFuture> future = new PlainActionFuture<>(); - InternalClient.fetchAllByEntity(client(), request, future, + ScrollHelper.fetchAllByEntity(client(), request, future, (hit) -> Integer.parseInt(hit.getSourceAsMap().get("number").toString())); Collection integers = future.actionGet(); ArrayList list = new ArrayList<>(integers); @@ -63,13 +67,16 @@ public class InternalClientIntegTests extends ESSingleNodeTestCase { /** * Tests that - * {@link InternalClient#fetchAllByEntity(Client, SearchRequest, org.elasticsearch.action.ActionListener, java.util.function.Function)} + * {@link ScrollHelper#fetchAllByEntity(Client, SearchRequest, ActionListener, 
Function)} * defends against scrolls broken in such a way that the remote Elasticsearch returns infinite results. While Elasticsearch * shouldn't do this it has in the past and it is very bad when it does. It takes out the whole node. So * this makes sure we defend against it properly. */ public void testFetchAllByEntityWithBrokenScroll() { Client client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); SearchRequest request = new SearchRequest(); String scrollId = randomAlphaOfLength(5); @@ -91,7 +98,7 @@ public class InternalClientIntegTests extends ESSingleNodeTestCase { doAnswer(returnResponse).when(client).searchScroll(anyObject(), anyObject()); AtomicReference failure = new AtomicReference<>(); - InternalClient.fetchAllByEntity(client, request, new ActionListener>() { + ScrollHelper.fetchAllByEntity(client, request, new ActionListener>() { @Override public void onResponse(Collection response) { fail("This shouldn't succeed."); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityLifecycleServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityLifecycleServiceTests.java index 242d5134331..597396f58f2 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityLifecycleServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityLifecycleServiceTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.client.Client; +import org.elasticsearch.client.FilterClient; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; @@ -64,11 +65,7 @@ public class SecurityLifecycleServiceTests extends ESTestCase { threadPool = new TestThreadPool("security template service tests"); transportClient = new MockTransportClient(Settings.EMPTY); - class IClient extends InternalSecurityClient { - IClient(Client transportClient) { - super(Settings.EMPTY, null, transportClient); - } - + Client client = new FilterClient(transportClient) { @Override protected listener) { listeners.add(listener); } - } - - InternalSecurityClient client = new IClient(transportClient); + }; securityLifecycleService = new SecurityLifecycleService(Settings.EMPTY, clusterService, threadPool, client, mock(IndexAuditTrail.class)); listeners = new CopyOnWriteArrayList<>(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index a938ad2c482..19ed877fe1c 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -95,7 +95,10 @@ public class SecurityTests extends ESTestCase { ClusterSettings clusterSettings = new ClusterSettings(settings, allowedSettings); when(clusterService.getClusterSettings()).thenReturn(clusterSettings); when(threadPool.relativeTimeInMillis()).thenReturn(1L); - return security.createComponents(mock(Client.class), threadPool, clusterService, mock(ResourceWatcherService.class), + Client client = mock(Client.class); + when(client.threadPool()).thenReturn(threadPool); +
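
Note (illustrative, not part of the patch): the recurring stubbing in these tests, a mocked Client whose ThreadPool hands back a real ThreadContext, exists because the origin-aware code paths call client.threadPool().getThreadContext() before executing and, in places, client.settings(). A minimal sketch of that wiring, assuming Mockito and the usual Elasticsearch test dependencies; the helper class name is hypothetical:

    import org.elasticsearch.client.Client;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.util.concurrent.ThreadContext;
    import org.elasticsearch.threadpool.ThreadPool;

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    // Illustrative helper, not taken from the change set.
    final class MockClientWiring {

        // Returns a mocked Client usable by origin-aware code: threadPool().getThreadContext()
        // yields a real, empty ThreadContext and settings() yields the supplied settings.
        static Client mockClientWithThreadContext(Settings settings) {
            ThreadPool threadPool = mock(ThreadPool.class);
            when(threadPool.getThreadContext()).thenReturn(new ThreadContext(settings));
            Client client = mock(Client.class);
            when(client.threadPool()).thenReturn(threadPool);
            when(client.settings()).thenReturn(settings);
            return client;
        }
    }

The tests in this patch inline this stubbing rather than using a helper.
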
when(client.settings()).thenReturn(settings); + return security.createComponents(client, threadPool, clusterService, mock(ResourceWatcherService.class), Arrays.asList(extensions)); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java index 093604948ca..9a79c171ce2 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java @@ -157,6 +157,7 @@ public class SecurityActionFilterTests extends ESTestCase { final String action = "internal:foo"; if (hasExistingAuthentication) { threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + threadContext.putHeader(Authentication.AUTHENTICATION_KEY, "foo"); threadContext.putTransient(AuthorizationService.ORIGINATING_ACTION_KEY, "indices:foo"); } else { assertNull(threadContext.getTransient(Authentication.AUTHENTICATION_KEY)); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java index be06edbaa8d..dac7264aacb 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; -import org.elasticsearch.xpack.security.InternalClient; +import org.elasticsearch.xpack.security.ScrollHelper; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.authc.AuthenticationService; @@ -140,7 +140,7 @@ public class AuditTrailTests extends SecurityIntegTestCase { return eventsRef.get(); } private Collection> getAuditEvents() throws Exception { - final InternalClient client = internalSecurityClient(); + final Client client = client(); DateTime now = new DateTime(DateTimeZone.UTC); String indexName = IndexNameResolver.resolve(IndexAuditTrail.INDEX_NAME_PREFIX, now, IndexNameResolver.Rollover.DAILY); @@ -157,7 +157,7 @@ public class AuditTrailTests extends SecurityIntegTestCase { request.indicesOptions().ignoreUnavailable(); PlainActionFuture>> listener = new PlainActionFuture(); - InternalClient.fetchAllByEntity(client, request, listener, SearchHit::getSourceAsMap); + ScrollHelper.fetchAllByEntity(client, request, listener, SearchHit::getSourceAsMap); return listener.get(); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java index 5fbfbac037b..20d3adb439b 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.Client; +import 
org.elasticsearch.client.FilterClient; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; @@ -26,7 +27,6 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.MockTransportClient; import org.elasticsearch.transport.TransportMessage; -import org.elasticsearch.xpack.security.InternalSecurityClient; import org.elasticsearch.xpack.security.audit.index.IndexAuditTrail.State; import org.elasticsearch.xpack.security.authc.AuthenticationToken; import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule; @@ -42,7 +42,7 @@ import static org.mockito.Mockito.when; public class IndexAuditTrailMutedTests extends ESTestCase { - private InternalSecurityClient client; + private Client client; private TransportClient transportClient; private ThreadPool threadPool; private ClusterService clusterService; @@ -61,7 +61,7 @@ public class IndexAuditTrailMutedTests extends ESTestCase { threadPool = new TestThreadPool("index audit trail tests"); transportClient = new MockTransportClient(Settings.EMPTY); clientCalled = new AtomicBoolean(false); - class IClient extends InternalSecurityClient { + class IClient extends FilterClient { IClient(Client transportClient){ super(Settings.EMPTY, threadPool, transportClient); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java index 2bd4763e262..fb84eeac2e7 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java @@ -55,7 +55,6 @@ import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; -import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.util.Arrays; @@ -331,7 +330,7 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase { when(nodes.isLocalNodeElectedMaster()).thenReturn(true); threadPool = new TestThreadPool("index audit trail tests"); enqueuedMessage = new SetOnce<>(); - auditor = new IndexAuditTrail(settings, internalSecurityClient(), threadPool, clusterService) { + auditor = new IndexAuditTrail(settings, client(), threadPool, clusterService) { @Override void enqueue(Message message, String type) { enqueuedMessage.set(message); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index 3f32bd66988..89fcf3a2910 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -5,25 +5,13 @@ */ package org.elasticsearch.xpack.security.authc; -import java.io.IOException; -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.time.Clock; -import java.util.Arrays; -import java.util.Base64; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicBoolean; - import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.SetOnce; import 
org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; @@ -49,7 +37,6 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportMessage; import org.elasticsearch.xpack.XPackSettings; -import org.elasticsearch.xpack.security.InternalSecurityClient; import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.authc.Authentication.RealmRef; @@ -63,6 +50,18 @@ import org.elasticsearch.xpack.security.user.User; import org.junit.After; import org.junit.Before; +import java.io.IOException; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.time.Clock; +import java.util.Arrays; +import java.util.Base64; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicBoolean; + import static org.elasticsearch.test.SecurityTestsUtils.assertAuthenticationException; import static org.elasticsearch.xpack.security.support.Exceptions.authenticationError; import static org.hamcrest.Matchers.arrayContaining; @@ -138,11 +137,12 @@ public class AuthenticationServiceTests extends ESTestCase { threadPool = new ThreadPool(settings, new FixedExecutorBuilder(settings, TokenService.THREAD_POOL_NAME, 1, 1000, "xpack.security.authc.token.thread_pool")); threadContext = threadPool.getThreadContext(); - InternalSecurityClient internalClient = new InternalSecurityClient(Settings.EMPTY, threadPool, client); + when(client.threadPool()).thenReturn(threadPool); + when(client.settings()).thenReturn(settings); lifecycleService = mock(SecurityLifecycleService.class); ClusterService clusterService = new ClusterService(settings, new ClusterSettings(settings, ClusterSettings .BUILT_IN_CLUSTER_SETTINGS), threadPool, Collections.emptyMap()); - tokenService = new TokenService(settings, Clock.systemUTC(), internalClient, lifecycleService, clusterService); + tokenService = new TokenService(settings, Clock.systemUTC(), client, lifecycleService, clusterService); service = new AuthenticationService(settings, realms, auditTrail, new DefaultAuthenticationFailureHandler(), threadPool, new AnonymousUser(settings), tokenService); } @@ -865,13 +865,16 @@ public class AuthenticationServiceTests extends ESTestCase { final Authentication expected = new Authentication(user, new RealmRef("realm", "custom", "node"), null); String token = tokenService.getUserTokenString(tokenService.createUserToken(expected)); when(lifecycleService.isSecurityIndexAvailable()).thenReturn(true); + GetRequestBuilder getRequestBuilder = mock(GetRequestBuilder.class); + when(client.prepareGet(eq(SecurityLifecycleService.SECURITY_INDEX_NAME), eq("doc"), any(String.class))) + .thenReturn(getRequestBuilder); doAnswer(invocationOnMock -> { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; GetResponse response = mock(GetResponse.class); 
when(response.isExists()).thenReturn(true); listener.onResponse(response); return Void.TYPE; - }).when(client).execute(eq(GetAction.INSTANCE), any(GetRequest.class), any(ActionListener.class)); + }).when(client).get(any(GetRequest.class), any(ActionListener.class)); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { threadContext.putHeader("Authorization", "Bearer " + token); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java index e53f3a5309e..81b42f6ca6f 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.action.token.CreateTokenResponse; import org.elasticsearch.xpack.security.action.token.InvalidateTokenResponse; +import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.security.client.SecurityClient; import org.junit.After; import org.junit.Before; @@ -29,6 +30,7 @@ import org.junit.Before; import java.io.IOException; import java.io.UncheckedIOException; import java.time.Instant; +import java.util.Collections; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -56,7 +58,7 @@ public class TokenAuthIntegTests extends SecurityIntegTestCase { } public void testTokenServiceBootstrapOnNodeJoin() throws Exception { - final Client client = internalSecurityClient(); + final Client client = client(); SecurityClient securityClient = new SecurityClient(client); CreateTokenResponse response = securityClient.prepareCreateToken() .setGrantType("password") @@ -84,7 +86,7 @@ public class TokenAuthIntegTests extends SecurityIntegTestCase { public void testTokenServiceCanRotateKeys() throws Exception { - final Client client = internalSecurityClient(); + final Client client = client(); SecurityClient securityClient = new SecurityClient(client); CreateTokenResponse response = securityClient.prepareCreateToken() .setGrantType("password") @@ -116,7 +118,9 @@ public class TokenAuthIntegTests extends SecurityIntegTestCase { } public void testExpiredTokensDeletedAfterExpiration() throws Exception { - final Client client = internalSecurityClient(); + final Client client = client().filterWithHeader(Collections.singletonMap("Authorization", + UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, + SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING))); SecurityClient securityClient = new SecurityClient(client); CreateTokenResponse response = securityClient.prepareCreateToken() .setGrantType("password") diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index bdc61eb05b1..6ab6f22136c 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.GetRequest; +import 
org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; @@ -23,7 +24,6 @@ import org.elasticsearch.test.EqualsHashCodeTestUtils; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.XPackSettings; -import org.elasticsearch.xpack.security.InternalSecurityClient; import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.security.authc.TokenService.BytesKey; @@ -49,7 +49,6 @@ import static org.mockito.Mockito.when; public class TokenServiceTests extends ESTestCase { - private InternalSecurityClient internalClient; private static ThreadPool threadPool; private static final Settings settings = Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "TokenServiceTests") .put(XPackSettings.TOKEN_SERVICE_ENABLED_SETTING.getKey(), true).build(); @@ -63,7 +62,8 @@ public class TokenServiceTests extends ESTestCase { @Before public void setupClient() throws GeneralSecurityException { client = mock(Client.class); - internalClient = new InternalSecurityClient(settings, threadPool, client); + when(client.threadPool()).thenReturn(threadPool); + when(client.settings()).thenReturn(settings); lifecycleService = mock(SecurityLifecycleService.class); when(lifecycleService.isSecurityIndexWriteable()).thenReturn(true); doAnswer(invocationOnMock -> { @@ -92,7 +92,7 @@ public class TokenServiceTests extends ESTestCase { public void testAttachAndGetToken() throws Exception { TokenService tokenService = - new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), internalClient, lifecycleService, clusterService); + new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), client, lifecycleService, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); final UserToken token = tokenService.createUserToken(authentication); assertNotNull(token); @@ -109,7 +109,7 @@ public class TokenServiceTests extends ESTestCase { try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { // verify a second separate token service with its own salt can also verify - TokenService anotherService = new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), internalClient, lifecycleService + TokenService anotherService = new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), client, lifecycleService , clusterService); anotherService.refreshMetaData(tokenService.getTokenMetaData()); PlainActionFuture future = new PlainActionFuture<>(); @@ -121,7 +121,7 @@ public class TokenServiceTests extends ESTestCase { public void testRotateKey() throws Exception { TokenService tokenService = - new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), internalClient, lifecycleService, clusterService); + new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), client, lifecycleService, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); final UserToken token = tokenService.createUserToken(authentication); assertNotNull(token); @@ -168,12 +168,12 @@ public class TokenServiceTests extends ESTestCase { public void testKeyExchange() throws Exception { TokenService 
tokenService = - new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), internalClient, lifecycleService, clusterService); + new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), client, lifecycleService, clusterService); int numRotations = 0;randomIntBetween(1, 5); for (int i = 0; i < numRotations; i++) { rotateKeys(tokenService); } - TokenService otherTokenService = new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), internalClient, lifecycleService, + TokenService otherTokenService = new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), client, lifecycleService, clusterService); otherTokenService.refreshMetaData(tokenService.getTokenMetaData()); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); @@ -203,7 +203,7 @@ public class TokenServiceTests extends ESTestCase { public void testPruneKeys() throws Exception { TokenService tokenService = - new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), internalClient, lifecycleService, clusterService); + new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), client, lifecycleService, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); final UserToken token = tokenService.createUserToken(authentication); assertNotNull(token); @@ -260,7 +260,7 @@ public class TokenServiceTests extends ESTestCase { public void testPassphraseWorks() throws Exception { TokenService tokenService = - new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), internalClient, lifecycleService, clusterService); + new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), client, lifecycleService, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); final UserToken token = tokenService.createUserToken(authentication); assertNotNull(token); @@ -277,7 +277,7 @@ public class TokenServiceTests extends ESTestCase { try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { // verify a second separate token service with its own passphrase cannot verify - TokenService anotherService = new TokenService(Settings.EMPTY, Clock.systemUTC(), internalClient, lifecycleService, + TokenService anotherService = new TokenService(Settings.EMPTY, Clock.systemUTC(), client, lifecycleService, clusterService); PlainActionFuture future = new PlainActionFuture<>(); anotherService.getAndValidateToken(requestContext, future); @@ -288,19 +288,20 @@ public class TokenServiceTests extends ESTestCase { public void testInvalidatedToken() throws Exception { when(lifecycleService.isSecurityIndexAvailable()).thenReturn(true); TokenService tokenService = - new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), internalClient, lifecycleService, clusterService); + new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), client, lifecycleService, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); final UserToken token = tokenService.createUserToken(authentication); assertNotNull(token); + GetRequestBuilder getRequestBuilder = mock(GetRequestBuilder.class); + when(client.prepareGet(SecurityLifecycleService.SECURITY_INDEX_NAME, "doc", TokenService.DOC_TYPE + "_" + token.getId())) + .thenReturn(getRequestBuilder); 
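
Note (illustrative, not part of the patch): the stubbing added around this hunk, shown in standalone form. The test now stubs client.prepareGet(...) to return a GetRequestBuilder and answers client.get(request, listener) directly, instead of intercepting client.execute(GetAction.INSTANCE, ...). The helper name, index and type values below are placeholders:

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.get.GetRequest;
    import org.elasticsearch.action.get.GetRequestBuilder;
    import org.elasticsearch.action.get.GetResponse;
    import org.elasticsearch.client.Client;

    import static org.mockito.Matchers.any;
    import static org.mockito.Matchers.anyString;
    import static org.mockito.Matchers.eq;
    import static org.mockito.Mockito.doAnswer;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    // Illustrative helper, not taken from the change set.
    final class GetStubbing {

        // Answers prepareGet(index, type, *) with a mock builder and completes any
        // get(request, listener) call with a GetResponse that reports the document exists.
        @SuppressWarnings("unchecked")
        static void stubExistingGet(Client client, String index, String type) {
            GetRequestBuilder builder = mock(GetRequestBuilder.class);
            when(client.prepareGet(eq(index), eq(type), anyString())).thenReturn(builder);
            doAnswer(invocation -> {
                ActionListener<GetResponse> listener = (ActionListener<GetResponse>) invocation.getArguments()[1];
                GetResponse response = mock(GetResponse.class);
                when(response.isExists()).thenReturn(true);
                listener.onResponse(response);
                return null;
            }).when(client).get(any(GetRequest.class), any(ActionListener.class));
        }
    }
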
doAnswer(invocationOnMock -> { - GetRequest request = (GetRequest) invocationOnMock.getArguments()[1]; - assertEquals(TokenService.DOC_TYPE + "_" + token.getId(), request.id()); - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; GetResponse response = mock(GetResponse.class); when(response.isExists()).thenReturn(true); listener.onResponse(response); return Void.TYPE; - }).when(client).execute(eq(GetAction.INSTANCE), any(GetRequest.class), any(ActionListener.class)); + }).when(client).get(any(GetRequest.class), any(ActionListener.class)); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token)); @@ -325,7 +326,7 @@ public class TokenServiceTests extends ESTestCase { public void testTokenExpiry() throws Exception { ClockMock clock = ClockMock.frozen(); - TokenService tokenService = new TokenService(tokenServiceEnabledSettings, clock, internalClient, lifecycleService, clusterService); + TokenService tokenService = new TokenService(tokenServiceEnabledSettings, clock, client, lifecycleService, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); final UserToken token = tokenService.createUserToken(authentication); @@ -373,7 +374,7 @@ public class TokenServiceTests extends ESTestCase { TokenService tokenService = new TokenService(Settings.builder() .put(XPackSettings.TOKEN_SERVICE_ENABLED_SETTING.getKey(), false) .build(), - Clock.systemUTC(), internalClient, lifecycleService, clusterService); + Clock.systemUTC(), client, lifecycleService, clusterService); IllegalStateException e = expectThrows(IllegalStateException.class, () -> tokenService.createUserToken(null)); assertEquals("tokens are not enabled", e.getMessage()); @@ -415,7 +416,7 @@ public class TokenServiceTests extends ESTestCase { final int numBytes = randomIntBetween(1, TokenService.MINIMUM_BYTES + 32); final byte[] randomBytes = new byte[numBytes]; random().nextBytes(randomBytes); - TokenService tokenService = new TokenService(Settings.EMPTY, Clock.systemUTC(), internalClient, lifecycleService, clusterService); + TokenService tokenService = new TokenService(Settings.EMPTY, Clock.systemUTC(), client, lifecycleService, clusterService); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); requestContext.putHeader("Authorization", "Bearer " + Base64.getEncoder().encodeToString(randomBytes)); @@ -429,7 +430,7 @@ public class TokenServiceTests extends ESTestCase { public void testIndexNotAvailable() throws Exception { TokenService tokenService = - new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), internalClient, lifecycleService, clusterService); + new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), client, lifecycleService, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); final UserToken token = tokenService.createUserToken(authentication); assertNotNull(token); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java index c92e2640b36..9dda5c0a1fa 100644 --- 
a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java @@ -22,14 +22,16 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.FilterClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.security.InternalClient; -import org.elasticsearch.xpack.security.InternalSecurityClient; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.support.Hasher; @@ -55,12 +57,17 @@ public class NativeUsersStoreTests extends ESTestCase { private static final String PASSWORD_FIELD = User.Fields.PASSWORD.getPreferredName(); private static final String BLANK_PASSWORD = ""; - private InternalSecurityClient internalClient; + private Client client; private final List>> requests = new CopyOnWriteArrayList<>(); @Before public void setupMocks() { - internalClient = new InternalSecurityClient(Settings.EMPTY, null, null) { + Client mockClient = mock(Client.class); + when(mockClient.settings()).thenReturn(Settings.EMPTY); + ThreadPool threadPool = mock(ThreadPool.class); + when(mockClient.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + client = new FilterClient(mockClient) { @Override protected < @@ -238,7 +245,7 @@ public class NativeUsersStoreTests extends ESTestCase { listener.onResponse(null); return null; }).when(securityLifecycleService).createIndexIfNeededThenExecute(any(ActionListener.class), any(Runnable.class)); - return new NativeUsersStore(Settings.EMPTY, internalClient, securityLifecycleService); + return new NativeUsersStore(Settings.EMPTY, client, securityLifecycleService); } } \ No newline at end of file diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeUserRoleMapperTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeUserRoleMapperTests.java index 819b4f6ab1c..f73d0dfb680 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeUserRoleMapperTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeUserRoleMapperTests.java @@ -12,12 +12,11 @@ import java.util.Set; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.security.InternalClient; -import org.elasticsearch.xpack.security.InternalSecurityClient; import 
org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; @@ -54,7 +53,7 @@ public class NativeUserRoleMapperTests extends ESTestCase { Collections.singletonList(FieldPredicate.create("cn=mutants,ou=groups,ou=dept_h,o=forces,dc=gc,dc=ca"))), Arrays.asList("mutants"), Collections.emptyMap(), false); - final InternalSecurityClient client = mock(InternalSecurityClient.class); + final Client client = mock(Client.class); final SecurityLifecycleService lifecycleService = mock(SecurityLifecycleService.class); when(lifecycleService.isSecurityIndexAvailable()).thenReturn(true); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java index 93c8419d1ce..32694f22fe8 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java @@ -5,15 +5,23 @@ */ package org.elasticsearch.xpack.security.authz; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.xpack.ClientHelper; +import org.elasticsearch.xpack.security.SecurityContext; import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.security.user.SystemUser; import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.user.XPackSecurityUser; +import org.elasticsearch.xpack.security.user.XPackUser; import org.junit.Before; +import java.util.concurrent.CountDownLatch; +import java.util.function.Consumer; + import static org.hamcrest.Matchers.is; /** @@ -57,4 +65,84 @@ public class AuthorizationUtilsTests extends ESTestCase { threadContext.putTransient(AuthorizationService.ORIGINATING_ACTION_KEY, randomFrom("internal:foo/bar")); assertThat(AuthorizationUtils.shouldReplaceUserWithSystem(threadContext, "internal:something"), is(false)); } + + public void testShouldSetUser() { + assertFalse(AuthorizationUtils.shouldSetUserBasedOnActionOrigin(threadContext)); + + // put origin in context + threadContext.putTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME, randomAlphaOfLength(4)); + assertTrue(AuthorizationUtils.shouldSetUserBasedOnActionOrigin(threadContext)); + + // set authentication + User user = new User(randomAlphaOfLength(6), new String[] {}); + Authentication authentication = new Authentication(user, new RealmRef("test", "test", "foo"), null); + threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + assertFalse(AuthorizationUtils.shouldSetUserBasedOnActionOrigin(threadContext)); + + threadContext = new ThreadContext(Settings.EMPTY); + threadContext.putTransient(Authentication.AUTHENTICATION_KEY, authentication); + assertFalse(AuthorizationUtils.shouldSetUserBasedOnActionOrigin(threadContext)); + + threadContext = new ThreadContext(Settings.EMPTY); + threadContext.putHeader(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME, randomAlphaOfLength(4)); + assertFalse(AuthorizationUtils.shouldSetUserBasedOnActionOrigin(threadContext)); + } + + public void testSwitchAndExecuteXpackSecurityUser() throws Exception { + SecurityContext 
securityContext = new SecurityContext(Settings.EMPTY, threadContext); + final String headerName = randomAlphaOfLengthBetween(4, 16); + final String headerValue = randomAlphaOfLengthBetween(4, 16); + final CountDownLatch latch = new CountDownLatch(2); + + final ActionListener listener = ActionListener.wrap(v -> { + assertNull(threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME)); + assertNull(threadContext.getHeader(headerName)); + assertEquals(XPackSecurityUser.INSTANCE, securityContext.getAuthentication().getUser()); + latch.countDown(); + }, e -> fail(e.getMessage())); + + final Consumer consumer = original -> { + assertNull(threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME)); + assertNull(threadContext.getHeader(headerName)); + assertEquals(XPackSecurityUser.INSTANCE, securityContext.getAuthentication().getUser()); + latch.countDown(); + listener.onResponse(null); + }; + threadContext.putHeader(headerName, headerValue); + threadContext.putTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME, ClientHelper.SECURITY_ORIGIN); + + AuthorizationUtils.switchUserBasedOnActionOriginAndExecute(threadContext, securityContext, consumer); + + latch.await(); + } + + public void testSwitchAndExecuteXpackUser() throws Exception { + SecurityContext securityContext = new SecurityContext(Settings.EMPTY, threadContext); + final String headerName = randomAlphaOfLengthBetween(4, 16); + final String headerValue = randomAlphaOfLengthBetween(4, 16); + final CountDownLatch latch = new CountDownLatch(2); + + final ActionListener listener = ActionListener.wrap(v -> { + assertNull(threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME)); + assertNull(threadContext.getHeader(headerName)); + assertEquals(XPackUser.INSTANCE, securityContext.getAuthentication().getUser()); + latch.countDown(); + }, e -> fail(e.getMessage())); + + final Consumer consumer = original -> { + assertNull(threadContext.getTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME)); + assertNull(threadContext.getHeader(headerName)); + assertEquals(XPackUser.INSTANCE, securityContext.getAuthentication().getUser()); + latch.countDown(); + listener.onResponse(null); + }; + threadContext.putHeader(headerName, headerValue); + threadContext.putTransient(ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME, + randomFrom(ClientHelper.ML_ORIGIN, ClientHelper.WATCHER_ORIGIN, ClientHelper.DEPRECATION_ORIGIN, + ClientHelper.MONITORING_ORIGIN, ClientHelper.PERSISTENT_TASK_ORIGIN)); + + AuthorizationUtils.switchUserBasedOnActionOriginAndExecute(threadContext, securityContext, consumer); + + latch.await(); + } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index e78a2301ff9..9856c74b5d6 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -37,8 +38,6 @@ import org.elasticsearch.license.XPackLicenseState; import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.security.InternalClient; -import org.elasticsearch.xpack.security.InternalSecurityClient; import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.action.role.PutRoleRequest; import org.elasticsearch.xpack.security.audit.index.IndexAuditTrail; @@ -185,14 +184,14 @@ public class NativeRolesStoreTests extends ESTestCase { } public void testPutOfRoleWithFlsDlsUnlicensed() throws IOException { - final InternalSecurityClient internalClient = mock(InternalSecurityClient.class); + final Client client = mock(Client.class); final ClusterService clusterService = mock(ClusterService.class); final XPackLicenseState licenseState = mock(XPackLicenseState.class); final AtomicBoolean methodCalled = new AtomicBoolean(false); final SecurityLifecycleService securityLifecycleService = - new SecurityLifecycleService(Settings.EMPTY, clusterService, threadPool, internalClient, + new SecurityLifecycleService(Settings.EMPTY, clusterService, threadPool, client, mock(IndexAuditTrail.class)); - final NativeRolesStore rolesStore = new NativeRolesStore(Settings.EMPTY, internalClient, licenseState, securityLifecycleService) { + final NativeRolesStore rolesStore = new NativeRolesStore(Settings.EMPTY, client, licenseState, securityLifecycleService) { @Override void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final ActionListener listener) { if (methodCalled.compareAndSet(false, true)) { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/support/IndexLifecycleManagerTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/support/IndexLifecycleManagerTests.java index 64dd4fe08f3..1d043c74124 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/support/IndexLifecycleManagerTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/support/IndexLifecycleManagerTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.client.Client; +import org.elasticsearch.client.FilterClient; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -44,7 +45,6 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.security.InternalSecurityClient; import org.elasticsearch.xpack.security.test.SecurityTestUtils; import org.elasticsearch.xpack.template.TemplateUtils; import org.hamcrest.Matchers; @@ -69,9 +69,11 @@ public class IndexLifecycleManagerTests extends ESTestCase { final Client mockClient = mock(Client.class); final ThreadPool threadPool = mock(ThreadPool.class); when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + when(mockClient.threadPool()).thenReturn(threadPool); + when(mockClient.settings()).thenReturn(Settings.EMPTY); actions = new LinkedHashMap<>(); - final InternalSecurityClient client = new InternalSecurityClient(Settings.EMPTY, threadPool, mockClient) { + final Client client = new FilterClient(mockClient) { @Override protected sendingUser = new AtomicReference<>(); - AsyncSender 
intercepted = new AsyncSender() { - @Override - public void sendRequest(Transport.Connection connection, String action, TransportRequest request, - TransportRequestOptions options, TransportResponseHandler handler) { - if (calledWrappedSender.compareAndSet(false, true) == false) { - fail("sender called more than once!"); - } - sendingUser.set(securityContext.getUser()); - } - }; - AsyncSender sender = interceptor.interceptSender(intercepted); - Transport.Connection connection = mock(Transport.Connection.class); - final Version version = Version.fromId(randomIntBetween(Version.V_5_0_0_ID, Version.V_5_2_0_ID - 100)); - when(connection.getVersion()).thenReturn(version); - sender.sendRequest(connection, "indices:foo[s]", null, null, null); - assertTrue(calledWrappedSender.get()); - assertNotEquals(user, sendingUser.get()); - assertEquals(KibanaUser.NAME, sendingUser.get().principal()); - assertThat(sendingUser.get().roles(), arrayContaining("kibana")); - assertEquals(user, securityContext.getUser()); - - // reset and test with version that was changed - calledWrappedSender.set(false); - sendingUser.set(null); - when(connection.getVersion()).thenReturn(Version.V_5_2_0); - sender.sendRequest(connection, "indices:foo[s]", null, null, null); - assertTrue(calledWrappedSender.get()); - assertEquals(user, sendingUser.get()); - - // reset and disable reserved realm - calledWrappedSender.set(false); - sendingUser.set(null); - when(connection.getVersion()).thenReturn(Version.V_5_0_0); - settings = Settings.builder().put(settings).put(XPackSettings.RESERVED_REALM_ENABLED_SETTING.getKey(), false).build(); - interceptor = new SecurityServerTransportInterceptor(settings, threadPool, - mock(AuthenticationService.class), mock(AuthorizationService.class), xPackLicenseState, mock(SSLService.class), - securityContext, new DestructiveOperations(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, - Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)))); - sender = interceptor.interceptSender(intercepted); - sender.sendRequest(connection, "indices:foo[s]", null, null, null); - assertTrue(calledWrappedSender.get()); - assertEquals(user, sendingUser.get()); - - verify(xPackLicenseState, times(3)).isAuthAllowed(); - verify(securityContext, times(1)).executeAsUser(any(User.class), any(Consumer.class), eq(version)); - verifyNoMoreInteractions(xPackLicenseState); - } - public void testSendToNewerVersionSetsCorrectVersion() throws Exception { final User authUser = randomBoolean() ? 
new User("authenticator") : null; final User user = new User("joe", randomRoles(), authUser); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index e6f6b1689aa..a52a42e4763 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -9,10 +9,13 @@ import org.apache.http.HttpStatus; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.plugins.MetaDataUpgrader; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; -import org.elasticsearch.xpack.ml.MachineLearningTemplateRegistry; +import org.elasticsearch.xpack.ml.MlMetaIndex; import org.elasticsearch.xpack.ml.integration.MlRestTestStateCleaner; +import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.ml.notifications.Auditor; import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry; import org.junit.After; @@ -43,14 +46,14 @@ public class XPackRestIT extends XPackRestTestCase { } /** - * Waits for the Security template to be created by the {@link SecurityLifecycleService} and - * the Machine Learning templates to be created by {@link MachineLearningTemplateRegistry} + * Waits for the Security template and the Machine Learning templates to be created by the {@link MetaDataUpgrader} */ @Before public void waitForTemplates() throws Exception { List templates = new ArrayList<>(); templates.add(SecurityLifecycleService.SECURITY_TEMPLATE_NAME); - templates.addAll(Arrays.asList(MachineLearningTemplateRegistry.TEMPLATE_NAMES)); + templates.addAll(Arrays.asList(Auditor.NOTIFICATIONS_INDEX, MlMetaIndex.INDEX_NAME, AnomalyDetectorsIndex.jobStateIndexName(), + AnomalyDetectorsIndex.jobResultsIndexPrefix())); templates.addAll(Arrays.asList(WatcherIndexTemplateRegistry.TEMPLATE_NAMES)); for (String template : templates) { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestCase.java b/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestCase.java index 16bd2abb505..19b6299a30b 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestCase.java @@ -18,9 +18,13 @@ import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.xpack.ml.MachineLearningTemplateRegistry; +import org.elasticsearch.xpack.ml.MlMetaIndex; +import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.ml.notifications.Auditor; import java.io.IOException; +import java.util.Arrays; +import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; @@ -49,7 +53,7 @@ public abstract class XPackRestTestCase extends ESClientYamlSuiteTestCase { } /** - * Waits for the Machine Learning templates to be created by {@link MachineLearningTemplateRegistry}. 
+ * Waits for the Machine Learning templates to be created by {@link org.elasticsearch.plugins.MetaDataUpgrader}. */ public static void waitForMlTemplates() throws InterruptedException { AtomicReference masterNodeVersion = new AtomicReference<>(); @@ -70,7 +74,9 @@ public abstract class XPackRestTestCase extends ESClientYamlSuiteTestCase { return false; }); - for (String template : MachineLearningTemplateRegistry.TEMPLATE_NAMES) { + final List templateNames = Arrays.asList(Auditor.NOTIFICATIONS_INDEX, MlMetaIndex.INDEX_NAME, + AnomalyDetectorsIndex.jobStateIndexName(), AnomalyDetectorsIndex.jobResultsIndexPrefix()); + for (String template : templateNames) { awaitBusy(() -> { Map response; try { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java index d4cc7fd2e27..e91d961c4e4 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java @@ -6,45 +6,26 @@ package org.elasticsearch.xpack.upgrade; import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportResponse; -import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.upgrade.actions.IndexUpgradeAction; import org.elasticsearch.xpack.upgrade.actions.IndexUpgradeInfoAction; import org.elasticsearch.xpack.upgrade.actions.IndexUpgradeInfoAction.Response; -import org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry; import org.junit.Before; import java.util.Collections; -import java.util.List; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; -import java.util.stream.Collectors; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.hasItem; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.startsWith; import static org.hamcrest.core.IsEqual.equalTo; public class IndexUpgradeIT extends IndexUpgradeIntegTestCase { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java index 560cc4c312d..284b5ac2d4a 100644 --- 
a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.watcher.execution.TriggeredWatchStore; -import org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry; import org.elasticsearch.xpack.watcher.watch.Watch; import org.junit.Before; import org.mockito.stubbing.Answer; @@ -43,6 +42,9 @@ import java.util.concurrent.ExecutorService; import static java.util.Arrays.asList; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry.HISTORY_TEMPLATE_NAME; +import static org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry.TRIGGERED_TEMPLATE_NAME; +import static org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry.WATCHES_TEMPLATE_NAME; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyObject; import static org.mockito.Matchers.anyString; @@ -85,9 +87,9 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { IndexRoutingTable watchRoutingTable = IndexRoutingTable.builder(new Index(Watch.INDEX, "foo")).build(); ClusterState clusterState = ClusterState.builder(new ClusterName("my-cluster")) .metaData(MetaData.builder() - .put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.HISTORY_TEMPLATE_NAME)) - .put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.TRIGGERED_TEMPLATE_NAME)) - .put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.WATCHES_TEMPLATE_NAME)) + .put(IndexTemplateMetaData.builder(HISTORY_TEMPLATE_NAME)) + .put(IndexTemplateMetaData.builder(TRIGGERED_TEMPLATE_NAME)) + .put(IndexTemplateMetaData.builder(WATCHES_TEMPLATE_NAME)) .build()) .nodes(new DiscoveryNodes.Builder().masterNodeId("node_1").localNodeId("node_1").add(newNode("node_1"))) .routingTable(RoutingTable.builder().add(watchRoutingTable).build()) @@ -122,9 +124,9 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { .nodes(new DiscoveryNodes.Builder().masterNodeId("node_1").localNodeId("node_1").add(newNode("node_1"))) .routingTable(RoutingTable.builder().add(watchRoutingTable).build()) .metaData(MetaData.builder() - .put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.HISTORY_TEMPLATE_NAME)) - .put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.TRIGGERED_TEMPLATE_NAME)) - .put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.WATCHES_TEMPLATE_NAME)) + .put(IndexTemplateMetaData.builder(HISTORY_TEMPLATE_NAME)) + .put(IndexTemplateMetaData.builder(TRIGGERED_TEMPLATE_NAME)) + .put(IndexTemplateMetaData.builder(WATCHES_TEMPLATE_NAME)) .build()) .build(); @@ -151,9 +153,9 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { clusterState = ClusterState.builder(new ClusterName("my-cluster")) .nodes(new DiscoveryNodes.Builder().masterNodeId("node_1").localNodeId("node_1").add(newNode("node_1"))) .metaData(MetaData.builder() - .put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.HISTORY_TEMPLATE_NAME)) - .put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.TRIGGERED_TEMPLATE_NAME)) - 
.put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.WATCHES_TEMPLATE_NAME)) + .put(IndexTemplateMetaData.builder(HISTORY_TEMPLATE_NAME)) + .put(IndexTemplateMetaData.builder(TRIGGERED_TEMPLATE_NAME)) + .put(IndexTemplateMetaData.builder(WATCHES_TEMPLATE_NAME)) .build()) .build(); when(watcherService.state()).thenReturn(WatcherState.STARTED); @@ -164,9 +166,9 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { ClusterState previousClusterState = ClusterState.builder(new ClusterName("my-cluster")) .nodes(new DiscoveryNodes.Builder().masterNodeId("node_1").localNodeId("node_1").add(newNode("node_1"))) .metaData(MetaData.builder() - .put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.HISTORY_TEMPLATE_NAME)) - .put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.TRIGGERED_TEMPLATE_NAME)) - .put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.WATCHES_TEMPLATE_NAME)) + .put(IndexTemplateMetaData.builder(HISTORY_TEMPLATE_NAME)) + .put(IndexTemplateMetaData.builder(TRIGGERED_TEMPLATE_NAME)) + .put(IndexTemplateMetaData.builder(WATCHES_TEMPLATE_NAME)) .build()) .build(); when(watcherService.validate(clusterState)).thenReturn(true); @@ -438,9 +440,9 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { ClusterState state = ClusterState.builder(new ClusterName("my-cluster")) .nodes(nodes) .metaData(MetaData.builder() - .put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.HISTORY_TEMPLATE_NAME)) - .put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.TRIGGERED_TEMPLATE_NAME)) - .put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.WATCHES_TEMPLATE_NAME)) + .put(IndexTemplateMetaData.builder(HISTORY_TEMPLATE_NAME)) + .put(IndexTemplateMetaData.builder(TRIGGERED_TEMPLATE_NAME)) + .put(IndexTemplateMetaData.builder(WATCHES_TEMPLATE_NAME)) .build()) .build(); when(watcherService.validate(eq(state))).thenReturn(true); @@ -459,17 +461,17 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { MetaData.Builder metaDataBuilder = MetaData.builder(); boolean isHistoryTemplateAdded = randomBoolean(); if (isHistoryTemplateAdded) { - metaDataBuilder.put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.HISTORY_TEMPLATE_NAME)); + metaDataBuilder.put(IndexTemplateMetaData.builder(HISTORY_TEMPLATE_NAME)); } boolean isTriggeredTemplateAdded = randomBoolean(); if (isTriggeredTemplateAdded) { - metaDataBuilder.put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.TRIGGERED_TEMPLATE_NAME)); + metaDataBuilder.put(IndexTemplateMetaData.builder(TRIGGERED_TEMPLATE_NAME)); } boolean isWatchesTemplateAdded = randomBoolean(); if (isWatchesTemplateAdded) { // ensure not all templates are added, otherwise life cycle service would start if ((isHistoryTemplateAdded || isTriggeredTemplateAdded) == false) { - metaDataBuilder.put(IndexTemplateMetaData.builder(WatcherIndexTemplateRegistry.WATCHES_TEMPLATE_NAME)); + metaDataBuilder.put(IndexTemplateMetaData.builder(WATCHES_TEMPLATE_NAME)); } } ClusterState state = ClusterState.builder(new ClusterName("my-cluster")).nodes(nodes).metaData(metaDataBuilder).build(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java index 631d0fca24e..274bd9b6f10 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java @@ -10,15 +10,22 @@ import 
org.elasticsearch.Version;
 import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
+import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
 import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
 import org.elasticsearch.action.search.ClearScrollAction;
+import org.elasticsearch.action.search.ClearScrollRequest;
 import org.elasticsearch.action.search.ClearScrollResponse;
 import org.elasticsearch.action.search.SearchAction;
+import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.SearchResponseSections;
 import org.elasticsearch.action.search.SearchScrollAction;
+import org.elasticsearch.action.search.SearchScrollRequest;
 import org.elasticsearch.action.search.ShardSearchFailure;
+import org.elasticsearch.action.support.PlainActionFuture;
+import org.elasticsearch.client.AdminClient;
 import org.elasticsearch.client.Client;
+import org.elasticsearch.client.IndicesAdminClient;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -32,6 +39,8 @@ import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.cluster.routing.TestShardRouting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.text.Text;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
@@ -41,7 +50,6 @@ import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.XPackSettings;
-import org.elasticsearch.xpack.security.InternalClient;
 import org.elasticsearch.xpack.watcher.execution.ExecutionService;
 import org.elasticsearch.xpack.watcher.execution.TriggeredWatchStore;
 import org.elasticsearch.xpack.watcher.trigger.TriggerService;
@@ -56,6 +64,7 @@ import java.util.HashSet;
 import java.util.List;

 import static java.util.Arrays.asList;
+import static org.elasticsearch.xpack.watcher.watch.Watch.INDEX;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.is;
 import static org.mockito.Matchers.any;
@@ -74,10 +83,9 @@ public class WatcherServiceTests extends ESTestCase {
         ExecutionService executionService = mock(ExecutionService.class);
         when(executionService.validate(anyObject())).thenReturn(true);
         Watch.Parser parser = mock(Watch.Parser.class);
-        InternalClient client = mock(InternalClient.class);

         WatcherService service = new WatcherService(Settings.EMPTY, triggerService, triggeredWatchStore,
-                executionService, parser, client);
+                executionService, parser, mock(Client.class));

         ClusterState.Builder csBuilder = new ClusterState.Builder(new ClusterName("_name"));
         MetaData.Builder metaDataBuilder = MetaData.builder();
@@ -102,9 +110,10 @@ public class WatcherServiceTests extends ESTestCase {
         Watch.Parser parser = mock(Watch.Parser.class);
         Client client = mock(Client.class);
         ThreadPool threadPool = mock(ThreadPool.class);
-        InternalClient internalClient = new InternalClient(settings, threadPool, client);
+        when(client.threadPool()).thenReturn(threadPool);
+        when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
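// Editor's sketch (not part of the original diff): a minimal, self-contained illustration of the
// stubbing pattern this hunk and the other test changes rely on. With InternalClient gone, a plain
// mocked Client must expose a thread pool and a real ThreadContext (production code now stashes the
// origin there), and asynchronous calls are answered by returning already-completed
// PlainActionFutures from the typed client methods instead of intercepting Client#execute with
// doAnswer. The class and method names below are illustrative only.
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.ClearScrollResponse;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;

import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class MockedClientSketchTests extends ESTestCase {

    public void testStubbingPattern() {
        // the mocked client needs a real ThreadContext so origin-stashing code can run against it
        Client client = mock(Client.class);
        ThreadPool threadPool = mock(ThreadPool.class);
        when(client.threadPool()).thenReturn(threadPool);
        when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));

        // async responses: hand back a future that is already completed rather than capturing the
        // listener passed to client.execute(action, request, listener)
        PlainActionFuture<ClearScrollResponse> clearScrollFuture = new PlainActionFuture<>();
        when(client.clearScroll(any(ClearScrollRequest.class))).thenReturn(clearScrollFuture);
        clearScrollFuture.onResponse(new ClearScrollResponse(true, 1));

        assertTrue(client.clearScroll(new ClearScrollRequest()).actionGet().isSucceeded());
    }
}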
WatcherService service = new WatcherService(settings, triggerService, triggeredWatchStore, - executionService, parser, internalClient); + executionService, parser, client); // cluster state setup, with one node, one shard @@ -136,11 +145,21 @@ public class WatcherServiceTests extends ESTestCase { RefreshResponse refreshResponse = mock(RefreshResponse.class); when(refreshResponse.getSuccessfulShards()) .thenReturn(clusterState.getMetaData().getIndices().get(Watch.INDEX).getNumberOfShards()); + AdminClient adminClient = mock(AdminClient.class); + IndicesAdminClient indicesAdminClient = mock(IndicesAdminClient.class); + when(client.admin()).thenReturn(adminClient); + when(adminClient.indices()).thenReturn(indicesAdminClient); + PlainActionFuture refreshFuture = new PlainActionFuture<>(); + when(indicesAdminClient.refresh(any(RefreshRequest.class))).thenReturn(refreshFuture); + refreshFuture.onResponse(refreshResponse); // empty scroll response, no further scrolling needed SearchResponseSections scrollSearchSections = new SearchResponseSections(SearchHits.empty(), null, null, false, false, null, 1); SearchResponse scrollSearchResponse = new SearchResponse(scrollSearchSections, "scrollId", 1, 1, 0, 10, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + PlainActionFuture searchScrollResponseFuture = new PlainActionFuture<>(); + when(client.searchScroll(any(SearchScrollRequest.class))).thenReturn(searchScrollResponseFuture); + searchScrollResponseFuture.onResponse(scrollSearchResponse); // one search response containing active and inactive watches int count = randomIntBetween(2, 200); @@ -168,27 +187,13 @@ public class WatcherServiceTests extends ESTestCase { SearchResponseSections sections = new SearchResponseSections(searchHits, null, null, false, false, null, 1); SearchResponse searchResponse = new SearchResponse(sections, "scrollId", 1, 1, 0, 10, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + PlainActionFuture searchResponseFuture = new PlainActionFuture<>(); + when(client.search(any(SearchRequest.class))).thenReturn(searchResponseFuture); + searchResponseFuture.onResponse(searchResponse); - // we do need to to use this kind of mocking because of the internal client, which calls doExecute at the end on the supplied - // client instance - doAnswer(invocation -> { - Action action = (Action) invocation.getArguments()[0]; - ActionListener listener = (ActionListener) invocation.getArguments()[2]; - - if (RefreshAction.NAME.equals(action.name())) { - listener.onResponse(refreshResponse); - } else if (ClearScrollAction.NAME.equals(action.name())) { - listener.onResponse(new ClearScrollResponse(true, 1)); - } else if (SearchAction.NAME.equals(action.name())) { - listener.onResponse(searchResponse); - } else if (SearchScrollAction.NAME.equals(action.name())) { - listener.onResponse(scrollSearchResponse); - } else { - listener.onFailure(new ElasticsearchException("Unknown action call " + action.name())); - } - - return null; - }).when(client).execute(any(), any(), any()); + PlainActionFuture clearScrollFuture = new PlainActionFuture<>(); + when(client.clearScroll(any(ClearScrollRequest.class))).thenReturn(clearScrollFuture); + clearScrollFuture.onResponse(new ClearScrollResponse(true, 1)); service.start(clusterState); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java index 7eb22d06581..04442f0f793 100644 --- 
a/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/actions/index/IndexActionTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.watcher.actions.Action; import org.elasticsearch.xpack.watcher.actions.Action.Result.Status; import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; @@ -204,9 +203,7 @@ public class IndexActionTests extends ESIntegTestCase { } builder.endObject(); Client client = client(); - InternalClient internalClient = new InternalClient(client.settings(), client.threadPool(), client); - - IndexActionFactory actionParser = new IndexActionFactory(Settings.EMPTY, internalClient); + IndexActionFactory actionParser = new IndexActionFactory(Settings.EMPTY, client); XContentParser parser = createParser(builder); parser.nextToken(); @@ -235,9 +232,8 @@ public class IndexActionTests extends ESIntegTestCase { } builder.endObject(); Client client = client(); - InternalClient internalClient = new InternalClient(client.settings(), client.threadPool(), client); - IndexActionFactory actionParser = new IndexActionFactory(Settings.EMPTY, internalClient); + IndexActionFactory actionParser = new IndexActionFactory(Settings.EMPTY, client); XContentParser parser = createParser(builder); parser.nextToken(); try { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java index 0ee079ae6fc..3971f816bba 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -27,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.watcher.watch.clock.ClockMock; import org.elasticsearch.xpack.watcher.actions.Action; import org.elasticsearch.xpack.watcher.actions.ActionStatus; @@ -125,6 +127,9 @@ public class ExecutionServiceTests extends ESTestCase { clock = ClockMock.frozen(); client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); parser = mock(Watch.Parser.class); DiscoveryNode discoveryNode = new DiscoveryNode("node_1", ESTestCase.buildNewFakeTransportAddress(), Collections.emptyMap(), diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java 
b/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java index 76df32f1d2e..b0608a8caba 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -42,6 +43,7 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.watcher.common.http.HttpClient; import org.elasticsearch.xpack.watcher.notification.email.EmailService; import org.elasticsearch.xpack.watcher.watch.clock.ClockMock; @@ -92,6 +94,9 @@ public class TriggeredWatchStoreTests extends ESTestCase { @Before public void init() { client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); parser = mock(TriggeredWatch.Parser.class); triggeredWatchStore = new TriggeredWatchStore(Settings.EMPTY, client, parser); triggeredWatchStore.start(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java index c9019eabd41..386affd815f 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java @@ -13,20 +13,21 @@ import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.watcher.common.http.HttpClient; -import org.elasticsearch.xpack.watcher.common.http.HttpRequest; -import org.elasticsearch.xpack.watcher.common.http.HttpResponse; -import org.elasticsearch.xpack.watcher.notification.jira.JiraAccount; -import org.elasticsearch.xpack.watcher.notification.jira.JiraIssue; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.watcher.actions.ActionStatus; import org.elasticsearch.xpack.watcher.actions.ActionWrapper; import org.elasticsearch.xpack.watcher.actions.jira.JiraAction; +import org.elasticsearch.xpack.watcher.common.http.HttpClient; +import org.elasticsearch.xpack.watcher.common.http.HttpRequest; +import org.elasticsearch.xpack.watcher.common.http.HttpResponse; import org.elasticsearch.xpack.watcher.execution.ExecutionState; import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; import org.elasticsearch.xpack.watcher.execution.WatchExecutionResult; import org.elasticsearch.xpack.watcher.execution.Wid; -import 
org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry; +import org.elasticsearch.xpack.watcher.notification.jira.JiraAccount; +import org.elasticsearch.xpack.watcher.notification.jira.JiraIssue; import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTriggerEvent; import org.elasticsearch.xpack.watcher.watch.Watch; import org.elasticsearch.xpack.watcher.watch.WatchStatus; @@ -39,6 +40,7 @@ import org.junit.Before; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.elasticsearch.xpack.watcher.history.HistoryStore.getHistoryIndexNameForTime; +import static org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry.INDEX_TEMPLATE_VERSION; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; @@ -59,6 +61,9 @@ public class HistoryStoreTests extends ESTestCase { @Before public void init() { client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); historyStore = new HistoryStore(Settings.EMPTY, client); historyStore.start(); } @@ -105,7 +110,7 @@ public class HistoryStoreTests extends ESTestCase { } public void testIndexNameGeneration() { - String indexTemplateVersion = WatcherIndexTemplateRegistry.INDEX_TEMPLATE_VERSION; + String indexTemplateVersion = INDEX_TEMPLATE_VERSION; assertThat(getHistoryIndexNameForTime(new DateTime(0, UTC)), equalTo(".watcher-history-"+ indexTemplateVersion +"-1970.01.01")); assertThat(getHistoryIndexNameForTime(new DateTime(100000000000L, UTC)), diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistryTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistryTests.java index 2fee07b4a2b..21134223103 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistryTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistryTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.watcher.support; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; import org.elasticsearch.client.AdminClient; @@ -29,7 +28,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.security.InternalClient; import org.junit.Before; import org.mockito.ArgumentCaptor; @@ -43,7 +41,6 @@ import static org.hamcrest.Matchers.is; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyObject; import static org.mockito.Matchers.eq; -import static org.mockito.Matchers.same; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; @@ -61,20 +58,20 @@ public class WatcherIndexTemplateRegistryTests extends ESTestCase { when(threadPool.generic()).thenReturn(EsExecutors.newDirectExecutorService()); client = mock(Client.class); - InternalClient internalClient = 
new InternalClient(Settings.EMPTY, threadPool, client); + when(client.threadPool()).thenReturn(threadPool); AdminClient adminClient = mock(AdminClient.class); IndicesAdminClient indicesAdminClient = mock(IndicesAdminClient.class); when(adminClient.indices()).thenReturn(indicesAdminClient); when(client.admin()).thenReturn(adminClient); doAnswer(invocationOnMock -> { ActionListener listener = - (ActionListener) invocationOnMock.getArguments()[2]; + (ActionListener) invocationOnMock.getArguments()[1]; listener.onResponse(new TestPutIndexTemplateResponse(true)); return null; - }).when(client).execute(same(PutIndexTemplateAction.INSTANCE), any(), any()); + }).when(indicesAdminClient).putTemplate(any(PutIndexTemplateRequest.class), any(ActionListener.class)); ClusterService clusterService = mock(ClusterService.class); - registry = new WatcherIndexTemplateRegistry(Settings.EMPTY, clusterService, threadPool, internalClient); + registry = new WatcherIndexTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client); } public void testThatNonExistingTemplatesAreAddedImmediately() { @@ -84,13 +81,13 @@ public class WatcherIndexTemplateRegistryTests extends ESTestCase { ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyList(), nodes); registry.clusterChanged(event); ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(PutIndexTemplateRequest.class); - verify(client, times(3)).execute(anyObject(), argumentCaptor.capture(), anyObject()); + verify(client.admin().indices(), times(3)).putTemplate(argumentCaptor.capture(), anyObject()); // now delete one template from the cluster state and lets retry ClusterChangedEvent newEvent = createClusterChangedEvent(Arrays.asList(WatcherIndexTemplateRegistry.HISTORY_TEMPLATE_NAME, WatcherIndexTemplateRegistry.TRIGGERED_TEMPLATE_NAME), nodes); registry.clusterChanged(newEvent); - verify(client, times(4)).execute(anyObject(), argumentCaptor.capture(), anyObject()); + verify(client.admin().indices(), times(4)).putTemplate(argumentCaptor.capture(), anyObject()); } public void testThatTemplatesExist() { @@ -115,7 +112,7 @@ public class WatcherIndexTemplateRegistryTests extends ESTestCase { registry.clusterChanged(event); ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(PutIndexTemplateRequest.class); - verify(client, times(1)).execute(anyObject(), argumentCaptor.capture(), anyObject()); + verify(client.admin().indices(), times(1)).putTemplate(argumentCaptor.capture(), anyObject()); assertThat(argumentCaptor.getValue().name(), is(WatcherIndexTemplateRegistry.HISTORY_TEMPLATE_NAME)); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 9caf1b8ffe2..4d29d430d87 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -51,25 +51,25 @@ import org.elasticsearch.xpack.XPackClient; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.watcher.notification.email.Authentication; -import org.elasticsearch.xpack.watcher.notification.email.Email; -import org.elasticsearch.xpack.watcher.notification.email.EmailService; -import org.elasticsearch.xpack.watcher.notification.email.Profile; import 
org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.authc.file.FileRealm; import org.elasticsearch.xpack.security.authc.support.Hasher; -import org.elasticsearch.xpack.watcher.watch.clock.ClockMock; import org.elasticsearch.xpack.template.TemplateUtils; import org.elasticsearch.xpack.watcher.WatcherState; import org.elasticsearch.xpack.watcher.client.WatcherClient; import org.elasticsearch.xpack.watcher.execution.ExecutionState; import org.elasticsearch.xpack.watcher.execution.TriggeredWatchStore; import org.elasticsearch.xpack.watcher.history.HistoryStore; +import org.elasticsearch.xpack.watcher.notification.email.Authentication; +import org.elasticsearch.xpack.watcher.notification.email.Email; +import org.elasticsearch.xpack.watcher.notification.email.EmailService; +import org.elasticsearch.xpack.watcher.notification.email.Profile; import org.elasticsearch.xpack.watcher.support.WatcherIndexTemplateRegistry; import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource; import org.elasticsearch.xpack.watcher.transport.actions.stats.WatcherStatsResponse; import org.elasticsearch.xpack.watcher.trigger.ScheduleTriggerEngineMock; import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.clock.ClockMock; import org.hamcrest.Matcher; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -86,6 +86,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.time.Clock; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; @@ -184,10 +185,8 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase @Override protected Set excludeTemplates() { Set excludes = new HashSet<>(); - for (WatcherIndexTemplateRegistry.TemplateConfig templateConfig : WatcherIndexTemplateRegistry.TEMPLATE_CONFIGS) { - excludes.add(templateConfig.getTemplateName()); - } - return excludes; + excludes.addAll(Arrays.asList(WatcherIndexTemplateRegistry.TEMPLATE_NAMES)); + return Collections.unmodifiableSet(excludes); } @Override diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java index 9c68238cabf..320f2b603ce 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -28,6 +29,7 @@ import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.watcher.Watcher; import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; import org.elasticsearch.xpack.watcher.input.Input; @@ -75,6 +77,9 @@ public class SearchInputTests extends ESTestCase { scriptService 
= new ScriptService(Settings.EMPTY, engines, contexts); client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); } public void testExecute() throws Exception { diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java index 0dcbdc7ac59..6215f22e325 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; -import org.elasticsearch.xpack.ml.MachineLearningTemplateRegistry; import org.elasticsearch.xpack.security.SecurityClusterClientYamlTestCase; import org.elasticsearch.xpack.test.rest.XPackRestTestCase; import org.junit.Before; @@ -33,7 +32,7 @@ import static org.hamcrest.Matchers.is; public class UpgradeClusterClientYamlTestSuiteIT extends SecurityClusterClientYamlTestCase { /** - * Waits for the Machine Learning templates to be created by {@link MachineLearningTemplateRegistry} + * Waits for the Machine Learning templates to be created by {@link org.elasticsearch.plugins.MetaDataUpgrader} */ @Before public void waitForTemplates() throws Exception { diff --git a/qa/smoke-test-ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityIT.java b/qa/smoke-test-ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityIT.java index a65b4426abb..bb1bc69ccf0 100644 --- a/qa/smoke-test-ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityIT.java +++ b/qa/smoke-test-ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityIT.java @@ -13,12 +13,15 @@ import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.plugins.MetaDataUpgrader; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.xpack.ml.MachineLearningTemplateRegistry; +import org.elasticsearch.xpack.ml.MlMetaIndex; import org.elasticsearch.xpack.ml.integration.MlRestTestStateCleaner; +import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.ml.notifications.Auditor; import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.junit.After; import org.junit.Before; @@ -49,14 +52,14 @@ public class MlWithSecurityIT extends ESClientYamlSuiteTestCase { } /** - * Waits for the Security template to be created by the {@link SecurityLifecycleService} and - * the Machine Learning templates to be created by {@link MachineLearningTemplateRegistry} + * Waits for the Security template and the Machine Learning templates to be created by the {@link MetaDataUpgrader} */ @Before public void 
waitForTemplates() throws Exception { List templates = new ArrayList<>(); templates.add(SecurityLifecycleService.SECURITY_TEMPLATE_NAME); - templates.addAll(Arrays.asList(MachineLearningTemplateRegistry.TEMPLATE_NAMES)); + templates.addAll(Arrays.asList(Auditor.NOTIFICATIONS_INDEX, MlMetaIndex.INDEX_NAME, + AnomalyDetectorsIndex.jobStateIndexName(), AnomalyDetectorsIndex.jobResultsIndexPrefix())); for (String template : templates) { awaitCallApi("indices.exists_template", Collections.singletonMap("name", template), Collections.emptyList(), diff --git a/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/WatcherWithMustacheIT.java b/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/WatcherWithMustacheIT.java index 60e7401ae07..1dec2e7bd08 100644 --- a/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/WatcherWithMustacheIT.java +++ b/qa/smoke-test-watcher-with-mustache/src/test/java/org/elasticsearch/smoketest/WatcherWithMustacheIT.java @@ -15,6 +15,8 @@ import org.junit.After; import org.junit.Before; import java.io.IOException; +import java.util.Arrays; +import java.util.List; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; @@ -35,11 +37,12 @@ public class WatcherWithMustacheIT extends ESClientYamlSuiteTestCase { @Before public void startWatcher() throws Exception { + final List watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistry.TEMPLATE_NAMES); assertBusy(() -> { try { getAdminExecutionContext().callApi("xpack.watcher.start", emptyMap(), emptyList(), emptyMap()); - for (String template : WatcherIndexTemplateRegistry.TEMPLATE_NAMES) { + for (String template : watcherTemplates) { ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi("indices.exists_template", singletonMap("name", template), emptyList(), emptyMap()); assertThat(templateExistsResponse.getStatusCode(), is(200)); diff --git a/qa/smoke-test-watcher-with-painless/src/test/java/org/elasticsearch/smoketest/WatcherWithPainlessIT.java b/qa/smoke-test-watcher-with-painless/src/test/java/org/elasticsearch/smoketest/WatcherWithPainlessIT.java index 605ac8b343f..6536ec6f9a5 100644 --- a/qa/smoke-test-watcher-with-painless/src/test/java/org/elasticsearch/smoketest/WatcherWithPainlessIT.java +++ b/qa/smoke-test-watcher-with-painless/src/test/java/org/elasticsearch/smoketest/WatcherWithPainlessIT.java @@ -14,6 +14,8 @@ import org.junit.After; import org.junit.Before; import java.io.IOException; +import java.util.Arrays; +import java.util.List; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; @@ -34,11 +36,12 @@ public class WatcherWithPainlessIT extends ESClientYamlSuiteTestCase { @Before public void startWatcher() throws Exception { + final List watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistry.TEMPLATE_NAMES); assertBusy(() -> { try { getAdminExecutionContext().callApi("xpack.watcher.start", emptyMap(), emptyList(), emptyMap()); - for (String template : WatcherIndexTemplateRegistry.TEMPLATE_NAMES) { + for (String template : watcherTemplates) { ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi("indices.exists_template", singletonMap("name", template), emptyList(), emptyMap()); assertThat(templateExistsResponse.getStatusCode(), is(200)); diff --git a/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityClientYamlTestSuiteIT.java 
b/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityClientYamlTestSuiteIT.java index ddb744c4b75..d21cc1d8686 100644 --- a/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityClientYamlTestSuiteIT.java +++ b/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityClientYamlTestSuiteIT.java @@ -18,6 +18,8 @@ import org.junit.After; import org.junit.Before; import java.io.IOException; +import java.util.Arrays; +import java.util.List; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; @@ -41,11 +43,12 @@ public class SmokeTestWatcherWithSecurityClientYamlTestSuiteIT extends ESClientY @Before public void startWatcher() throws Exception { + final List watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistry.TEMPLATE_NAMES); assertBusy(() -> { try { getAdminExecutionContext().callApi("xpack.watcher.start", emptyMap(), emptyList(), emptyMap()); - for (String template : WatcherIndexTemplateRegistry.TEMPLATE_NAMES) { + for (String template : watcherTemplates) { ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi("indices.exists_template", singletonMap("name", template), emptyList(), emptyMap()); assertThat(templateExistsResponse.getStatusCode(), is(200)); diff --git a/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherClientYamlTestSuiteIT.java b/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherClientYamlTestSuiteIT.java index 2097f9c0ce4..a1c98216146 100644 --- a/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherClientYamlTestSuiteIT.java +++ b/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherClientYamlTestSuiteIT.java @@ -17,6 +17,8 @@ import org.junit.After; import org.junit.Before; import java.io.IOException; +import java.util.Arrays; +import java.util.List; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; @@ -39,11 +41,12 @@ public class SmokeTestWatcherClientYamlTestSuiteIT extends ESClientYamlSuiteTest @Before public void startWatcher() throws Exception { + final List watcherTemplates = Arrays.asList(WatcherIndexTemplateRegistry.TEMPLATE_NAMES); assertBusy(() -> { try { getAdminExecutionContext().callApi("xpack.watcher.start", emptyMap(), emptyList(), emptyMap()); - for (String template : WatcherIndexTemplateRegistry.TEMPLATE_NAMES) { + for (String template : watcherTemplates) { ClientYamlTestResponse templateExistsResponse = getAdminExecutionContext().callApi("indices.exists_template", singletonMap("name", template), emptyList(), emptyMap()); assertThat(templateExistsResponse.getStatusCode(), is(200)); diff --git a/qa/tribe-tests-with-security/build.gradle b/qa/tribe-tests-with-security/build.gradle index 307b52c21bb..c8ef1299ae4 100644 --- a/qa/tribe-tests-with-security/build.gradle +++ b/qa/tribe-tests-with-security/build.gradle @@ -15,6 +15,8 @@ dependencies { namingConventions.skipIntegTestInDisguise = true +compileTestJava.options.compilerArgs << "-Xlint:-try" + String xpackPath = project(':x-pack-elasticsearch:plugin').projectDir.toPath().resolve('src/test/resources').toString() sourceSets { test { diff --git a/qa/tribe-tests-with-security/src/test/java/org/elasticsearch/xpack/security/SecurityTribeTests.java 
b/qa/tribe-tests-with-security/src/test/java/org/elasticsearch/xpack/security/SecurityTribeTests.java index 827a5b033ce..9339c8ce20e 100644 --- a/qa/tribe-tests-with-security/src/test/java/org/elasticsearch/xpack/security/SecurityTribeTests.java +++ b/qa/tribe-tests-with-security/src/test/java/org/elasticsearch/xpack/security/SecurityTribeTests.java @@ -144,8 +144,10 @@ public class SecurityTribeTests extends NativeRealmIntegTestCase { cluster2.wipe(Collections.emptySet()); try { // this is a hack to clean up the .security index since only the XPackSecurity user or superusers can delete it - internalSecurityClient(cluster2.client()) - .admin().indices().prepareDelete(IndexLifecycleManager.INTERNAL_SECURITY_INDEX).get(); + final Client cluster2Client = cluster2.client().filterWithHeader(Collections.singletonMap("Authorization", + UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, + SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING))); + cluster2Client.admin().indices().prepareDelete(IndexLifecycleManager.INTERNAL_SECURITY_INDEX).get(); } catch (IndexNotFoundException e) { // ignore it since not all tests create this index... }
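Editor's note: the SecurityTribeTests hunk above replaces the removed internal security client with a regular client that carries an Authorization header. A minimal sketch of that pattern follows, assuming the SecuritySettingsSource constants and the IndexLifecycleManager index name used in the hunk; the package of IndexLifecycleManager and the helper class name are assumptions, not part of this change.

// Sketch only: wrap any client so every request it sends is authenticated as the test superuser,
// which is enough to delete the restricted security index during cleanup.
import org.elasticsearch.client.Client;
import org.elasticsearch.test.SecuritySettingsSource;
import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.security.support.IndexLifecycleManager; // assumed package

import java.util.Collections;

final class SuperuserClientSketch {

    /** Returns a client whose requests carry a basic-auth header for the test superuser. */
    static Client superuserClient(Client client) {
        String authHeader = UsernamePasswordToken.basicAuthHeaderValue(
                SecuritySettingsSource.TEST_SUPERUSER, SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING);
        return client.filterWithHeader(Collections.singletonMap("Authorization", authHeader));
    }

    /** Example use: only a superuser may delete the restricted security index. */
    static void deleteSecurityIndex(Client client) {
        superuserClient(client).admin().indices().prepareDelete(IndexLifecycleManager.INTERNAL_SECURITY_INDEX).get();
    }
}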
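Editor's note: for reference, the WatcherIndexTemplateRegistryTests hunk earlier in this diff moves from stubbing client.execute(PutIndexTemplateAction.INSTANCE, ...) to stubbing the admin-client chain directly, with the listener now at argument index 1 of putTemplate. The sketch below restates that wiring outside the diff; the class and method names are illustrative, and null stands in for the response because the original test uses a small TestPutIndexTemplateResponse subclass instead.

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
import org.elasticsearch.client.AdminClient;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.IndicesAdminClient;

import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

final class PutTemplateStubSketch {

    @SuppressWarnings("unchecked")
    static Client clientAcceptingEveryTemplate() {
        Client client = mock(Client.class);
        AdminClient adminClient = mock(AdminClient.class);
        IndicesAdminClient indicesAdminClient = mock(IndicesAdminClient.class);
        when(client.admin()).thenReturn(adminClient);
        when(adminClient.indices()).thenReturn(indicesAdminClient);

        doAnswer(invocation -> {
            // putTemplate(PutIndexTemplateRequest request, ActionListener<PutIndexTemplateResponse> listener)
            ActionListener<PutIndexTemplateResponse> listener =
                    (ActionListener<PutIndexTemplateResponse>) invocation.getArguments()[1];
            listener.onResponse(null); // placeholder response for the sketch
            return null;
        }).when(indicesAdminClient).putTemplate(any(PutIndexTemplateRequest.class), any(ActionListener.class));

        return client;
    }
}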