Use system context for cluster state update tasks (#31241)

This commit ensures that cluster state update tasks always run under the system context; the
original context is restored only when the listener that was provided with the task is notified.
A notable exception is the clusterStatePublished(...) callback, which still runs under the system
context: it is defined at the executor level rather than the task level and is called only once
for the combined batch of tasks, so it cannot be associated with a single task's thread context.

Relates #30603
Yannick Welsch 2018-06-18 16:46:04 +02:00 committed by GitHub
parent 1502812c1a
commit 02a4ef38a7
19 changed files with 236 additions and 92 deletions
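
The core pattern: the submitting thread's context is captured via ThreadContext.newRestorableContext, the task itself is queued under the system context, and the captured context is restored around each listener notification. A minimal, self-contained sketch of that pattern (not code from this commit; it drives a plain ThreadContext directly, the way MasterService does internally):

import java.util.function.Supplier;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;

public class SystemContextSketch {
    public static void main(String[] args) {
        ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
        threadContext.putHeader("caller-header", "caller-value");

        // Capture a supplier that can later restore the caller's context.
        Supplier<ThreadContext.StoredContext> restore = threadContext.newRestorableContext(false);

        try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
            threadContext.markAsSystemContext();
            // The cluster state update task runs here: system context, no caller headers visible.
            assert threadContext.isSystemContext();
            assert threadContext.getHeader("caller-header") == null;

            // Listener notification: the caller's context is restored first.
            try (ThreadContext.StoredContext restored = restore.get()) {
                assert threadContext.isSystemContext() == false;
                assert "caller-value".equals(threadContext.getHeader("caller-header"));
            }
        }
    }
}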


@@ -41,6 +41,9 @@ public interface ClusterStateTaskExecutor<T> {
     /**
      * Callback invoked after new cluster state is published. Note that
      * this method is not invoked if the cluster state was not updated.
+     *
+     * Note that this method will be executed using system context.
+     *
      * @param clusterChangedEvent the change event for this cluster state change, containing
      * both old and new states
      */


@@ -62,6 +62,12 @@ public abstract class ClusterStateUpdateTask implements ClusterStateTaskConfig,
      */
     public abstract void onFailure(String source, Exception e);

+    @Override
+    public final void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) {
+        // final, empty implementation here as this method should only be defined in combination
+        // with a batching executor as it will always be executed within the system context.
+    }
+
     /**
      * If the cluster state update task wasn't processed by the provided timeout, call
      * {@link ClusterStateTaskListener#onFailure(String, Exception)}. May return null to indicate no timeout is needed (default).
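
For contrast, an executor-level callback has no single submitting task whose context could be restored. A hedged sketch of a batching executor (the class name and the String task type are illustrative, not from this commit):

import java.util.List;

import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateTaskExecutor;

class BatchingExecutorSketch implements ClusterStateTaskExecutor<String> {
    @Override
    public ClusterTasksResult<String> execute(ClusterState currentState, List<String> tasks) {
        // One execution covers the whole batch of submitted tasks.
        return ClusterTasksResult.<String>builder().successes(tasks).build(currentState);
    }

    @Override
    public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) {
        // Invoked once for the combined batch; with many submitters there is no
        // unique caller context to restore, so this runs under the system context
        // (which is why ClusterStateUpdateTask seals this method above).
    }
}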


@@ -47,6 +47,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
 import org.elasticsearch.common.util.concurrent.FutureUtils;
 import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.discovery.Discovery;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -59,6 +60,7 @@ import java.util.Objects;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.function.BiConsumer;
+import java.util.function.Supplier;
 import java.util.stream.Collectors;

 import static org.elasticsearch.cluster.service.ClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING;
@@ -426,26 +428,28 @@ public class MasterService extends AbstractLifecycleComponent {
         return threadPoolExecutor.getMaxTaskWaitTime();
     }

-    private SafeClusterStateTaskListener safe(ClusterStateTaskListener listener) {
+    private SafeClusterStateTaskListener safe(ClusterStateTaskListener listener, Supplier<ThreadContext.StoredContext> contextSupplier) {
         if (listener instanceof AckedClusterStateTaskListener) {
-            return new SafeAckedClusterStateTaskListener((AckedClusterStateTaskListener) listener, logger);
+            return new SafeAckedClusterStateTaskListener((AckedClusterStateTaskListener) listener, contextSupplier, logger);
         } else {
-            return new SafeClusterStateTaskListener(listener, logger);
+            return new SafeClusterStateTaskListener(listener, contextSupplier, logger);
         }
     }

     private static class SafeClusterStateTaskListener implements ClusterStateTaskListener {
         private final ClusterStateTaskListener listener;
+        protected final Supplier<ThreadContext.StoredContext> context;
         private final Logger logger;

-        SafeClusterStateTaskListener(ClusterStateTaskListener listener, Logger logger) {
+        SafeClusterStateTaskListener(ClusterStateTaskListener listener, Supplier<ThreadContext.StoredContext> context, Logger logger) {
             this.listener = listener;
+            this.context = context;
             this.logger = logger;
         }

         @Override
         public void onFailure(String source, Exception e) {
-            try {
+            try (ThreadContext.StoredContext ignore = context.get()) {
                 listener.onFailure(source, e);
             } catch (Exception inner) {
                 inner.addSuppressed(e);
@@ -456,7 +460,7 @@ public class MasterService extends AbstractLifecycleComponent {

         @Override
         public void onNoLongerMaster(String source) {
-            try {
+            try (ThreadContext.StoredContext ignore = context.get()) {
                 listener.onNoLongerMaster(source);
             } catch (Exception e) {
                 logger.error(() -> new ParameterizedMessage(
@@ -466,7 +470,7 @@ public class MasterService extends AbstractLifecycleComponent {

         @Override
         public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
-            try {
+            try (ThreadContext.StoredContext ignore = context.get()) {
                 listener.clusterStateProcessed(source, oldState, newState);
             } catch (Exception e) {
                 logger.error(() -> new ParameterizedMessage(
@@ -480,8 +484,9 @@ public class MasterService extends AbstractLifecycleComponent {
         private final AckedClusterStateTaskListener listener;
         private final Logger logger;

-        SafeAckedClusterStateTaskListener(AckedClusterStateTaskListener listener, Logger logger) {
-            super(listener, logger);
+        SafeAckedClusterStateTaskListener(AckedClusterStateTaskListener listener, Supplier<ThreadContext.StoredContext> context,
+                                          Logger logger) {
+            super(listener, context, logger);
             this.listener = listener;
             this.logger = logger;
         }
@@ -493,7 +498,7 @@ public class MasterService extends AbstractLifecycleComponent {

         @Override
         public void onAllNodesAcked(@Nullable Exception e) {
-            try {
+            try (ThreadContext.StoredContext ignore = context.get()) {
                 listener.onAllNodesAcked(e);
             } catch (Exception inner) {
                 inner.addSuppressed(e);
@@ -503,7 +508,7 @@ public class MasterService extends AbstractLifecycleComponent {

         @Override
         public void onAckTimeout() {
-            try {
+            try (ThreadContext.StoredContext ignore = context.get()) {
                 listener.onAckTimeout();
             } catch (Exception e) {
                 logger.error("exception thrown by listener while notifying on ack timeout", e);
@@ -724,9 +729,13 @@ public class MasterService extends AbstractLifecycleComponent {
         if (!lifecycle.started()) {
             return;
         }
-        try {
+        final ThreadContext threadContext = threadPool.getThreadContext();
+        final Supplier<ThreadContext.StoredContext> supplier = threadContext.newRestorableContext(false);
+        try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
+            threadContext.markAsSystemContext();
+
             List<Batcher.UpdateTask> safeTasks = tasks.entrySet().stream()
-                .map(e -> taskBatcher.new UpdateTask(config.priority(), source, e.getKey(), safe(e.getValue()), executor))
+                .map(e -> taskBatcher.new UpdateTask(config.priority(), source, e.getKey(), safe(e.getValue(), supplier), executor))
                 .collect(Collectors.toList());
             taskBatcher.submitTasks(safeTasks, config.timeout());
         } catch (EsRejectedExecutionException e) {


@@ -556,7 +556,6 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo

                 @Override
                 public void handleResponse(ClusterStateResponse response) {
-                    assert transportService.getThreadPool().getThreadContext().isSystemContext() == false : "context is a system context";
                     try {
                         if (remoteClusterName.get() == null) {
                             assert response.getClusterName().value() != null;
@@ -597,7 +596,6 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo

                 @Override
                 public void handleException(TransportException exp) {
-                    assert transportService.getThreadPool().getThreadContext().isSystemContext() == false : "context is a system context";
                     logger.warn(() -> new ParameterizedMessage("fetching nodes from external cluster {} failed", clusterAlias), exp);
                     try {
                         IOUtils.closeWhileHandlingException(connection);


@@ -34,12 +34,14 @@ import org.elasticsearch.cluster.LocalClusterUpdateTask;
 import org.elasticsearch.cluster.block.ClusterBlocks;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.BaseFuture;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.discovery.Discovery;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.MockLogAppender;
@@ -52,6 +54,7 @@ import org.junit.Before;
 import org.junit.BeforeClass;

 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -168,6 +171,85 @@ public class MasterServiceTests extends ESTestCase {
         nonMaster.close();
     }

+    public void testThreadContext() throws InterruptedException {
+        final TimedMasterService master = createTimedMasterService(true);
+        final CountDownLatch latch = new CountDownLatch(1);
+
+        try (ThreadContext.StoredContext ignored = threadPool.getThreadContext().stashContext()) {
+            final Map<String, String> expectedHeaders = Collections.singletonMap("test", "test");
+            threadPool.getThreadContext().putHeader(expectedHeaders);
+
+            final TimeValue ackTimeout = randomBoolean() ? TimeValue.ZERO : TimeValue.timeValueMillis(randomInt(10000));
+            final TimeValue masterTimeout = randomBoolean() ? TimeValue.ZERO : TimeValue.timeValueMillis(randomInt(10000));
+
+            master.submitStateUpdateTask("test", new AckedClusterStateUpdateTask<Void>(null, null) {
+                @Override
+                public ClusterState execute(ClusterState currentState) {
+                    assertTrue(threadPool.getThreadContext().isSystemContext());
+                    assertEquals(Collections.emptyMap(), threadPool.getThreadContext().getHeaders());
+
+                    if (randomBoolean()) {
+                        return ClusterState.builder(currentState).build();
+                    } else if (randomBoolean()) {
+                        return currentState;
+                    } else {
+                        throw new IllegalArgumentException("mock failure");
+                    }
+                }
+
+                @Override
+                public void onFailure(String source, Exception e) {
+                    assertFalse(threadPool.getThreadContext().isSystemContext());
+                    assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders());
+                    latch.countDown();
+                }
+
+                @Override
+                public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
+                    assertFalse(threadPool.getThreadContext().isSystemContext());
+                    assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders());
+                    latch.countDown();
+                }
+
+                @Override
+                protected Void newResponse(boolean acknowledged) {
+                    return null;
+                }
+
+                public TimeValue ackTimeout() {
+                    return ackTimeout;
+                }
+
+                @Override
+                public TimeValue timeout() {
+                    return masterTimeout;
+                }
+
+                @Override
+                public void onAllNodesAcked(@Nullable Exception e) {
+                    assertFalse(threadPool.getThreadContext().isSystemContext());
+                    assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders());
+                    latch.countDown();
+                }
+
+                @Override
+                public void onAckTimeout() {
+                    assertFalse(threadPool.getThreadContext().isSystemContext());
+                    assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders());
+                    latch.countDown();
+                }
+
+            });
+
+            assertFalse(threadPool.getThreadContext().isSystemContext());
+            assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders());
+        }
+
+        latch.await();
+
+        master.close();
+    }
+
     /*
      * test that a listener throwing an exception while handling a
      * notification does not prevent publication notification to the


@@ -20,7 +20,6 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -293,7 +292,7 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom {
             return this;
         }

-        public Builder putDatafeed(DatafeedConfig datafeedConfig, ThreadContext threadContext) {
+        public Builder putDatafeed(DatafeedConfig datafeedConfig, Map<String, String> headers) {
             if (datafeeds.containsKey(datafeedConfig.getId())) {
                 throw new ResourceAlreadyExistsException("A datafeed with id [" + datafeedConfig.getId() + "] already exists");
             }
@@ -302,13 +301,13 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom {
             Job job = jobs.get(jobId);
             DatafeedJobValidator.validate(datafeedConfig, job);

-            if (threadContext != null) {
+            if (headers.isEmpty() == false) {
                 // Adjust the request, adding security headers from the current thread context
                 DatafeedConfig.Builder builder = new DatafeedConfig.Builder(datafeedConfig);
-                Map<String, String> headers = threadContext.getHeaders().entrySet().stream()
+                Map<String, String> securityHeaders = headers.entrySet().stream()
                         .filter(e -> ClientHelper.SECURITY_HEADER_FILTERS.contains(e.getKey()))
                         .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
-                builder.setHeaders(headers);
+                builder.setHeaders(securityHeaders);
                 datafeedConfig = builder.build();
             }

@@ -328,7 +327,7 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom {
             }
         }

-        public Builder updateDatafeed(DatafeedUpdate update, PersistentTasksCustomMetaData persistentTasks, ThreadContext threadContext) {
+        public Builder updateDatafeed(DatafeedUpdate update, PersistentTasksCustomMetaData persistentTasks, Map<String, String> headers) {
             String datafeedId = update.getId();
             DatafeedConfig oldDatafeedConfig = datafeeds.get(datafeedId);
             if (oldDatafeedConfig == null) {
@@ -336,7 +335,7 @@ public class MlMetadata implements XPackPlugin.XPackMetaDataCustom {
             }
             checkDatafeedIsStopped(() -> Messages.getMessage(Messages.DATAFEED_CANNOT_UPDATE_IN_CURRENT_STATE, datafeedId,
                     DatafeedState.STARTED), datafeedId, persistentTasks);
-            DatafeedConfig newDatafeedConfig = update.apply(oldDatafeedConfig, threadContext);
+            DatafeedConfig newDatafeedConfig = update.apply(oldDatafeedConfig, headers);
             if (newDatafeedConfig.getJobId().equals(oldDatafeedConfig.getJobId()) == false) {
                 checkJobIsAvailableForDatafeed(newDatafeedConfig.getJobId());
             }


@@ -12,7 +12,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -264,7 +263,7 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
      * Applies the update to the given {@link DatafeedConfig}
      * @return a new {@link DatafeedConfig} that contains the update
      */
-    public DatafeedConfig apply(DatafeedConfig datafeedConfig, ThreadContext threadContext) {
+    public DatafeedConfig apply(DatafeedConfig datafeedConfig, Map<String, String> headers) {
         if (id.equals(datafeedConfig.getId()) == false) {
             throw new IllegalArgumentException("Cannot apply update to datafeedConfig with different id");
         }
@@ -301,12 +300,12 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
             builder.setChunkingConfig(chunkingConfig);
         }

-        if (threadContext != null) {
+        if (headers.isEmpty() == false) {
             // Adjust the request, adding security headers from the current thread context
-            Map<String, String> headers = threadContext.getHeaders().entrySet().stream()
+            Map<String, String> securityHeaders = headers.entrySet().stream()
                     .filter(e -> ClientHelper.SECURITY_HEADER_FILTERS.contains(e.getKey()))
                     .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
-            builder.setHeaders(headers);
+            builder.setHeaders(securityHeaders);
         }

         return builder.build();


@@ -114,7 +114,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
     public void testApply_givenEmptyUpdate() {
         DatafeedConfig datafeed = DatafeedConfigTests.createRandomizedDatafeedConfig("foo");
-        DatafeedConfig updatedDatafeed = new DatafeedUpdate.Builder(datafeed.getId()).build().apply(datafeed, null);
+        DatafeedConfig updatedDatafeed = new DatafeedUpdate.Builder(datafeed.getId()).build().apply(datafeed, Collections.emptyMap());
         assertThat(datafeed, equalTo(updatedDatafeed));
     }
@@ -125,7 +125,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
         DatafeedUpdate.Builder updated = new DatafeedUpdate.Builder(datafeed.getId());
         updated.setScrollSize(datafeed.getScrollSize() + 1);
-        DatafeedConfig updatedDatafeed = update.build().apply(datafeed, null);
+        DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap());

         DatafeedConfig.Builder expectedDatafeed = new DatafeedConfig.Builder(datafeed);
         expectedDatafeed.setScrollSize(datafeed.getScrollSize() + 1);
@@ -149,7 +149,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
         update.setScrollSize(8000);
         update.setChunkingConfig(ChunkingConfig.newManual(TimeValue.timeValueHours(1)));

-        DatafeedConfig updatedDatafeed = update.build().apply(datafeed, null);
+        DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap());

         assertThat(updatedDatafeed.getJobId(), equalTo("bar"));
         assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_2")));
@@ -175,7 +175,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
         update.setAggregations(new AggregatorFactories.Builder().addAggregator(
                 AggregationBuilders.histogram("a").interval(300000).field("time").subAggregation(maxTime)));

-        DatafeedConfig updatedDatafeed = update.build().apply(datafeed, null);
+        DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap());

         assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_1")));
         assertThat(updatedDatafeed.getTypes(), equalTo(Collections.singletonList("t_1")));


@@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
 import org.elasticsearch.client.Client;
-import org.elasticsearch.cluster.ClusterChangedEvent;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ClusterStateObserver;
 import org.elasticsearch.cluster.ClusterStateUpdateTask;
@@ -213,7 +212,7 @@ public class TransportDeleteJobAction extends TransportMasterNodeAction<DeleteJo
                 }

                 @Override
-                public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) {
+                public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                     logger.debug("Job [" + jobId + "] is successfully marked as deleted");
                     listener.onResponse(true);
                 }


@@ -41,6 +41,7 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
 import org.elasticsearch.xpack.core.security.support.Exceptions;

 import java.io.IOException;
+import java.util.Map;

 public class TransportPutDatafeedAction extends TransportMasterNodeAction<PutDatafeedAction.Request, PutDatafeedAction.Response> {

@@ -95,7 +96,7 @@ public class TransportPutDatafeedAction extends TransportMasterNodeAction<PutDat
             client.execute(HasPrivilegesAction.INSTANCE, privRequest, privResponseListener);
         } else {
-            putDatafeed(request, listener);
+            putDatafeed(request, threadPool.getThreadContext().getHeaders(), listener);
         }
     }
@@ -103,7 +104,7 @@ public class TransportPutDatafeedAction extends TransportMasterNodeAction<PutDat
                                      HasPrivilegesResponse response,
                                      ActionListener<PutDatafeedAction.Response> listener) throws IOException {
         if (response.isCompleteMatch()) {
-            putDatafeed(request, listener);
+            putDatafeed(request, threadPool.getThreadContext().getHeaders(), listener);
         } else {
             XContentBuilder builder = JsonXContent.contentBuilder();
             builder.startObject();
@@ -120,7 +121,8 @@ public class TransportPutDatafeedAction extends TransportMasterNodeAction<PutDat
         }
     }

-    private void putDatafeed(PutDatafeedAction.Request request, ActionListener<PutDatafeedAction.Response> listener) {
+    private void putDatafeed(PutDatafeedAction.Request request, Map<String, String> headers,
+                             ActionListener<PutDatafeedAction.Response> listener) {

         clusterService.submitStateUpdateTask(
                 "put-datafeed-" + request.getDatafeed().getId(),
@@ -136,16 +138,16 @@ public class TransportPutDatafeedAction extends TransportMasterNodeAction<PutDat

                     @Override
                     public ClusterState execute(ClusterState currentState) {
-                        return putDatafeed(request, currentState);
+                        return putDatafeed(request, headers, currentState);
                     }
                 });
     }

-    private ClusterState putDatafeed(PutDatafeedAction.Request request, ClusterState clusterState) {
+    private ClusterState putDatafeed(PutDatafeedAction.Request request, Map<String, String> headers, ClusterState clusterState) {
         XPackPlugin.checkReadyForXPackCustomMetadata(clusterState);
         MlMetadata currentMetadata = MlMetadata.getMlMetadata(clusterState);
         MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata)
-                .putDatafeed(request.getDatafeed(), threadPool.getThreadContext()).build();
+                .putDatafeed(request.getDatafeed(), headers).build();
         return ClusterState.builder(clusterState).metaData(
                 MetaData.builder(clusterState.getMetaData()).putCustom(MLMetadataField.TYPE, newMetadata).build())
                 .build();


@@ -27,6 +27,8 @@ import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
 import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate;
 import org.elasticsearch.persistent.PersistentTasksCustomMetaData;

+import java.util.Map;
+
 public class TransportUpdateDatafeedAction extends TransportMasterNodeAction<UpdateDatafeedAction.Request, PutDatafeedAction.Response> {

     @Inject
@@ -50,6 +52,8 @@ public class TransportUpdateDatafeedAction extends TransportMasterNodeAction<Upd
     @Override
     protected void masterOperation(UpdateDatafeedAction.Request request, ClusterState state,
                                    ActionListener<PutDatafeedAction.Response> listener) {
+        final Map<String, String> headers = threadPool.getThreadContext().getHeaders();
+
         clusterService.submitStateUpdateTask("update-datafeed-" + request.getUpdate().getId(),
                 new AckedClusterStateUpdateTask<PutDatafeedAction.Response>(request, listener) {
                     private volatile DatafeedConfig updatedDatafeed;
@@ -69,7 +73,7 @@ public class TransportUpdateDatafeedAction extends TransportMasterNodeAction<Upd
                         PersistentTasksCustomMetaData persistentTasks =
                                 currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE);
                         MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata)
-                                .updateDatafeed(update, persistentTasks, threadPool.getThreadContext()).build();
+                                .updateDatafeed(update, persistentTasks, headers).build();
                         updatedDatafeed = newMetadata.getDatafeed(update.getId());
                         return ClusterState.builder(currentState).metaData(
                                 MetaData.builder(currentState.getMetaData()).putCustom(MLMetadataField.TYPE, newMetadata).build()).build();
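
A side effect of running tasks under the system context: execute(ClusterState) no longer sees the submitting user's headers, so these ML actions now capture threadPool.getThreadContext().getHeaders() on the calling thread and hand the plain map down; MlMetadata.Builder then keeps only the security-relevant entries. A standalone sketch of that filtering step (the header names are assumptions standing in for ClientHelper.SECURITY_HEADER_FILTERS):

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

public class SecurityHeaderFilterSketch {
    // Stand-in for ClientHelper.SECURITY_HEADER_FILTERS (assumed values).
    private static final Set<String> SECURITY_HEADER_FILTERS =
            new HashSet<>(Arrays.asList("_xpack_security_authentication", "es-security-runas-user"));

    public static void main(String[] args) {
        Map<String, String> callerHeaders = new HashMap<>();
        callerHeaders.put("unrelated_header", "unrelated_header_value");
        callerHeaders.put("es-security-runas-user", "permitted_run_as_user");

        // Keep only the security-relevant headers, as MlMetadata.Builder does.
        Map<String, String> securityHeaders = callerHeaders.entrySet().stream()
                .filter(e -> SECURITY_HEADER_FILTERS.contains(e.getKey()))
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

        System.out.println(securityHeaders); // {es-security-runas-user=permitted_run_as_user}
    }
}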


@@ -11,7 +11,6 @@ import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
-import org.elasticsearch.cluster.ClusterChangedEvent;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ClusterStateUpdateTask;
 import org.elasticsearch.cluster.metadata.MetaData;
@@ -347,8 +346,8 @@ public class JobManager extends AbstractComponent {
                 }

                 @Override
-                public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) {
-                    afterClusterStateUpdate(clusterChangedEvent.state(), request);
+                public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
+                    afterClusterStateUpdate(newState, request);
                     actionListener.onResponse(new PutJobAction.Response(updatedJob.get()));
                 }
         });


@@ -30,9 +30,11 @@ import org.elasticsearch.xpack.core.ml.job.config.JobState;
 import org.elasticsearch.xpack.core.ml.job.config.JobTaskState;
 import org.elasticsearch.xpack.core.ml.job.config.JobTests;
 import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
+import org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField;

 import java.util.Collections;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.Map;

 import static org.elasticsearch.xpack.core.ml.job.config.JobTests.buildJobBuilder;
@@ -42,6 +44,7 @@ import static org.elasticsearch.xpack.ml.datafeed.DatafeedManagerTests.createDat
 import static org.elasticsearch.xpack.ml.datafeed.DatafeedManagerTests.createDatafeedJob;
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasEntry;
 import static org.hamcrest.Matchers.nullValue;
 import static org.hamcrest.Matchers.sameInstance;
@@ -63,7 +66,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
                 }
                 job = new Job.Builder(job).setAnalysisConfig(analysisConfig).build();
                 builder.putJob(job, false);
-                builder.putDatafeed(datafeedConfig, null);
+                builder.putDatafeed(datafeedConfig, Collections.emptyMap());
             } else {
                 builder.putJob(job, false);
             }
@@ -164,7 +167,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
         DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
         MlMetadata.Builder builder = new MlMetadata.Builder();
         builder.putJob(job1, false);
-        builder.putDatafeed(datafeedConfig1, null);
+        builder.putDatafeed(datafeedConfig1, Collections.emptyMap());

         ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
                 () -> builder.deleteJob(job1.getId(), new PersistentTasksCustomMetaData(0L, Collections.emptyMap())));
@@ -184,7 +187,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
         DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
         MlMetadata.Builder builder = new MlMetadata.Builder();
         builder.putJob(job1, false);
-        builder.putDatafeed(datafeedConfig1, null);
+        builder.putDatafeed(datafeedConfig1, Collections.emptyMap());

         MlMetadata result = builder.build();
         assertThat(result.getJobs().get("job_id"), sameInstance(job1));
@@ -201,7 +204,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
         DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", "missing-job").build();
         MlMetadata.Builder builder = new MlMetadata.Builder();

-        expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1, null));
+        expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1, Collections.emptyMap()));
     }

     public void testPutDatafeed_failBecauseJobIsBeingDeleted() {
@@ -210,7 +213,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
         MlMetadata.Builder builder = new MlMetadata.Builder();
         builder.putJob(job1, false);

-        expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1, null));
+        expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1, Collections.emptyMap()));
     }

     public void testPutDatafeed_failBecauseDatafeedIdIsAlreadyTaken() {
@@ -218,9 +221,9 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
         DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
         MlMetadata.Builder builder = new MlMetadata.Builder();
         builder.putJob(job1, false);
-        builder.putDatafeed(datafeedConfig1, null);
+        builder.putDatafeed(datafeedConfig1, Collections.emptyMap());

-        expectThrows(ResourceAlreadyExistsException.class, () -> builder.putDatafeed(datafeedConfig1, null));
+        expectThrows(ResourceAlreadyExistsException.class, () -> builder.putDatafeed(datafeedConfig1, Collections.emptyMap()));
     }

     public void testPutDatafeed_failBecauseJobAlreadyHasDatafeed() {
@@ -229,10 +232,10 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
         DatafeedConfig datafeedConfig2 = createDatafeedConfig("datafeed2", job1.getId()).build();
         MlMetadata.Builder builder = new MlMetadata.Builder();
         builder.putJob(job1, false);
-        builder.putDatafeed(datafeedConfig1, null);
+        builder.putDatafeed(datafeedConfig1, Collections.emptyMap());

         ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
-                () -> builder.putDatafeed(datafeedConfig2, null));
+                () -> builder.putDatafeed(datafeedConfig2, Collections.emptyMap()));
         assertThat(e.status(), equalTo(RestStatus.CONFLICT));
     }

@@ -246,7 +249,23 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
         MlMetadata.Builder builder = new MlMetadata.Builder();
         builder.putJob(job1.build(now), false);

-        expectThrows(ElasticsearchStatusException.class, () -> builder.putDatafeed(datafeedConfig1, null));
+        expectThrows(ElasticsearchStatusException.class, () -> builder.putDatafeed(datafeedConfig1, Collections.emptyMap()));
+    }
+
+    public void testPutDatafeed_setsSecurityHeaders() {
+        Job datafeedJob = createDatafeedJob().build(new Date());
+        DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed1", datafeedJob.getId()).build();
+        MlMetadata.Builder builder = new MlMetadata.Builder();
+        builder.putJob(datafeedJob, false);
+
+        Map<String, String> headers = new HashMap<>();
+        headers.put("unrelated_header", "unrelated_header_value");
+        headers.put(AuthenticationServiceField.RUN_AS_USER_HEADER, "permitted_run_as_user");
+        builder.putDatafeed(datafeedConfig, headers);
+        MlMetadata metadata = builder.build();
+        assertThat(metadata.getDatafeed("datafeed1").getHeaders().size(), equalTo(1));
+        assertThat(metadata.getDatafeed("datafeed1").getHeaders(),
+                hasEntry(AuthenticationServiceField.RUN_AS_USER_HEADER, "permitted_run_as_user"));
     }

     public void testUpdateDatafeed() {
@@ -254,12 +273,13 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
         DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
         MlMetadata.Builder builder = new MlMetadata.Builder();
         builder.putJob(job1, false);
-        builder.putDatafeed(datafeedConfig1, null);
+        builder.putDatafeed(datafeedConfig1, Collections.emptyMap());
         MlMetadata beforeMetadata = builder.build();

         DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig1.getId());
         update.setScrollSize(5000);
-        MlMetadata updatedMetadata = new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, null).build();
+        MlMetadata updatedMetadata =
+                new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, Collections.emptyMap()).build();

         DatafeedConfig updatedDatafeed = updatedMetadata.getDatafeed(datafeedConfig1.getId());
         assertThat(updatedDatafeed.getJobId(), equalTo(datafeedConfig1.getJobId()));
@@ -271,7 +291,8 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
     public void testUpdateDatafeed_failBecauseDatafeedDoesNotExist() {
         DatafeedUpdate.Builder update = new DatafeedUpdate.Builder("job_id");
         update.setScrollSize(5000);
-        expectThrows(ResourceNotFoundException.class, () -> new MlMetadata.Builder().updateDatafeed(update.build(), null, null).build());
+        expectThrows(ResourceNotFoundException.class,
+                () -> new MlMetadata.Builder().updateDatafeed(update.build(), null, Collections.emptyMap()).build());
     }

     public void testUpdateDatafeed_failBecauseDatafeedIsNotStopped() {
@@ -279,7 +300,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
         DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
         MlMetadata.Builder builder = new MlMetadata.Builder();
         builder.putJob(job1, false);
-        builder.putDatafeed(datafeedConfig1, null);
+        builder.putDatafeed(datafeedConfig1, Collections.emptyMap());
         MlMetadata beforeMetadata = builder.build();

         PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();
@@ -300,14 +321,14 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
         DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
         MlMetadata.Builder builder = new MlMetadata.Builder();
         builder.putJob(job1, false);
-        builder.putDatafeed(datafeedConfig1, null);
+        builder.putDatafeed(datafeedConfig1, Collections.emptyMap());
         MlMetadata beforeMetadata = builder.build();

         DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig1.getId());
         update.setJobId(job1.getId() + "_2");

         expectThrows(ResourceNotFoundException.class,
-                () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, null));
+                () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, Collections.emptyMap()));
     }

     public void testUpdateDatafeed_failBecauseNewJobHasAnotherDatafeedAttached() {
@@ -319,25 +340,46 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
         MlMetadata.Builder builder = new MlMetadata.Builder();
         builder.putJob(job1, false);
         builder.putJob(job2.build(), false);
-        builder.putDatafeed(datafeedConfig1, null);
-        builder.putDatafeed(datafeedConfig2, null);
+        builder.putDatafeed(datafeedConfig1, Collections.emptyMap());
+        builder.putDatafeed(datafeedConfig2, Collections.emptyMap());
         MlMetadata beforeMetadata = builder.build();

         DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig1.getId());
         update.setJobId(job2.getId());

         ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
-                () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, null));
+                () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, Collections.emptyMap()));
         assertThat(e.status(), equalTo(RestStatus.CONFLICT));
         assertThat(e.getMessage(), equalTo("A datafeed [datafeed2] already exists for job [job_id_2]"));
     }

+    public void testUpdateDatafeed_setsSecurityHeaders() {
+        Job datafeedJob = createDatafeedJob().build(new Date());
+        DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed1", datafeedJob.getId()).build();
+        MlMetadata.Builder builder = new MlMetadata.Builder();
+        builder.putJob(datafeedJob, false);
+        builder.putDatafeed(datafeedConfig, Collections.emptyMap());
+        MlMetadata beforeMetadata = builder.build();
+        assertTrue(beforeMetadata.getDatafeed("datafeed1").getHeaders().isEmpty());
+
+        DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig.getId());
+        update.setQueryDelay(TimeValue.timeValueMinutes(5));
+
+        Map<String, String> headers = new HashMap<>();
+        headers.put("unrelated_header", "unrelated_header_value");
+        headers.put(AuthenticationServiceField.RUN_AS_USER_HEADER, "permitted_run_as_user");
+        MlMetadata afterMetadata = new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, headers).build();
+        Map<String, String> updatedHeaders = afterMetadata.getDatafeed("datafeed1").getHeaders();
+        assertThat(updatedHeaders.size(), equalTo(1));
+        assertThat(updatedHeaders, hasEntry(AuthenticationServiceField.RUN_AS_USER_HEADER, "permitted_run_as_user"));
+    }
+
     public void testRemoveDatafeed_failBecauseDatafeedStarted() {
         Job job1 = createDatafeedJob().build(new Date());
         DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
         MlMetadata.Builder builder = new MlMetadata.Builder();
         builder.putJob(job1, false);
-        builder.putDatafeed(datafeedConfig1, null);
+        builder.putDatafeed(datafeedConfig1, Collections.emptyMap());

         MlMetadata result = builder.build();
         assertThat(result.getJobs().get("job_id"), sameInstance(job1));
@@ -378,9 +420,9 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
     public void testExpandDatafeedIds() {
         MlMetadata.Builder mlMetadataBuilder = newMlMetadataWithJobs("bar-1", "foo-1", "foo-2");
-        mlMetadataBuilder.putDatafeed(createDatafeedConfig("bar-1-feed", "bar-1").build(), null);
-        mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-1-feed", "foo-1").build(), null);
-        mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-2-feed", "foo-2").build(), null);
+        mlMetadataBuilder.putDatafeed(createDatafeedConfig("bar-1-feed", "bar-1").build(), Collections.emptyMap());
+        mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-1-feed", "foo-1").build(), Collections.emptyMap());
+        mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-2-feed", "foo-2").build(), Collections.emptyMap());
         MlMetadata mlMetadata = mlMetadataBuilder.build();

@@ -409,7 +451,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
             metadataBuilder.putJob(entry.getValue(), true);
         }
         for (Map.Entry<String, DatafeedConfig> entry : datafeeds.entrySet()) {
-            metadataBuilder.putDatafeed(entry.getValue(), null);
+            metadataBuilder.putDatafeed(entry.getValue(), Collections.emptyMap());
         }

         switch (between(0, 1)) {
@@ -430,7 +472,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
             }
             randomJob = new Job.Builder(randomJob).setAnalysisConfig(analysisConfig).build();
             metadataBuilder.putJob(randomJob, false);
-            metadataBuilder.putDatafeed(datafeedConfig, null);
+            metadataBuilder.putDatafeed(datafeedConfig, Collections.emptyMap());
             break;
         default:
             throw new AssertionError("Illegal randomisation branch");


@@ -51,7 +51,7 @@ public class TransportCloseJobActionTests extends ESTestCase {
         MlMetadata.Builder mlBuilder = new MlMetadata.Builder();
         mlBuilder.putJob(BaseMlIntegTestCase.createScheduledJob("job_id").build(new Date()), false);
         mlBuilder.putDatafeed(BaseMlIntegTestCase.createDatafeed("datafeed_id", "job_id",
-                Collections.singletonList("*")), null);
+                Collections.singletonList("*")), Collections.emptyMap());
         final PersistentTasksCustomMetaData.Builder startDataFeedTaskBuilder = PersistentTasksCustomMetaData.builder();
         addJobTask("job_id", null, JobState.OPENED, startDataFeedTaskBuilder);
         addTask("datafeed_id", 0L, null, DatafeedState.STARTED, startDataFeedTaskBuilder);


@@ -45,7 +45,7 @@ public class TransportStartDatafeedActionTests extends ESTestCase {
         PersistentTasksCustomMetaData tasks = PersistentTasksCustomMetaData.builder().build();
         DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build();
         MlMetadata mlMetadata2 = new MlMetadata.Builder(mlMetadata1)
-                .putDatafeed(datafeedConfig1, null)
+                .putDatafeed(datafeedConfig1, Collections.emptyMap())
                 .build();
         Exception e = expectThrows(ElasticsearchStatusException.class,
                 () -> TransportStartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks));
@@ -62,7 +62,7 @@ public class TransportStartDatafeedActionTests extends ESTestCase {
         PersistentTasksCustomMetaData tasks = tasksBuilder.build();
         DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build();
         MlMetadata mlMetadata2 = new MlMetadata.Builder(mlMetadata1)
-                .putDatafeed(datafeedConfig1, null)
+                .putDatafeed(datafeedConfig1, Collections.emptyMap())
                 .build();

         TransportStartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks);
@@ -78,7 +78,7 @@ public class TransportStartDatafeedActionTests extends ESTestCase {
         PersistentTasksCustomMetaData tasks = tasksBuilder.build();
         DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build();
         MlMetadata mlMetadata2 = new MlMetadata.Builder(mlMetadata1)
-                .putDatafeed(datafeedConfig1, null)
+                .putDatafeed(datafeedConfig1, Collections.emptyMap())
                 .build();

         TransportStartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks);


@ -42,7 +42,7 @@ public class TransportStopDatafeedActionTests extends ESTestCase {
DatafeedConfig datafeedConfig = createDatafeedConfig("foo", "job_id").build(); DatafeedConfig datafeedConfig = createDatafeedConfig("foo", "job_id").build();
MlMetadata mlMetadata2 = new MlMetadata.Builder().putJob(job, false) MlMetadata mlMetadata2 = new MlMetadata.Builder().putJob(job, false)
.putDatafeed(datafeedConfig, null) .putDatafeed(datafeedConfig, Collections.emptyMap())
.build(); .build();
TransportStopDatafeedAction.validateDatafeedTask("foo", mlMetadata2); TransportStopDatafeedAction.validateDatafeedTask("foo", mlMetadata2);
} }
@@ -54,12 +54,12 @@ public class TransportStopDatafeedActionTests extends ESTestCase {
         addTask("datafeed_1", 0L, "node-1", DatafeedState.STARTED, tasksBuilder);
         Job job = BaseMlIntegTestCase.createScheduledJob("job_id_1").build(new Date());
         DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed_1", "job_id_1").build();
-        mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null);
+        mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, Collections.emptyMap());

         addTask("datafeed_2", 0L, "node-1", DatafeedState.STOPPED, tasksBuilder);
         job = BaseMlIntegTestCase.createScheduledJob("job_id_2").build(new Date());
         datafeedConfig = createDatafeedConfig("datafeed_2", "job_id_2").build();
-        mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null);
+        mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, Collections.emptyMap());

         PersistentTasksCustomMetaData tasks = tasksBuilder.build();
         MlMetadata mlMetadata = mlMetadataBuilder.build();
@@ -86,17 +86,17 @@ public class TransportStopDatafeedActionTests extends ESTestCase {
         addTask("datafeed_1", 0L, "node-1", DatafeedState.STARTED, tasksBuilder);
         Job job = BaseMlIntegTestCase.createScheduledJob("job_id_1").build(new Date());
         DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed_1", "job_id_1").build();
-        mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null);
+        mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, Collections.emptyMap());

         addTask("datafeed_2", 0L, "node-1", DatafeedState.STOPPED, tasksBuilder);
         job = BaseMlIntegTestCase.createScheduledJob("job_id_2").build(new Date());
         datafeedConfig = createDatafeedConfig("datafeed_2", "job_id_2").build();
-        mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null);
+        mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, Collections.emptyMap());

         addTask("datafeed_3", 0L, "node-1", DatafeedState.STOPPING, tasksBuilder);
         job = BaseMlIntegTestCase.createScheduledJob("job_id_3").build(new Date());
         datafeedConfig = createDatafeedConfig("datafeed_3", "job_id_3").build();
-        mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null);
+        mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, Collections.emptyMap());

         PersistentTasksCustomMetaData tasks = tasksBuilder.build();
         MlMetadata mlMetadata = mlMetadataBuilder.build();

@@ -84,7 +84,7 @@ public class DatafeedManagerTests extends ESTestCase {
         Job job = createDatafeedJob().build(new Date());
         mlMetadata.putJob(job, false);
         DatafeedConfig datafeed = createDatafeedConfig("datafeed_id", job.getId()).build();
-        mlMetadata.putDatafeed(datafeed, null);
+        mlMetadata.putDatafeed(datafeed, Collections.emptyMap());
         PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();
         addJobTask(job.getId(), "node_id", JobState.OPENED, tasksBuilder);
         PersistentTasksCustomMetaData tasks = tasksBuilder.build();

@@ -68,7 +68,7 @@ public class DatafeedNodeSelectorTests extends ESTestCase {
         MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder();
         Job job = createScheduledJob("job_id").build(new Date());
         mlMetadataBuilder.putJob(job, false);
-        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null);
+        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), Collections.emptyMap());
         mlMetadata = mlMetadataBuilder.build();

         PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();
@@ -86,7 +86,7 @@ public class DatafeedNodeSelectorTests extends ESTestCase {
         MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder();
         Job job = createScheduledJob("job_id").build(new Date());
         mlMetadataBuilder.putJob(job, false);
-        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null);
+        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), Collections.emptyMap());
         mlMetadata = mlMetadataBuilder.build();

         PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();
@@ -106,7 +106,7 @@ public class DatafeedNodeSelectorTests extends ESTestCase {

         mlMetadataBuilder.putJob(job, false);
         // Using wildcard index name to test for index resolving as well
-        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), null);
+        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), Collections.emptyMap());
         mlMetadata = mlMetadataBuilder.build();

         tasks = PersistentTasksCustomMetaData.builder().build();
@@ -128,7 +128,7 @@ public class DatafeedNodeSelectorTests extends ESTestCase {
         MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder();
         Job job = createScheduledJob("job_id").build(new Date());
         mlMetadataBuilder.putJob(job, false);
-        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null);
+        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), Collections.emptyMap());
         mlMetadata = mlMetadataBuilder.build();

         PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();
@@ -156,7 +156,7 @@ public class DatafeedNodeSelectorTests extends ESTestCase {

         mlMetadataBuilder.putJob(job, false);
         // Using wildcard index name to test for index resolving as well
-        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), null);
+        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), Collections.emptyMap());
         mlMetadata = mlMetadataBuilder.build();

         PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();
@@ -182,7 +182,7 @@ public class DatafeedNodeSelectorTests extends ESTestCase {

         mlMetadataBuilder.putJob(job, false);
         // Using wildcard index name to test for index resolving as well
-        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), null);
+        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), Collections.emptyMap());
         mlMetadata = mlMetadataBuilder.build();

         PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();
@@ -207,7 +207,8 @@ public class DatafeedNodeSelectorTests extends ESTestCase {
         MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder();
         Job job = createScheduledJob("job_id").build(new Date());
         mlMetadataBuilder.putJob(job, false);
-        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo")), null);
+        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo")),
+                Collections.emptyMap());
         mlMetadata = mlMetadataBuilder.build();

         PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();
@@ -231,7 +232,8 @@ public class DatafeedNodeSelectorTests extends ESTestCase {
         MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder();
         Job job = createScheduledJob("job_id").build(new Date());
         mlMetadataBuilder.putJob(job, false);
-        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("remote:foo")), null);
+        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("remote:foo")),
+                Collections.emptyMap());
         mlMetadata = mlMetadataBuilder.build();

         PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();
@@ -248,7 +250,7 @@ public class DatafeedNodeSelectorTests extends ESTestCase {
         MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder();
         Job job = createScheduledJob("job_id").build(new Date());
         mlMetadataBuilder.putJob(job, false);
-        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null);
+        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), Collections.emptyMap());
         mlMetadata = mlMetadataBuilder.build();

         String nodeId = randomBoolean() ? "node_id2" : null;
@@ -286,7 +288,8 @@ public class DatafeedNodeSelectorTests extends ESTestCase {
         MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder();
         Job job = createScheduledJob("job_id").build(new Date());
         mlMetadataBuilder.putJob(job, false);
-        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo")), null);
+        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo")),
+                Collections.emptyMap());
         mlMetadata = mlMetadataBuilder.build();

         PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();

@@ -6,7 +6,6 @@
 package org.elasticsearch.xpack.ml.integration;

 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.cluster.ClusterChangedEvent;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ClusterStateUpdateTask;
 import org.elasticsearch.cluster.metadata.MetaData;
@@ -47,7 +46,7 @@ public class DeleteJobIT extends BaseMlIntegTestCase {
             }

             @Override
-            public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) {
+            public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                 markAsDeletedLatch.countDown();
             }
         });
@@ -90,7 +89,7 @@ public class DeleteJobIT extends BaseMlIntegTestCase {
             }

             @Override
-            public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) {
+            public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                 removeJobLatch.countDown();
             }
         });
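
The DeleteJobIT hunks are the flip side of the ClusterStateUpdateTask change: clusterStatePublished(...) is now a final, empty method on ClusterStateUpdateTask (it is an executor-level callback that always runs under the system context), so a task that wants a per-task completion signal overrides clusterStateProcessed(...) from ClusterStateTaskListener instead. A minimal sketch of the updated idiom, with the source string, latch, and no-op bodies as stand-ins for the test's own logic:

    // Count down a latch once this task's cluster state update has been processed.
    clusterService.submitStateUpdateTask("mark-job-as-deleted", new ClusterStateUpdateTask() {
        @Override
        public ClusterState execute(ClusterState currentState) {
            return currentState; // stand-in: the real task rewrites the ML metadata here
        }

        @Override
        public void onFailure(String source, Exception e) {
            // stand-in: the test would record the failure and release the latch
        }

        @Override
        public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
            markAsDeletedLatch.countDown(); // runs with the submitter's thread context restored
        }
    });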