Moved job lifecycle over to persistent tasks

Also replaced the DELETING status in JobState with a boolean `deleted` flag on the Job config. A job's state is now stored inside a persistent task in cluster state; jobs that aren't running have no persistent task, so the notion of being deleted had to move to the job config itself.
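
As a rough sketch of the new lookup: the actions below call `MlMetadata.getTask` and `MlMetadata.getJobState`, whose bodies are not part of this diff, so the implementation here is an assumption; the CLOSED fallback reflects that a job with no persistent task is not running.

    // Sketch (assumed implementation, not part of this diff): derive a job's
    // state from the persistent tasks custom in cluster state.
    static JobState getJobState(String jobId, @Nullable PersistentTasksInProgress tasks) {
        PersistentTaskInProgress<?> task = getTask(jobId, tasks); // find the open-job task, if any
        if (task != null && task.getStatus() != null) {
            return (JobState) task.getStatus(); // the task's status carries the job state
        }
        return JobState.CLOSED; // no running task -> the job is not running
    }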

Original commit: elastic/x-pack-elasticsearch@b0ed82124d
Author: Martijn van Groningen
Date:   2017-02-08 21:31:06 +01:00
Parent: af2486b834
Commit: c6763489d5
62 changed files with 1294 additions and 1509 deletions

MlPlugin.java

@@ -14,6 +14,7 @@ import org.elasticsearch.cluster.NamedDiff;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.cluster.routing.UnassignedInfo;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
@@ -23,6 +24,7 @@ import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.SettingsFilter;
+import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.plugins.ActionPlugin;
@@ -30,6 +32,7 @@ import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestHandler;
 import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ExecutorBuilder;
 import org.elasticsearch.threadpool.FixedExecutorBuilder;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -50,7 +53,6 @@ import org.elasticsearch.xpack.ml.action.GetJobsAction;
 import org.elasticsearch.xpack.ml.action.GetJobsStatsAction;
 import org.elasticsearch.xpack.ml.action.GetModelSnapshotsAction;
 import org.elasticsearch.xpack.ml.action.GetRecordsAction;
-import org.elasticsearch.xpack.ml.action.InternalOpenJobAction;
 import org.elasticsearch.xpack.ml.action.MlDeleteByQueryAction;
 import org.elasticsearch.xpack.ml.action.OpenJobAction;
 import org.elasticsearch.xpack.ml.action.PostDataAction;
@@ -60,7 +62,6 @@ import org.elasticsearch.xpack.ml.action.PutJobAction;
 import org.elasticsearch.xpack.ml.action.RevertModelSnapshotAction;
 import org.elasticsearch.xpack.ml.action.StartDatafeedAction;
 import org.elasticsearch.xpack.ml.action.StopDatafeedAction;
-import org.elasticsearch.xpack.ml.action.UpdateJobStateAction;
 import org.elasticsearch.xpack.ml.action.UpdateJobAction;
 import org.elasticsearch.xpack.ml.action.UpdateModelSnapshotAction;
 import org.elasticsearch.xpack.ml.action.UpdateProcessAction;
@@ -68,6 +69,7 @@ import org.elasticsearch.xpack.ml.action.ValidateDetectorAction;
 import org.elasticsearch.xpack.ml.action.ValidateJobConfigAction;
 import org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunner;
 import org.elasticsearch.xpack.ml.job.JobManager;
+import org.elasticsearch.xpack.ml.job.config.JobState;
 import org.elasticsearch.xpack.ml.job.metadata.MlInitializationService;
 import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
 import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister;
@@ -179,11 +181,13 @@ public class MlPlugin extends Plugin implements ActionPlugin {
         return Arrays.asList(
                 new NamedWriteableRegistry.Entry(MetaData.Custom.class, "ml", MlMetadata::new),
                 new NamedWriteableRegistry.Entry(NamedDiff.class, "ml", MlMetadata.MlMetadataDiff::new),
-                new NamedWriteableRegistry.Entry(PersistentActionCoordinator.Status.class,
-                        PersistentActionCoordinator.Status.NAME, PersistentActionCoordinator.Status::new),
+                new NamedWriteableRegistry.Entry(Task.Status.class, PersistentActionCoordinator.Status.NAME,
+                        PersistentActionCoordinator.Status::new),
                 new NamedWriteableRegistry.Entry(ClusterState.Custom.class, PersistentTasksInProgress.TYPE, PersistentTasksInProgress::new),
                 new NamedWriteableRegistry.Entry(NamedDiff.class, PersistentTasksInProgress.TYPE, PersistentTasksInProgress::readDiffFrom),
-                new NamedWriteableRegistry.Entry(PersistentActionRequest.class, StartDatafeedAction.NAME, StartDatafeedAction.Request::new)
+                new NamedWriteableRegistry.Entry(PersistentActionRequest.class, StartDatafeedAction.NAME, StartDatafeedAction.Request::new),
+                new NamedWriteableRegistry.Entry(PersistentActionRequest.class, OpenJobAction.NAME, OpenJobAction.Request::new),
+                new NamedWriteableRegistry.Entry(Task.Status.class, JobState.NAME, JobState::fromStream)
         );
     }
@@ -204,8 +208,15 @@ public class MlPlugin extends Plugin implements ActionPlugin {
         if (false == enabled) {
             return emptyList();
         }
+        // Whether we are using native process is a good way to detect whether we are in dev / test mode:
+        TimeValue delayedNodeTimeOutSetting;
+        if (USE_NATIVE_PROCESS_OPTION.get(settings)) {
+            delayedNodeTimeOutSetting = UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.get(settings);
+        } else {
+            delayedNodeTimeOutSetting = TimeValue.timeValueNanos(0);
+        }
         JobResultsPersister jobResultsPersister = new JobResultsPersister(settings, client);
-        JobProvider jobProvider = new JobProvider(client, 0);
+        JobProvider jobProvider = new JobProvider(client, 1, delayedNodeTimeOutSetting);
         JobDataCountsPersister jobDataCountsPersister = new JobDataCountsPersister(settings, client);
         JobManager jobManager = new JobManager(settings, jobProvider, jobResultsPersister, clusterService);
@@ -233,7 +244,7 @@ public class MlPlugin extends Plugin implements ActionPlugin {
                 jobResultsPersister, jobDataCountsPersister, autodetectProcessFactory, normalizerFactory);
         DatafeedJobRunner datafeedJobRunner = new DatafeedJobRunner(threadPool, client, clusterService, jobProvider,
                 System::currentTimeMillis);
-        PersistentActionService persistentActionService = new PersistentActionService(Settings.EMPTY, clusterService, client);
+        PersistentActionService persistentActionService = new PersistentActionService(Settings.EMPTY, threadPool, clusterService, client);
         PersistentActionRegistry persistentActionRegistry = new PersistentActionRegistry(Settings.EMPTY);

         return Arrays.asList(
@@ -301,8 +312,6 @@ public class MlPlugin extends Plugin implements ActionPlugin {
                 new ActionHandler<>(UpdateJobAction.INSTANCE, UpdateJobAction.TransportAction.class),
                 new ActionHandler<>(DeleteJobAction.INSTANCE, DeleteJobAction.TransportAction.class),
                 new ActionHandler<>(OpenJobAction.INSTANCE, OpenJobAction.TransportAction.class),
-                new ActionHandler<>(InternalOpenJobAction.INSTANCE, InternalOpenJobAction.TransportAction.class),
-                new ActionHandler<>(UpdateJobStateAction.INSTANCE, UpdateJobStateAction.TransportAction.class),
                 new ActionHandler<>(GetFiltersAction.INSTANCE, GetFiltersAction.TransportAction.class),
                 new ActionHandler<>(PutFilterAction.INSTANCE, PutFilterAction.TransportAction.class),
                 new ActionHandler<>(DeleteFilterAction.INSTANCE, DeleteFilterAction.TransportAction.class),

CloseJobAction.java

@@ -9,7 +9,6 @@ import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
@@ -18,30 +17,45 @@ import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelT
 import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction;
 import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.action.support.HandledTransportAction;
+import org.elasticsearch.action.support.master.MasterNodeRequest;
+import org.elasticsearch.action.support.master.TransportMasterNodeAction;
 import org.elasticsearch.client.ElasticsearchClient;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.ClusterStateUpdateTask;
+import org.elasticsearch.cluster.block.ClusterBlockException;
+import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.tasks.TaskInfo;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.config.JobState;
-import org.elasticsearch.xpack.ml.job.metadata.Allocation;
 import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
-import org.elasticsearch.xpack.ml.utils.JobStateObserver;
+import org.elasticsearch.xpack.persistent.PersistentTaskClusterService;
+import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;
+import org.elasticsearch.xpack.persistent.PersistentTasksInProgress.PersistentTaskInProgress;

 import java.io.IOException;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
 import java.util.Objects;
+import java.util.function.Predicate;

 public class CloseJobAction extends Action<CloseJobAction.Request, CloseJobAction.Response, CloseJobAction.RequestBuilder> {
@@ -62,10 +76,27 @@ public class CloseJobAction extends Action<CloseJobAction.Request, CloseJobActio
         return new Response();
     }

-    public static class Request extends ActionRequest {
+    public static class Request extends MasterNodeRequest<Request> implements ToXContent {
+
+        public static final ParseField TIMEOUT = new ParseField("timeout");
+        public static ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);
+
+        static {
+            PARSER.declareString(Request::setJobId, Job.ID);
+            PARSER.declareString((request, val) ->
+                    request.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT);
+        }
+
+        public static Request parseRequest(String jobId, XContentParser parser) {
+            Request request = PARSER.apply(parser, null);
+            if (jobId != null) {
+                request.jobId = jobId;
+            }
+            return request;
+        }

         private String jobId;
-        private TimeValue closeTimeout = TimeValue.timeValueMinutes(20);
+        private TimeValue timeout = TimeValue.timeValueMinutes(20);

         Request() {}
@@ -77,12 +108,16 @@ public class CloseJobAction extends Action<CloseJobAction.Request, CloseJobActio
             return jobId;
         }

-        public TimeValue getCloseTimeout() {
-            return closeTimeout;
+        public void setJobId(String jobId) {
+            this.jobId = jobId;
         }

-        public void setCloseTimeout(TimeValue closeTimeout) {
-            this.closeTimeout = closeTimeout;
+        public TimeValue getTimeout() {
+            return timeout;
+        }
+
+        public void setTimeout(TimeValue timeout) {
+            this.timeout = timeout;
         }

         @Override
@@ -94,19 +129,28 @@ public class CloseJobAction extends Action<CloseJobAction.Request, CloseJobActio
         public void readFrom(StreamInput in) throws IOException {
             super.readFrom(in);
             jobId = in.readString();
-            closeTimeout = new TimeValue(in);
+            timeout = new TimeValue(in);
         }

         @Override
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
             out.writeString(jobId);
-            closeTimeout.writeTo(out);
+            timeout.writeTo(out);
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+            builder.field(Job.ID.getPreferredName(), jobId);
+            builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep());
+            builder.endObject();
+            return builder;
         }

         @Override
         public int hashCode() {
-            return Objects.hash(jobId, closeTimeout);
+            return Objects.hash(jobId, timeout);
         }

         @Override
@@ -119,7 +163,7 @@ public class CloseJobAction extends Action<CloseJobAction.Request, CloseJobActio
             }
             Request other = (Request) obj;
             return Objects.equals(jobId, other.jobId) &&
-                    Objects.equals(closeTimeout, other.closeTimeout);
+                    Objects.equals(timeout, other.timeout);
         }
     }
@@ -179,71 +223,135 @@ public class CloseJobAction extends Action<CloseJobAction.Request, CloseJobActio
         }
     }

-    public static class TransportAction extends HandledTransportAction<Request, Response> {
+    public static class TransportAction extends TransportMasterNodeAction<Request, Response> {

         private final ClusterService clusterService;
-        private final JobStateObserver jobStateObserver;
         private final TransportListTasksAction listTasksAction;
         private final TransportCancelTasksAction cancelTasksAction;
+        private final PersistentTaskClusterService persistentTaskClusterService;

         @Inject
         public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool,
                                ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
-                               ClusterService clusterService, TransportCancelTasksAction cancelTasksAction,
-                               TransportListTasksAction listTasksAction) {
-            super(settings, CloseJobAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new);
+                               ClusterService clusterService, TransportListTasksAction listTasksAction,
+                               TransportCancelTasksAction cancelTasksAction, PersistentTaskClusterService persistentTaskClusterService) {
+            super(settings, CloseJobAction.NAME, transportService, clusterService, threadPool, actionFilters,
+                    indexNameExpressionResolver, Request::new);
             this.clusterService = clusterService;
-            this.jobStateObserver = new JobStateObserver(threadPool, clusterService);
-            this.cancelTasksAction = cancelTasksAction;
             this.listTasksAction = listTasksAction;
+            this.cancelTasksAction = cancelTasksAction;
+            this.persistentTaskClusterService = persistentTaskClusterService;
         }

         @Override
-        protected void doExecute(Request request, ActionListener<Response> listener) {
-            MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
-            validate(request.jobId, mlMetadata);
-            ListTasksRequest listTasksRequest = new ListTasksRequest();
-            listTasksRequest.setActions(InternalOpenJobAction.NAME);
-            listTasksRequest.setDetailed(true);
-            listTasksAction.execute(listTasksRequest, ActionListener.wrap(listTasksResponse -> {
-                String expectedJobDescription = "job-" + request.jobId;
-                for (TaskInfo taskInfo : listTasksResponse.getTasks()) {
-                    if (expectedJobDescription.equals(taskInfo.getDescription())) {
-                        CancelTasksRequest cancelTasksRequest = new CancelTasksRequest();
-                        cancelTasksRequest.setTaskId(taskInfo.getTaskId());
-                        cancelTasksAction.execute(cancelTasksRequest, ActionListener.wrap(
-                                cancelTasksResponse -> {
-                                    jobStateObserver.waitForState(request.jobId, request.closeTimeout, JobState.CLOSED,
-                                            e -> {
-                                                if (e != null) {
-                                                    listener.onFailure(e);
-                                                } else {
-                                                    listener.onResponse(new CloseJobAction.Response(true));
-                                                }
-                                            }
-                                    );
-                                },
-                                listener::onFailure)
-                        );
-                        return;
-                    }
-                }
-                listener.onFailure(new ResourceNotFoundException("No job [" + request.jobId + "] running"));
-            }, listener::onFailure));
+        protected String executor() {
+            return ThreadPool.Names.SAME;
+        }
+
+        @Override
+        protected Response newResponse() {
+            return new Response();
+        }
+
+        @Override
+        protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
+            PersistentTaskInProgress<?> task = validateAndFindTask(request.getJobId(), state);
+            clusterService.submitStateUpdateTask("closing job [" + request.getJobId() + "]", new ClusterStateUpdateTask() {
+                @Override
+                public ClusterState execute(ClusterState currentState) throws Exception {
+                    return moveJobToClosingState(request.getJobId(), currentState);
+                }
+
+                @Override
+                public void onFailure(String source, Exception e) {
+                    listener.onFailure(e);
+                }
+
+                @Override
+                public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
+                    ListTasksRequest listTasksRequest = new ListTasksRequest();
+                    listTasksRequest.setDetailed(true);
+                    listTasksRequest.setActions(OpenJobAction.NAME + "[c]");
+                    listTasksAction.execute(listTasksRequest, ActionListener.wrap(listTasksResponse -> {
+                        String expectedDescription = "job-" + request.getJobId();
+                        for (TaskInfo taskInfo : listTasksResponse.getTasks()) {
+                            if (expectedDescription.equals(taskInfo.getDescription())) {
+                                CancelTasksRequest cancelTasksRequest = new CancelTasksRequest();
+                                cancelTasksRequest.setTaskId(taskInfo.getTaskId());
+                                cancelTasksAction.execute(cancelTasksRequest, ActionListener.wrap(cancelTaskResponse -> {
+                                    persistentTaskClusterService.completeOrRestartPersistentTask(task.getId(), null,
+                                            ActionListener.wrap(
+                                                    empty -> listener.onResponse(new CloseJobAction.Response(true)),
+                                                    listener::onFailure
+                                            )
+                                    );
+                                }, listener::onFailure));
+                                return;
+                            }
+                        }
+                        listener.onFailure(new ResourceNotFoundException("task not found for job [" + request.getJobId() + "]"));
+                    }, listener::onFailure));
+                }
+            });
         }

-        static void validate(String jobId, MlMetadata mlMetadata) {
-            Allocation allocation = mlMetadata.getAllocations().get(jobId);
-            if (allocation == null) {
-                throw ExceptionsHelper.missingJobException(jobId);
-            }
-            if (allocation.getState() != JobState.OPENED) {
-                throw new ElasticsearchStatusException("job not opened, expected job state [{}], but got [{}]",
-                        RestStatus.CONFLICT, JobState.OPENED, allocation.getState());
+        @Override
+        protected ClusterBlockException checkBlock(Request request, ClusterState state) {
+            return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
+        }
+
+        static PersistentTaskInProgress<?> validateAndFindTask(String jobId, ClusterState state) {
+            MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE);
+            if (mlMetadata.getJobs().containsKey(jobId) == false) {
+                throw ExceptionsHelper.missingJobException(jobId);
+            }
+
+            PersistentTasksInProgress tasks = state.custom(PersistentTasksInProgress.TYPE);
+            if (tasks != null) {
+                Predicate<PersistentTaskInProgress<?>> p = t -> {
+                    OpenJobAction.Request storedRequest = (OpenJobAction.Request) t.getRequest();
+                    return storedRequest.getJobId().equals(jobId);
+                };
+                for (PersistentTaskInProgress<?> task : tasks.findTasks(OpenJobAction.NAME, p)) {
+                    OpenJobAction.Request storedRequest = (OpenJobAction.Request) task.getRequest();
+                    if (storedRequest.getJobId().equals(jobId)) {
+                        JobState jobState = (JobState) task.getStatus();
+                        if (jobState != JobState.OPENED) {
+                            throw new ElasticsearchStatusException("cannot close job, expected job state [{}], but got [{}]",
+                                    RestStatus.CONFLICT, JobState.OPENED, jobState);
+                        }
+                        return task;
+                    }
+                }
             }
+            throw new ElasticsearchStatusException("cannot close job, expected job state [{}], but got [{}]",
+                    RestStatus.CONFLICT, JobState.OPENED, JobState.CLOSED);
+        }
+
+        static ClusterState moveJobToClosingState(String jobId, ClusterState currentState) {
+            PersistentTaskInProgress<?> task = validateAndFindTask(jobId, currentState);
+            PersistentTasksInProgress currentTasks = currentState.custom(PersistentTasksInProgress.TYPE);
+            Map<Long, PersistentTaskInProgress<?>> updatedTasks = new HashMap<>(currentTasks.taskMap());
+            for (PersistentTaskInProgress<?> taskInProgress : currentTasks.tasks()) {
+                if (taskInProgress.getId() == task.getId()) {
+                    updatedTasks.put(taskInProgress.getId(), new PersistentTaskInProgress<>(taskInProgress, JobState.CLOSING));
+                }
+            }
+            PersistentTasksInProgress newTasks = new PersistentTasksInProgress(currentTasks.getCurrentId(), updatedTasks);
+
+            MlMetadata mlMetadata = currentState.metaData().custom(MlMetadata.TYPE);
+            Job.Builder jobBuilder = new Job.Builder(mlMetadata.getJobs().get(jobId));
+            jobBuilder.setFinishedTime(new Date());
+            MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(mlMetadata);
+            mlMetadataBuilder.putJob(jobBuilder.build(), true);
+
+            ClusterState.Builder builder = ClusterState.builder(currentState);
+            return builder
+                    .putCustom(PersistentTasksInProgress.TYPE, newTasks)
+                    .metaData(new MetaData.Builder(currentState.metaData())
+                            .putCustom(MlMetadata.TYPE, mlMetadataBuilder.build()))
+                    .build();
         }
     }
 }

FlushJobAction.java

@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.action;
 import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.tasks.BaseTasksResponse;
 import org.elasticsearch.client.ElasticsearchClient;
@@ -27,9 +28,9 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.ml.MlPlugin;
+import org.elasticsearch.xpack.ml.job.JobManager;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;
-import org.elasticsearch.xpack.ml.job.JobManager;
 import org.elasticsearch.xpack.ml.job.process.autodetect.params.InterimResultsParams;
 import org.elasticsearch.xpack.ml.job.process.autodetect.params.TimeRange;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
@@ -242,15 +243,15 @@ public class FlushJobAction extends Action<FlushJobAction.Request, FlushJobActio
         }
     }

-    public static class TransportAction extends TransportJobTaskAction<InternalOpenJobAction.JobTask, Request, Response> {
+    public static class TransportAction extends TransportJobTaskAction<OpenJobAction.JobTask, Request, Response> {

         @Inject
         public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ClusterService clusterService,
                                ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
-                               AutodetectProcessManager processManager, JobManager jobManager) {
+                               AutodetectProcessManager processManager, JobManager jobManager, TransportListTasksAction listTasksAction) {
             super(settings, FlushJobAction.NAME, threadPool, clusterService, transportService, actionFilters,
                     indexNameExpressionResolver, FlushJobAction.Request::new, FlushJobAction.Response::new, MlPlugin.THREAD_POOL_NAME,
-                    jobManager, processManager, Request::getJobId);
+                    jobManager, processManager, Request::getJobId, listTasksAction);
         }

         @Override
@@ -261,7 +262,7 @@ public class FlushJobAction extends Action<FlushJobAction.Request, FlushJobActio
         }

         @Override
-        protected void taskOperation(Request request, InternalOpenJobAction.JobTask task,
+        protected void taskOperation(Request request, OpenJobAction.JobTask task,
                                      ActionListener<FlushJobAction.Response> listener) {
             jobManager.getJobOrThrowIfUnknown(request.getJobId());

GetJobsStatsAction.java

@@ -42,6 +42,7 @@ import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManage
 import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;
 import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSizeStats;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
+import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -97,7 +98,7 @@ public class GetJobsStatsAction extends Action<GetJobsStatsAction.Request, GetJo
         @Override
         public boolean match(Task task) {
-            return jobId.equals(Job.ALL) || InternalOpenJobAction.JobTask.match(task, jobId);
+            return jobId.equals(Job.ALL) || OpenJobAction.JobTask.match(task, jobId);
         }

         @Override
@@ -290,7 +291,7 @@ public class GetJobsStatsAction extends Action<GetJobsStatsAction.Request, GetJo
         }
     }

-    public static class TransportAction extends TransportTasksAction<InternalOpenJobAction.JobTask, Request, Response,
+    public static class TransportAction extends TransportTasksAction<OpenJobAction.JobTask, Request, Response,
             QueryPage<Response.JobStats>> {

         private final ClusterService clusterService;
@@ -342,13 +343,13 @@ public class GetJobsStatsAction extends Action<GetJobsStatsAction.Request, GetJo
         }

         @Override
-        protected void taskOperation(Request request, InternalOpenJobAction.JobTask task,
+        protected void taskOperation(Request request, OpenJobAction.JobTask task,
                                      ActionListener<QueryPage<Response.JobStats>> listener) {
             logger.debug("Get stats for job '{}'", request.getJobId());
-            MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
+            PersistentTasksInProgress tasks = clusterService.state().custom(PersistentTasksInProgress.TYPE);
             Optional<Tuple<DataCounts, ModelSizeStats>> stats = processManager.getStatistics(request.getJobId());
             if (stats.isPresent()) {
-                JobState jobState = mlMetadata.getAllocations().get(request.jobId).getState();
+                JobState jobState = MlMetadata.getJobState(request.jobId, tasks);
                 Response.JobStats jobStats = new Response.JobStats(request.jobId, stats.get().v1(), stats.get().v2(), jobState);
                 listener.onResponse(new QueryPage<>(Collections.singletonList(jobStats), 1, Job.RESULTS_FIELD));
             } else {
@@ -365,14 +366,14 @@ public class GetJobsStatsAction extends Action<GetJobsStatsAction.Request, GetJo
                 return;
             }

-            MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
             AtomicInteger counter = new AtomicInteger(jobIds.size());
             AtomicArray<Response.JobStats> jobStats = new AtomicArray<>(jobIds.size());
+            PersistentTasksInProgress tasks = clusterService.state().custom(PersistentTasksInProgress.TYPE);
             for (int i = 0; i < jobIds.size(); i++) {
                 int slot = i;
                 String jobId = jobIds.get(i);
                 gatherDataCountsAndModelSizeStats(jobId, (dataCounts, modelSizeStats) -> {
-                    JobState jobState = mlMetadata.getAllocations().get(jobId).getState();
+                    JobState jobState = MlMetadata.getJobState(request.jobId, tasks);
                     jobStats.set(slot, new Response.JobStats(jobId, dataCounts, modelSizeStats, jobState));
                     if (counter.decrementAndGet() == 0) {
                         List<Response.JobStats> results = response.getResponse().results();

InternalOpenJobAction.java (deleted)

@@ -1,135 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.ml.action;
-
-import org.elasticsearch.action.Action;
-import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.ActionRequestBuilder;
-import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.action.support.HandledTransportAction;
-import org.elasticsearch.client.ElasticsearchClient;
-import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.tasks.CancellableTask;
-import org.elasticsearch.tasks.Task;
-import org.elasticsearch.tasks.TaskId;
-import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportService;
-import org.elasticsearch.xpack.ml.job.config.JobState;
-import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;
-
-public class InternalOpenJobAction extends Action<InternalOpenJobAction.Request, InternalOpenJobAction.Response,
-        InternalOpenJobAction.RequestBuilder> {
-
-    public static final InternalOpenJobAction INSTANCE = new InternalOpenJobAction();
-    public static final String NAME = "cluster:admin/ml/anomaly_detectors/internal_open";
-
-    private InternalOpenJobAction() {
-        super(NAME);
-    }
-
-    @Override
-    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
-        return new RequestBuilder(client, this);
-    }
-
-    @Override
-    public Response newResponse() {
-        return new Response();
-    }
-
-    public static class Request extends OpenJobAction.Request {
-
-        public Request(String jobId) {
-            super(jobId);
-        }
-
-        Request() {
-            super();
-        }
-
-        @Override
-        public Task createTask(long id, String type, String action, TaskId parentTaskId) {
-            return new JobTask(getJobId(), id, type, action, parentTaskId);
-        }
-    }
-
-    static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {
-
-        RequestBuilder(ElasticsearchClient client, InternalOpenJobAction action) {
-            super(client, action, new Request());
-        }
-    }
-
-    public static class Response extends ActionResponse {
-
-        Response() {}
-    }
-
-    public static class JobTask extends CancellableTask {
-
-        private volatile Runnable cancelHandler;
-
-        JobTask(String jobId, long id, String type, String action, TaskId parentTask) {
-            super(id, type, action, "job-" + jobId, parentTask);
-        }
-
-        @Override
-        public boolean shouldCancelChildrenOnCancellation() {
-            return true;
-        }
-
-        @Override
-        protected void onCancelled() {
-            cancelHandler.run();
-        }
-
-        static boolean match(Task task, String expectedJobId) {
-            String expectedDescription = "job-" + expectedJobId;
-            return task instanceof JobTask && expectedDescription.equals(task.getDescription());
-        }
-    }
-
-    public static class TransportAction extends HandledTransportAction<Request, Response> {
-
-        private final AutodetectProcessManager autodetectProcessManager;
-
-        @Inject
-        public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool,
-                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
-                               AutodetectProcessManager autodetectProcessManager) {
-            super(settings, InternalOpenJobAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
-                    Request::new);
-            this.autodetectProcessManager = autodetectProcessManager;
-        }
-
-        @Override
-        protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
-            JobTask jobTask = (JobTask) task;
-            autodetectProcessManager.setJobState(request.getJobId(), JobState.OPENING, aVoid -> {
-                jobTask.cancelHandler = () -> autodetectProcessManager.closeJob(request.getJobId());
-                autodetectProcessManager.openJob(request.getJobId(), request.isIgnoreDowntime(), e -> {
-                    if (e == null) {
-                        listener.onResponse(new Response());
-                    } else {
-                        listener.onFailure(e);
-                    }
-                });
-            }, listener::onFailure);
-        }
-
-        @Override
-        protected void doExecute(Request request, ActionListener<Response> listener) {
-            throw new IllegalStateException("shouldn't get invoked");
-        }
-    }
-}

OpenJobAction.java

@@ -5,38 +5,55 @@
  */
 package org.elasticsearch.xpack.ml.action;

+import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
 import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.client.ElasticsearchClient;
+import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.inject.internal.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.tasks.LoggingTaskListener;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportResponse;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.config.JobState;
 import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
+import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
 import org.elasticsearch.xpack.ml.utils.JobStateObserver;
+import org.elasticsearch.xpack.persistent.PersistentActionRegistry;
+import org.elasticsearch.xpack.persistent.PersistentActionRequest;
+import org.elasticsearch.xpack.persistent.PersistentActionResponse;
+import org.elasticsearch.xpack.persistent.PersistentActionService;
+import org.elasticsearch.xpack.persistent.PersistentTask;
+import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;
+import org.elasticsearch.xpack.persistent.PersistentTasksInProgress.PersistentTaskInProgress;
+import org.elasticsearch.xpack.persistent.TransportPersistentAction;

 import java.io.IOException;
 import java.util.Objects;
+import java.util.function.Consumer;

-public class OpenJobAction extends Action<OpenJobAction.Request, OpenJobAction.Response, OpenJobAction.RequestBuilder> {
+public class OpenJobAction extends Action<OpenJobAction.Request, PersistentActionResponse, OpenJobAction.RequestBuilder> {

     public static final OpenJobAction INSTANCE = new OpenJobAction();
     public static final String NAME = "cluster:admin/ml/anomaly_detectors/open";
@@ -51,22 +68,43 @@ public class OpenJobAction extends Action<OpenJobAction.Request, OpenJobAction.R
     }

     @Override
-    public Response newResponse() {
-        return new Response();
+    public PersistentActionResponse newResponse() {
+        return new PersistentActionResponse();
     }

-    public static class Request extends ActionRequest {
+    public static class Request extends PersistentActionRequest {

         public static final ParseField IGNORE_DOWNTIME = new ParseField("ignore_downtime");
+        public static final ParseField TIMEOUT = new ParseField("timeout");
+        public static ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);
+
+        static {
+            PARSER.declareString(Request::setJobId, Job.ID);
+            PARSER.declareBoolean(Request::setIgnoreDowntime, IGNORE_DOWNTIME);
+            PARSER.declareString((request, val) ->
+                    request.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT);
+        }
+
+        public static Request parseRequest(String jobId, XContentParser parser) {
+            Request request = PARSER.apply(parser, null);
+            if (jobId != null) {
+                request.jobId = jobId;
+            }
+            return request;
+        }

         private String jobId;
         private boolean ignoreDowntime;
-        private TimeValue openTimeout = TimeValue.timeValueSeconds(20);
+        private TimeValue timeout = TimeValue.timeValueSeconds(20);

         public Request(String jobId) {
             this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
         }

+        public Request(StreamInput in) throws IOException {
+            readFrom(in);
+        }
+
         Request() {}

         public String getJobId() {
@@ -85,12 +123,17 @@ public class OpenJobAction extends Action<OpenJobAction.Request, OpenJobAction.R
             this.ignoreDowntime = ignoreDowntime;
         }

-        public TimeValue getOpenTimeout() {
-            return openTimeout;
+        public TimeValue getTimeout() {
+            return timeout;
         }

-        public void setOpenTimeout(TimeValue openTimeout) {
-            this.openTimeout = openTimeout;
+        public void setTimeout(TimeValue timeout) {
+            this.timeout = timeout;
+        }
+
+        @Override
+        public Task createTask(long id, String type, String action, TaskId parentTaskId) {
+            return new JobTask(getJobId(), id, type, action, parentTaskId);
         }

         @Override
@@ -103,7 +146,7 @@ public class OpenJobAction extends Action<OpenJobAction.Request, OpenJobAction.R
             super.readFrom(in);
             jobId = in.readString();
             ignoreDowntime = in.readBoolean();
-            openTimeout = TimeValue.timeValueMillis(in.readVLong());
+            timeout = TimeValue.timeValueMillis(in.readVLong());
         }

         @Override
@@ -111,12 +154,27 @@ public class OpenJobAction extends Action<OpenJobAction.Request, OpenJobAction.R
             super.writeTo(out);
             out.writeString(jobId);
             out.writeBoolean(ignoreDowntime);
-            out.writeVLong(openTimeout.millis());
+            out.writeVLong(timeout.millis());
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+            builder.field(Job.ID.getPreferredName(), jobId);
+            builder.field(IGNORE_DOWNTIME.getPreferredName(), ignoreDowntime);
+            builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep());
+            builder.endObject();
+            return builder;
+        }
+
+        @Override
+        public String getWriteableName() {
+            return NAME;
         }

         @Override
         public int hashCode() {
-            return Objects.hash(jobId, ignoreDowntime, openTimeout);
+            return Objects.hash(jobId, ignoreDowntime, timeout);
         }

         @Override
@@ -130,107 +188,131 @@ public class OpenJobAction extends Action<OpenJobAction.Request, OpenJobAction.R
             OpenJobAction.Request other = (OpenJobAction.Request) obj;
             return Objects.equals(jobId, other.jobId) &&
                     Objects.equals(ignoreDowntime, other.ignoreDowntime) &&
-                    Objects.equals(openTimeout, other.openTimeout);
+                    Objects.equals(timeout, other.timeout);
+        }
+
+        @Override
+        public String toString() {
+            return Strings.toString(this);
         }
     }

-    static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {
+    public static class JobTask extends PersistentTask {
+
+        private volatile Consumer<String> cancelHandler;
+
+        JobTask(String jobId, long id, String type, String action, TaskId parentTask) {
+            super(id, type, action, "job-" + jobId, parentTask);
+        }
+
+        @Override
+        protected void onCancelled() {
+            String reason = CancelTasksRequest.DEFAULT_REASON.equals(getReasonCancelled()) ? null : getReasonCancelled();
+            cancelHandler.accept(reason);
+        }
+
+        static boolean match(Task task, String expectedJobId) {
+            String expectedDescription = "job-" + expectedJobId;
+            return task instanceof JobTask && expectedDescription.equals(task.getDescription());
+        }
+    }
+
+    static class RequestBuilder extends ActionRequestBuilder<Request, PersistentActionResponse, RequestBuilder> {

         RequestBuilder(ElasticsearchClient client, OpenJobAction action) {
             super(client, action, new Request());
         }
     }

-    public static class Response extends ActionResponse implements ToXContentObject {
-
-        private boolean opened;
-
-        Response() {}
-
-        Response(boolean opened) {
-            this.opened = opened;
-        }
-
-        public boolean isOpened() {
-            return opened;
-        }
-
-        @Override
-        public void readFrom(StreamInput in) throws IOException {
-            super.readFrom(in);
-            opened = in.readBoolean();
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            super.writeTo(out);
-            out.writeBoolean(opened);
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field("opened", opened);
-            builder.endObject();
-            return builder;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Response response = (Response) o;
-            return opened == response.opened;
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(opened);
-        }
-    }
-
-    public static class TransportAction extends HandledTransportAction<Request, Response> {
+    public static class TransportAction extends TransportPersistentAction<Request> {

         private final JobStateObserver observer;
-        private final ClusterService clusterService;
-        private final InternalOpenJobAction.TransportAction internalOpenJobAction;
+        private final AutodetectProcessManager autodetectProcessManager;

         @Inject
         public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool,
+                               PersistentActionService persistentActionService, PersistentActionRegistry persistentActionRegistry,
                                ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
-                               ClusterService clusterService, InternalOpenJobAction.TransportAction internalOpenJobAction) {
-            super(settings, OpenJobAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new);
-            this.clusterService = clusterService;
+                               ClusterService clusterService, AutodetectProcessManager autodetectProcessManager) {
+            super(settings, OpenJobAction.NAME, false, threadPool, transportService, persistentActionService,
+                    persistentActionRegistry, actionFilters, indexNameExpressionResolver, Request::new, ThreadPool.Names.MANAGEMENT);
+            this.autodetectProcessManager = autodetectProcessManager;
             this.observer = new JobStateObserver(threadPool, clusterService);
-            this.internalOpenJobAction = internalOpenJobAction;
         }

         @Override
-        protected void doExecute(Request request, ActionListener<Response> listener) {
-            // This validation happens also in InternalOpenJobAction, the reason we do it here too is that if it fails there
-            // we are unable to provide the user immediate feedback. We would create the task and the validation would fail
-            // in the background, whereas now the validation failure is part of the response being returned.
-            MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
-            validate(mlMetadata, request.getJobId());
-            InternalOpenJobAction.Request internalRequest = new InternalOpenJobAction.Request(request.jobId);
-            internalOpenJobAction.execute(internalRequest, LoggingTaskListener.instance());
-            observer.waitForState(request.getJobId(), request.openTimeout, JobState.OPENED, e -> {
+        protected void doExecute(Request request, ActionListener<PersistentActionResponse> listener) {
+            ActionListener<PersistentActionResponse> finalListener =
+                    ActionListener.wrap(response -> waitForJobStarted(request, response, listener), listener::onFailure);
+            super.doExecute(request, finalListener);
+        }
+
+        void waitForJobStarted(Request request, PersistentActionResponse response, ActionListener<PersistentActionResponse> listener) {
+            observer.waitForState(request.getJobId(), request.timeout, JobState.OPENED, e -> {
                 if (e != null) {
                     listener.onFailure(e);
                 } else {
-                    listener.onResponse(new Response(true));
+                    listener.onResponse(response);
                 }
             });
         }

-        /**
-         * Fail fast before trying to update the job state on master node if the job doesn't exist or its state
-         * is not what it should be.
-         */
-        public static void validate(MlMetadata mlMetadata, String jobId) {
-            MlMetadata.Builder builder = new MlMetadata.Builder(mlMetadata);
-            builder.updateState(jobId, JobState.OPENING, null);
+        @Override
+        public void validate(Request request, ClusterState clusterState) {
+            MlMetadata mlMetadata = clusterState.metaData().custom(MlMetadata.TYPE);
+            PersistentTasksInProgress tasks = clusterState.custom(PersistentTasksInProgress.TYPE);
+            OpenJobAction.validate(request.getJobId(), mlMetadata, tasks, clusterState.nodes());
+        }
+
+        @Override
+        protected void nodeOperation(PersistentTask task, Request request, ActionListener<TransportResponse.Empty> listener) {
+            autodetectProcessManager.setJobState(task.getPersistentTaskId(), JobState.OPENING, e1 -> {
+                if (e1 != null) {
+                    listener.onFailure(e1);
+                    return;
+                }
+                JobTask jobTask = (JobTask) task;
+                jobTask.cancelHandler = (reason) -> autodetectProcessManager.closeJob(request.getJobId(), reason);
+                autodetectProcessManager.openJob(request.getJobId(), task.getPersistentTaskId(), request.isIgnoreDowntime(), e2 -> {
+                    if (e2 == null) {
+                        listener.onResponse(new TransportResponse.Empty());
+                    } else {
+                        listener.onFailure(e2);
+                    }
+                });
+            });
+        }
+    }
+
+    /**
+     * Fail fast before trying to update the job state on master node if the job doesn't exist or its state
+     * is not what it should be.
+     */
+    static void validate(String jobId, MlMetadata mlMetadata, @Nullable PersistentTasksInProgress tasks, DiscoveryNodes nodes) {
+        Job job = mlMetadata.getJobs().get(jobId);
+        if (job == null) {
+            throw ExceptionsHelper.missingJobException(jobId);
+        }
+        if (job.isDeleted()) {
+            throw new ElasticsearchStatusException("Cannot open job [" + jobId + "] because it has been marked as deleted",
+                    RestStatus.CONFLICT);
+        }
+        PersistentTaskInProgress<?> task = MlMetadata.getTask(jobId, tasks);
+        JobState jobState = MlMetadata.getJobState(jobId, tasks);
+        if (task != null && task.getExecutorNode() != null && jobState == JobState.OPENED) {
+            if (nodes.nodeExists(task.getExecutorNode()) == false) {
+                // The state is open and the node were running on no longer exists.
+                // We can skip the job state check below, because when the node
+                // disappeared we didn't have time to set the status to failed.
+                return;
+            }
+        }
+        if (jobState.isAnyOf(JobState.CLOSED, JobState.FAILED) == false) {
+            throw new ElasticsearchStatusException("[" + jobId + "] expected state [" + JobState.CLOSED
                    + "] or [" + JobState.FAILED + "], but got [" + jobState +"]", RestStatus.CONFLICT);
         }
     }
 }
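
With OpenJobAction.Request now a PersistentActionRequest, opening a job from client code looks roughly like the sketch below; `client` (an ElasticsearchClient) and `logger` are assumed to be in scope, and the job id is illustrative.

    // Sketch: the open call now yields a PersistentActionResponse (the task handle)
    // rather than the removed OpenJobAction.Response.
    OpenJobAction.Request openRequest = new OpenJobAction.Request("my-job");
    openRequest.setTimeout(TimeValue.timeValueSeconds(30));
    client.execute(OpenJobAction.INSTANCE, openRequest, ActionListener.wrap(
            response -> logger.info("open job task started for [my-job]"),
            e -> logger.error("failed to open [my-job]", e)));

The TransportAction above only responds after JobStateObserver sees the state reach OPENED, so a successful response means the job is actually open.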

PostDataAction.java

@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.action;
 import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.tasks.BaseTasksResponse;
 import org.elasticsearch.client.ElasticsearchClient;
@@ -26,12 +27,12 @@ import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.ml.MlPlugin;
-import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;
+import org.elasticsearch.xpack.ml.job.JobManager;
 import org.elasticsearch.xpack.ml.job.config.DataDescription;
 import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;
-import org.elasticsearch.xpack.ml.job.JobManager;
 import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams;
 import org.elasticsearch.xpack.ml.job.process.autodetect.params.TimeRange;
+import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;

 import java.io.IOException;
 import java.util.Objects;
@@ -218,14 +219,14 @@ public class PostDataAction extends Action<PostDataAction.Request, PostDataActio
         }
     }

-    public static class TransportAction extends TransportJobTaskAction<InternalOpenJobAction.JobTask, Request, Response> {
+    public static class TransportAction extends TransportJobTaskAction<OpenJobAction.JobTask, Request, Response> {

         @Inject
         public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ClusterService clusterService,
                                ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
-                               JobManager jobManager, AutodetectProcessManager processManager) {
+                               JobManager jobManager, AutodetectProcessManager processManager, TransportListTasksAction listTasksAction) {
             super(settings, PostDataAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
-                    Request::new, Response::new, MlPlugin.THREAD_POOL_NAME, jobManager, processManager, Request::getJobId);
+                    Request::new, Response::new, MlPlugin.THREAD_POOL_NAME, jobManager, processManager, Request::getJobId, listTasksAction);
         }

         @Override
@@ -236,17 +237,15 @@ public class PostDataAction extends Action<PostDataAction.Request, PostDataActio
         }

         @Override
-        protected void taskOperation(Request request, InternalOpenJobAction.JobTask task, ActionListener<Response> listener) {
+        protected void taskOperation(Request request, OpenJobAction.JobTask task, ActionListener<Response> listener) {
             TimeRange timeRange = TimeRange.builder().startTime(request.getResetStart()).endTime(request.getResetEnd()).build();
             DataLoadParams params = new DataLoadParams(timeRange, Optional.ofNullable(request.getDataDescription()));
-            threadPool.executor(MlPlugin.THREAD_POOL_NAME).execute(() -> {
-                try {
-                    DataCounts dataCounts = processManager.processData(request.getJobId(), request.content.streamInput(), params);
+            try {
+                DataCounts dataCounts = processManager.processData(request.getJobId(), request.content.streamInput(), params);
+                listener.onResponse(new Response(dataCounts));
listener.onResponse(new Response(dataCounts)); } catch (Exception e) {
} catch (Exception e) { listener.onFailure(e);
listener.onFailure(e); }
}
});
} }
} }

View File

@ -40,7 +40,6 @@ import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState; import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.job.metadata.Allocation;
import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister;
import org.elasticsearch.xpack.ml.job.persistence.JobDataDeleter; import org.elasticsearch.xpack.ml.job.persistence.JobDataDeleter;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
@ -287,8 +286,8 @@ extends Action<RevertModelSnapshotAction.Request, RevertModelSnapshotAction.Resp
request.getSnapshotId(), request.getJobId(), request.getDeleteInterveningResults()); request.getSnapshotId(), request.getJobId(), request.getDeleteInterveningResults());
QueryPage<Job> job = jobManager.getJob(request.getJobId(), clusterService.state()); QueryPage<Job> job = jobManager.getJob(request.getJobId(), clusterService.state());
Allocation allocation = jobManager.getJobAllocation(request.getJobId()); JobState jobState = jobManager.getJobState(request.getJobId());
if (job.count() > 0 && allocation.getState().equals(JobState.CLOSED) == false) { if (job.count() > 0 && jobState.equals(JobState.CLOSED) == false) {
throw ExceptionsHelper.conflictStatusException(Messages.getMessage(Messages.REST_JOB_NOT_CLOSED_REVERT)); throw ExceptionsHelper.conflictStatusException(Messages.getMessage(Messages.REST_JOB_NOT_CLOSED_REVERT));
} }

View File

@ -35,7 +35,6 @@ import org.elasticsearch.xpack.ml.datafeed.DatafeedJobValidator;
import org.elasticsearch.xpack.ml.datafeed.DatafeedState; import org.elasticsearch.xpack.ml.datafeed.DatafeedState;
import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState; import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.metadata.Allocation;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata; import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.persistent.PersistentActionRegistry; import org.elasticsearch.xpack.persistent.PersistentActionRegistry;
@ -205,11 +204,6 @@ public class StartDatafeedAction
this.holder = holder; this.holder = holder;
} }
@Override
public boolean shouldCancelChildrenOnCancellation() {
return true;
}
@Override @Override
protected void onCancelled() { protected void onCancelled() {
stop(); stop();
@ -241,7 +235,8 @@ public class StartDatafeedAction
@Override @Override
public void validate(Request request, ClusterState clusterState) { public void validate(Request request, ClusterState clusterState) {
MlMetadata mlMetadata = clusterState.metaData().custom(MlMetadata.TYPE); MlMetadata mlMetadata = clusterState.metaData().custom(MlMetadata.TYPE);
StartDatafeedAction.validate(request.getDatafeedId(), mlMetadata); PersistentTasksInProgress tasks = clusterState.custom(PersistentTasksInProgress.TYPE);
StartDatafeedAction.validate(request.getDatafeedId(), mlMetadata, tasks);
PersistentTasksInProgress persistentTasksInProgress = clusterState.custom(PersistentTasksInProgress.TYPE); PersistentTasksInProgress persistentTasksInProgress = clusterState.custom(PersistentTasksInProgress.TYPE);
if (persistentTasksInProgress == null) { if (persistentTasksInProgress == null) {
return; return;
@ -273,7 +268,7 @@ public class StartDatafeedAction
} }
public static void validate(String datafeedId, MlMetadata mlMetadata) { public static void validate(String datafeedId, MlMetadata mlMetadata, PersistentTasksInProgress tasks) {
DatafeedConfig datafeed = mlMetadata.getDatafeed(datafeedId); DatafeedConfig datafeed = mlMetadata.getDatafeed(datafeedId);
if (datafeed == null) { if (datafeed == null) {
throw ExceptionsHelper.missingDatafeedException(datafeedId); throw ExceptionsHelper.missingDatafeedException(datafeedId);
@ -282,10 +277,10 @@ public class StartDatafeedAction
if (job == null) { if (job == null) {
throw ExceptionsHelper.missingJobException(datafeed.getJobId()); throw ExceptionsHelper.missingJobException(datafeed.getJobId());
} }
Allocation allocation = mlMetadata.getAllocations().get(datafeed.getJobId()); JobState jobState = MlMetadata.getJobState(datafeed.getJobId(), tasks);
if (allocation.getState() != JobState.OPENED) { if (jobState != JobState.OPENED) {
throw new ElasticsearchStatusException("cannot start datafeed, expected job state [{}], but got [{}]", throw new ElasticsearchStatusException("cannot start datafeed, expected job state [{}], but got [{}]",
RestStatus.CONFLICT, JobState.OPENED, allocation.getState()); RestStatus.CONFLICT, JobState.OPENED, jobState);
} }
DatafeedJobValidator.validate(datafeed, job); DatafeedJobValidator.validate(datafeed, job);
} }
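For illustration, the new validate signature makes the job-state precondition straightforward to exercise in isolation; a hypothetical test-style sketch (the builder methods and test helpers are assumed, not shown in this diff):

// With no persistent task for the job, getJobState(...) resolves to CLOSED,
// so starting the datafeed must be rejected with a CONFLICT.
MlMetadata mlMetadata = new MlMetadata.Builder()
        .putJob(job, false)             // assumed builder API
        .putDatafeed(datafeedConfig)    // assumed builder API
        .build();
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
        () -> StartDatafeedAction.validate(datafeedConfig.getId(), mlMetadata, null));
assertEquals(RestStatus.CONFLICT, e.status());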

View File

@ -146,14 +146,14 @@ public class StopDatafeedAction
MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE); MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE);
validate(datafeedId, mlMetadata); validate(datafeedId, mlMetadata);
PersistentTasksInProgress tasksInProgress = state.custom(PersistentTasksInProgress.TYPE); PersistentTasksInProgress tasks = state.custom(PersistentTasksInProgress.TYPE);
if (tasksInProgress != null) { if (tasks != null) {
for (PersistentTaskInProgress<?> taskInProgress : tasksInProgress.findTasks(StartDatafeedAction.NAME, p -> true)) { for (PersistentTaskInProgress<?> task : tasks.findTasks(StartDatafeedAction.NAME, p -> true)) {
StartDatafeedAction.Request storedRequest = (StartDatafeedAction.Request) taskInProgress.getRequest(); StartDatafeedAction.Request storedRequest = (StartDatafeedAction.Request) task.getRequest();
if (storedRequest.getDatafeedId().equals(datafeedId)) { if (storedRequest.getDatafeedId().equals(datafeedId)) {
RemovePersistentTaskAction.Request cancelTasksRequest = new RemovePersistentTaskAction.Request(); RemovePersistentTaskAction.Request removeTaskRequest = new RemovePersistentTaskAction.Request();
cancelTasksRequest.setTaskId(taskInProgress.getId()); removeTaskRequest.setTaskId(task.getId());
removePersistentTaskAction.execute(cancelTasksRequest, listener); removePersistentTaskAction.execute(removeTaskRequest, listener);
return; return;
} }
} }
@ -164,6 +164,7 @@ public class StopDatafeedAction
@Override @Override
protected ClusterBlockException checkBlock(Request request, ClusterState state) { protected ClusterBlockException checkBlock(Request request, ClusterState state) {
// The remove-persistent-task action actually updates the cluster state; here we just read it.
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
} }

View File

@ -7,8 +7,12 @@ package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.tasks.BaseTasksRequest; import org.elasticsearch.action.support.tasks.BaseTasksRequest;
import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.action.support.tasks.BaseTasksResponse;
@ -21,13 +25,13 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskInfo;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState; import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.metadata.Allocation;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import java.io.IOException; import java.io.IOException;
@ -46,17 +50,50 @@ public abstract class TransportJobTaskAction<OperationTask extends Task, Request
protected final JobManager jobManager; protected final JobManager jobManager;
protected final AutodetectProcessManager processManager; protected final AutodetectProcessManager processManager;
private final Function<Request, String> jobIdFromRequest; private final Function<Request, String> jobIdFromRequest;
private final TransportListTasksAction listTasksAction;
TransportJobTaskAction(Settings settings, String actionName, ThreadPool threadPool, ClusterService clusterService, TransportJobTaskAction(Settings settings, String actionName, ThreadPool threadPool, ClusterService clusterService,
TransportService transportService, ActionFilters actionFilters, TransportService transportService, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver, Supplier<Request> requestSupplier, IndexNameExpressionResolver indexNameExpressionResolver, Supplier<Request> requestSupplier,
Supplier<Response> responseSupplier, String nodeExecutor, JobManager jobManager, Supplier<Response> responseSupplier, String nodeExecutor, JobManager jobManager,
AutodetectProcessManager processManager, Function<Request, String> jobIdFromRequest) { AutodetectProcessManager processManager, Function<Request, String> jobIdFromRequest,
TransportListTasksAction listTasksAction) {
super(settings, actionName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, super(settings, actionName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
requestSupplier, responseSupplier, nodeExecutor); requestSupplier, responseSupplier, nodeExecutor);
this.jobManager = jobManager; this.jobManager = jobManager;
this.processManager = processManager; this.processManager = processManager;
this.jobIdFromRequest = jobIdFromRequest; this.jobIdFromRequest = jobIdFromRequest;
this.listTasksAction = listTasksAction;
}
@Override
protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
// the same validation that exists in AutodetectProcessManager#processData(...) and flush(...) methods
// is required here too because if the job hasn't been opened yet then no task exists for it yet and then
// the #taskOperation(...) method will not be invoked, returning an empty result to the client.
// This ensures that we return an understandable error:
String jobId = jobIdFromRequest.apply(request);
jobManager.getJobOrThrowIfUnknown(jobId);
JobState jobState = jobManager.getJobState(jobId);
if (jobState != JobState.OPENED) {
listener.onFailure( new ElasticsearchStatusException("job [" + jobId + "] state is [" + jobState +
"], but must be [" + JobState.OPENED + "] to perform requested action", RestStatus.CONFLICT));
} else {
ListTasksRequest listTasksRequest = new ListTasksRequest();
listTasksRequest.setDetailed(true);
listTasksRequest.setActions(OpenJobAction.NAME + "[c]");
listTasksAction.execute(listTasksRequest, ActionListener.wrap(listTasksResponse -> {
String expectedDescription = "job-" + jobId;
for (TaskInfo taskInfo : listTasksResponse.getTasks()) {
if (expectedDescription.equals(taskInfo.getDescription())) {
request.setTaskId(taskInfo.getTaskId());
super.doExecute(task, request, listener);
return;
}
}
listener.onFailure(new ResourceNotFoundException("task not found for job [" + jobId + "] " + listTasksResponse));
}, listener::onFailure));
}
} }
@Override @Override
@ -71,19 +108,7 @@ public abstract class TransportJobTaskAction<OperationTask extends Task, Request
} else if (failedNodeExceptions.isEmpty() == false) { } else if (failedNodeExceptions.isEmpty() == false) {
throw new ElasticsearchException(failedNodeExceptions.get(0).getCause()); throw new ElasticsearchException(failedNodeExceptions.get(0).getCause());
} else { } else {
// the same validation that exists in AutodetectProcessManager#processData(...) and flush(...) methods throw new IllegalStateException("No errors or response");
// is required here too because if the job hasn't been opened yet then no task exists for it yet and then
// the #taskOperation(...) method will not be invoked, returning an empty result to the client.
// This ensures that we return an understandable error:
String jobId = jobIdFromRequest.apply(request);
jobManager.getJobOrThrowIfUnknown(jobId);
Allocation allocation = jobManager.getJobAllocation(jobId);
if (allocation.getState() != JobState.OPENED) {
throw new ElasticsearchStatusException("job [" + jobId + "] state is [" + allocation.getState() +
"], but must be [" + JobState.OPENED + "] to perform requested action", RestStatus.CONFLICT);
} else {
throw new IllegalStateException("No errors or response");
}
} }
} else { } else {
if (tasks.size() > 1) { if (tasks.size() > 1) {
@ -127,7 +152,7 @@ public abstract class TransportJobTaskAction<OperationTask extends Task, Request
@Override @Override
public boolean match(Task task) { public boolean match(Task task) {
return InternalOpenJobAction.JobTask.match(task, jobId); return OpenJobAction.JobTask.match(task, jobId);
} }
} }
} }
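The doExecute override above finds the child task by matching the description "job-" + jobId under the OpenJobAction.NAME + "[c]" action. The JobTask it expects moved onto OpenJobAction in this commit but is not visible in this hunk; a rough sketch under those assumptions:

// Sketch of OpenJobAction.JobTask as implied by the lookup above.
public static class JobTask extends CancellableTask {

    volatile Consumer<String> cancelHandler;   // installed by nodeOperation in OpenJobAction

    JobTask(String jobId, long id, String type, String action, TaskId parentTask) {
        super(id, type, action, "job-" + jobId, parentTask);   // the description matched in doExecute
    }

    @Override
    protected void onCancelled() {
        if (cancelHandler != null) {
            cancelHandler.accept(getReasonCancelled());
        }
    }

    static boolean match(Task task, String expectedJobId) {
        return task instanceof JobTask && ("job-" + expectedJobId).equals(task.getDescription());
    }
}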

View File

@ -1,194 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.metadata.Allocation;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import java.io.IOException;
import java.util.Objects;
public class UpdateJobStateAction
extends Action<UpdateJobStateAction.Request, UpdateJobStateAction.Response, UpdateJobStateAction.RequestBuilder> {
public static final UpdateJobStateAction INSTANCE = new UpdateJobStateAction();
public static final String NAME = "cluster:admin/ml/anomaly_detectors/state/update";
private UpdateJobStateAction() {
super(NAME);
}
@Override
public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
return new RequestBuilder(client, this);
}
@Override
public Response newResponse() {
return new Response();
}
public static class Request extends AcknowledgedRequest<Request> {
private String jobId;
private JobState state;
private String reason;
public Request(String jobId, JobState state) {
this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
this.state = ExceptionsHelper.requireNonNull(state, Allocation.STATE.getPreferredName());
}
Request() {}
public String getJobId() {
return jobId;
}
public void setJobId(String jobId) {
this.jobId = jobId;
}
public JobState getState() {
return state;
}
public void setState(JobState state) {
this.state = state;
}
public String getReason() {
return reason;
}
public void setReason(String reason) {
this.reason = reason;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
jobId = in.readString();
state = JobState.fromStream(in);
reason = in.readOptionalString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(jobId);
state.writeTo(out);
out.writeOptionalString(reason);
}
@Override
public int hashCode() {
return Objects.hash(jobId, state);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || obj.getClass() != getClass()) {
return false;
}
UpdateJobStateAction.Request other = (UpdateJobStateAction.Request) obj;
return Objects.equals(jobId, other.jobId) && Objects.equals(state, other.state);
}
}
static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> {
RequestBuilder(ElasticsearchClient client, UpdateJobStateAction action) {
super(client, action, new Request());
}
}
public static class Response extends AcknowledgedResponse {
public Response(boolean acknowledged) {
super(acknowledged);
}
private Response() {}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
readAcknowledged(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
writeAcknowledged(out);
}
}
public static class TransportAction extends TransportMasterNodeAction<Request, Response> {
private final JobManager jobManager;
@Inject
public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
JobManager jobManager) {
super(settings, UpdateJobStateAction.NAME, transportService, clusterService, threadPool, actionFilters,
indexNameExpressionResolver, Request::new);
this.jobManager = jobManager;
}
@Override
protected String executor() {
return ThreadPool.Names.SAME;
}
@Override
protected Response newResponse() {
return new Response();
}
@Override
protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
jobManager.setJobState(request, listener);
}
@Override
protected ClusterBlockException checkBlock(Request request, ClusterState state) {
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
}
}
}
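With UpdateJobStateAction removed, job state no longer goes through a dedicated master-node action; OpenJobAction's nodeOperation instead calls autodetectProcessManager.setJobState(persistentTaskId, state, handler). A heavily hedged sketch of how that could propagate state through the persistent task framework (the action name and request shape below are assumptions, not part of this diff):

// Hypothetical: push the JobState into the persistent task's Task.Status.
public void setJobState(long persistentTaskId, JobState state, Consumer<Exception> handler) {
    UpdatePersistentTaskStatusAction.Request request =
            new UpdatePersistentTaskStatusAction.Request(persistentTaskId, state);
    client.execute(UpdatePersistentTaskStatusAction.INSTANCE, request, ActionListener.wrap(
            response -> handler.accept(null),
            handler::accept));
}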

View File

@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.action.Action; import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.action.support.tasks.BaseTasksResponse;
import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.client.ElasticsearchClient;
@ -176,14 +177,15 @@ public class UpdateProcessAction extends
} }
} }
public static class TransportAction extends TransportJobTaskAction<InternalOpenJobAction.JobTask, Request, Response> { public static class TransportAction extends TransportJobTaskAction<OpenJobAction.JobTask, Request, Response> {
@Inject @Inject
public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ClusterService clusterService, public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ClusterService clusterService,
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
JobManager jobManager, AutodetectProcessManager processManager) { JobManager jobManager, AutodetectProcessManager processManager, TransportListTasksAction listTasksAction) {
super(settings, NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, super(settings, NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
Request::new, Response::new, MlPlugin.THREAD_POOL_NAME, jobManager, processManager, Request::getJobId); Request::new, Response::new, MlPlugin.THREAD_POOL_NAME, jobManager, processManager, Request::getJobId,
listTasksAction);
} }
@Override @Override
@ -194,7 +196,7 @@ public class UpdateProcessAction extends
} }
@Override @Override
protected void taskOperation(Request request, InternalOpenJobAction.JobTask task, ActionListener<Response> listener) { protected void taskOperation(Request request, OpenJobAction.JobTask task, ActionListener<Response> listener) {
threadPool.executor(MlPlugin.THREAD_POOL_NAME).execute(() -> { threadPool.executor(MlPlugin.THREAD_POOL_NAME).execute(() -> {
try { try {
if (request.getModelDebugConfig() != null) { if (request.getModelDebugConfig() != null) {

View File

@ -7,15 +7,18 @@ package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.tasks.Task;
import java.io.IOException; import java.io.IOException;
import java.util.Locale; import java.util.Locale;
public enum DatafeedState implements Writeable { public enum DatafeedState implements Task.Status {
STARTED, STOPPED; STARTED, STOPPED;
public static final String NAME = "DatafeedState";
public static DatafeedState fromString(String name) { public static DatafeedState fromString(String name) {
return valueOf(name.trim().toUpperCase(Locale.ROOT)); return valueOf(name.trim().toUpperCase(Locale.ROOT));
} }
@ -28,11 +31,22 @@ public enum DatafeedState implements Writeable {
return values()[ordinal]; return values()[ordinal];
} }
@Override
public String getWriteableName() {
return NAME;
}
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(ordinal()); out.writeVInt(ordinal());
} }
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.value(this.toString().toLowerCase(Locale.ROOT));
return builder;
}
@Override @Override
public String toString() { public String toString() {
return name().toLowerCase(Locale.ROOT); return name().toLowerCase(Locale.ROOT);
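Implementing Task.Status means DatafeedState (and JobState, below) now travel as named writeables, so they must be registered for deserialization by name; a sketch of the registration the plugin needs, assuming it goes through Plugin#getNamedWriteables:

// Sketch: without entries like these the transport layer cannot read the status back.
@Override
public List<NamedWriteableRegistry.Entry> getNamedWriteables() {
    return Arrays.asList(
            new NamedWriteableRegistry.Entry(Task.Status.class, JobState.NAME, JobState::fromStream),
            new NamedWriteableRegistry.Entry(Task.Status.class, DatafeedState.NAME, DatafeedState::fromStream));
}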

View File

@ -6,11 +6,12 @@
package org.elasticsearch.xpack.ml.job; package org.elasticsearch.xpack.ml.job;
import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client; import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.ack.AckedRequest; import org.elasticsearch.cluster.ack.AckedRequest;
import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.ClusterService;
@ -20,14 +21,12 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.ml.action.DeleteJobAction; import org.elasticsearch.xpack.ml.action.DeleteJobAction;
import org.elasticsearch.xpack.ml.action.PutJobAction; import org.elasticsearch.xpack.ml.action.PutJobAction;
import org.elasticsearch.xpack.ml.action.RevertModelSnapshotAction; import org.elasticsearch.xpack.ml.action.RevertModelSnapshotAction;
import org.elasticsearch.xpack.ml.action.UpdateJobStateAction;
import org.elasticsearch.xpack.ml.action.util.QueryPage; import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.config.IgnoreDowntime; import org.elasticsearch.xpack.ml.job.config.IgnoreDowntime;
import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState; import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.config.JobUpdate; import org.elasticsearch.xpack.ml.job.config.JobUpdate;
import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.job.metadata.Allocation;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata; import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
@ -37,6 +36,7 @@ import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.ml.job.results.AnomalyRecord;
import org.elasticsearch.xpack.ml.notifications.Auditor; import org.elasticsearch.xpack.ml.notifications.Auditor;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
@ -134,8 +134,9 @@ public class JobManager extends AbstractComponent {
return getJobOrThrowIfUnknown(clusterService.state(), jobId); return getJobOrThrowIfUnknown(clusterService.state(), jobId);
} }
public Allocation getJobAllocation(String jobId) { public JobState getJobState(String jobId) {
return getAllocation(clusterService.state(), jobId); PersistentTasksInProgress tasks = clusterService.state().custom(PersistentTasksInProgress.TYPE);
return MlMetadata.getJobState(jobId, tasks);
} }
/** /**
@ -258,47 +259,54 @@ public class JobManager extends AbstractComponent {
return acknowledged && response; return acknowledged && response;
} }
@Override @Override
public ClusterState execute(ClusterState currentState) throws Exception { public ClusterState execute(ClusterState currentState) throws Exception {
return removeJobFromState(jobId, currentState); MlMetadata.Builder builder = createMlMetadataBuilder(currentState);
} builder.deleteJob(jobId, currentState.custom(PersistentTasksInProgress.TYPE));
}); return buildNewClusterState(currentState, builder);
}
});
// Step 1. When the job's status updates to DELETING, begin deleting the physical storage // Step 1. When the job has been marked as deleted, begin deleting the physical storage
// ------- // -------
CheckedConsumer<UpdateJobStateAction.Response, Exception> updateHandler = response -> { CheckedConsumer<Boolean, Exception> updateHandler = response -> {
// Successfully updated the status to DELETING, begin actually deleting // Successfully marked the job as deleted, begin actually deleting
if (response.isAcknowledged()) { if (response) {
logger.info("Job [" + jobId + "] set to [" + JobState.DELETING + "]"); logger.info("Job [" + jobId + "] was successfully marked as deleted");
} else { } else {
logger.warn("Job [" + jobId + "] change to [" + JobState.DELETING + "] was not acknowledged."); logger.warn("Job [" + jobId + "] marked as deleted wasn't acknowledged");
} }
// This task manages the physical deletion of the job (removing the results, then the index) // This task manages the physical deletion of the job (removing the results, then the index)
task.delete(jobId, indexName, client, deleteJobStateHandler::accept, actionListener::onFailure); task.delete(jobId, indexName, client, deleteJobStateHandler::accept, actionListener::onFailure);
}; };
// Step 0. Kick off the chain of callbacks with the initial UpdateStatus call // Step 0. Kick off the chain of callbacks with the initial UpdateStatus call
// ------- // -------
UpdateJobStateAction.Request updateStateListener = new UpdateJobStateAction.Request(jobId, JobState.DELETING); clusterService.submitStateUpdateTask("mark-job-as-deleted", new ClusterStateUpdateTask() {
setJobState(updateStateListener, ActionListener.wrap(updateHandler, actionListener::onFailure)); @Override
public ClusterState execute(ClusterState currentState) throws Exception {
MlMetadata currentMlMetadata = currentState.metaData().custom(MlMetadata.TYPE);
PersistentTasksInProgress tasks = currentState.custom(PersistentTasksInProgress.TYPE);
MlMetadata.Builder builder = new MlMetadata.Builder(currentMlMetadata);
builder.markJobAsDeleted(jobId, tasks);
return buildNewClusterState(currentState, builder);
}
} @Override
public void onFailure(String source, Exception e) {
actionListener.onFailure(e);
}
ClusterState removeJobFromState(String jobId, ClusterState currentState) { @Override
MlMetadata.Builder builder = createMlMetadataBuilder(currentState); public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) {
builder.deleteJob(jobId); try {
return buildNewClusterState(currentState, builder); updateHandler.accept(true);
} } catch (Exception e) {
actionListener.onFailure(e);
private Allocation getAllocation(ClusterState state, String jobId) { }
MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE); }
Allocation allocation = mlMetadata.getAllocations().get(jobId); });
if (allocation == null) {
throw new ResourceNotFoundException("No allocation found for job with id [" + jobId + "]");
}
return allocation;
} }
public Auditor audit(String jobId) { public Auditor audit(String jobId) {
@ -338,26 +346,6 @@ public class JobManager extends AbstractComponent {
}); });
} }
public void setJobState(UpdateJobStateAction.Request request, ActionListener<UpdateJobStateAction.Response> actionListener) {
clusterService.submitStateUpdateTask("set-job-state-" + request.getState() + "-" + request.getJobId(),
new AckedClusterStateUpdateTask<UpdateJobStateAction.Response>(request, actionListener) {
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
MlMetadata.Builder builder = new MlMetadata.Builder(currentState.metaData().custom(MlMetadata.TYPE));
builder.updateState(request.getJobId(), request.getState(), request.getReason());
return ClusterState.builder(currentState)
.metaData(MetaData.builder(currentState.metaData()).putCustom(MlMetadata.TYPE, builder.build()))
.build();
}
@Override
protected UpdateJobStateAction.Response newResponse(boolean acknowledged) {
return new UpdateJobStateAction.Response(acknowledged);
}
});
}
/** /**
* Update a persisted model snapshot metadata document to match the * Update a persisted model snapshot metadata document to match the
* argument supplied. * argument supplied.
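The mark-job-as-deleted update task above delegates to MlMetadata.Builder#markJobAsDeleted(jobId, tasks), whose body is not part of this hunk. A sketch of the behaviour it plausibly needs (the Job.Builder copy constructor and the exact conflict message are assumptions):

// Sketch only; refuse when the job is unknown or still has a running persistent task,
// otherwise flip the new deleted flag on the job config.
public Builder markJobAsDeleted(String jobId, @Nullable PersistentTasksInProgress tasks) {
    Job job = jobs.get(jobId);
    if (job == null) {
        throw ExceptionsHelper.missingJobException(jobId);
    }
    JobState jobState = getJobState(jobId, tasks);
    if (jobState.isAnyOf(JobState.CLOSED, JobState.FAILED) == false) {
        throw ExceptionsHelper.conflictStatusException(
                "Cannot delete job [" + jobId + "] while it is in state [" + jobState + "]");
    }
    Job.Builder jobBuilder = new Job.Builder(job);   // assumed copy constructor
    jobBuilder.setDeleted(true);
    jobs.put(jobId, jobBuilder.build());
    return this;
}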

View File

@ -61,6 +61,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days"); public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days");
public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id"); public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id");
public static final ParseField INDEX_NAME = new ParseField("index_name"); public static final ParseField INDEX_NAME = new ParseField("index_name");
public static final ParseField DELETED = new ParseField("deleted");
// Used for QueryPage // Used for QueryPage
public static final ParseField RESULTS_FIELD = new ParseField("jobs"); public static final ParseField RESULTS_FIELD = new ParseField("jobs");
@ -113,6 +114,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT); PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT);
PARSER.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); PARSER.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID);
PARSER.declareString(Builder::setIndexName, INDEX_NAME); PARSER.declareString(Builder::setIndexName, INDEX_NAME);
PARSER.declareBoolean(Builder::setDeleted, DELETED);
} }
private final String jobId; private final String jobId;
@ -133,13 +135,13 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
private final Map<String, Object> customSettings; private final Map<String, Object> customSettings;
private final String modelSnapshotId; private final String modelSnapshotId;
private final String indexName; private final String indexName;
private final boolean deleted;
public Job(String jobId, String description, Date createTime, Date finishedTime, Date lastDataTime, public Job(String jobId, String description, Date createTime, Date finishedTime, Date lastDataTime,
AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription, AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription,
ModelDebugConfig modelDebugConfig, IgnoreDowntime ignoreDowntime, ModelDebugConfig modelDebugConfig, IgnoreDowntime ignoreDowntime,
Long renormalizationWindowDays, Long backgroundPersistInterval, Long modelSnapshotRetentionDays, Long resultsRetentionDays, Long renormalizationWindowDays, Long backgroundPersistInterval, Long modelSnapshotRetentionDays, Long resultsRetentionDays,
Map<String, Object> customSettings, String modelSnapshotId, String indexName) { Map<String, Object> customSettings, String modelSnapshotId, String indexName, boolean deleted) {
if (analysisConfig == null) { if (analysisConfig == null) {
throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_MISSING_ANALYSISCONFIG)); throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_MISSING_ANALYSISCONFIG));
} }
@ -179,6 +181,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
this.customSettings = customSettings; this.customSettings = customSettings;
this.modelSnapshotId = modelSnapshotId; this.modelSnapshotId = modelSnapshotId;
this.indexName = indexName; this.indexName = indexName;
this.deleted = deleted;
} }
public Job(StreamInput in) throws IOException { public Job(StreamInput in) throws IOException {
@ -199,6 +202,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
customSettings = in.readMap(); customSettings = in.readMap();
modelSnapshotId = in.readOptionalString(); modelSnapshotId = in.readOptionalString();
indexName = in.readString(); indexName = in.readString();
deleted = in.readBoolean();
} }
/** /**
@ -338,6 +342,10 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
return modelSnapshotId; return modelSnapshotId;
} }
public boolean isDeleted() {
return deleted;
}
/** /**
* Get a list of all input data fields mentioned in the job configuration, * Get a list of all input data fields mentioned in the job configuration,
* namely analysis fields and the time field. * namely analysis fields and the time field.
@ -395,6 +403,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
out.writeMap(customSettings); out.writeMap(customSettings);
out.writeOptionalString(modelSnapshotId); out.writeOptionalString(modelSnapshotId);
out.writeString(indexName); out.writeString(indexName);
out.writeBoolean(deleted);
} }
@Override @Override
@ -453,6 +462,9 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
builder.field(MODEL_SNAPSHOT_ID.getPreferredName(), modelSnapshotId); builder.field(MODEL_SNAPSHOT_ID.getPreferredName(), modelSnapshotId);
} }
builder.field(INDEX_NAME.getPreferredName(), indexName); builder.field(INDEX_NAME.getPreferredName(), indexName);
if (params.paramAsBoolean("all", false)) {
builder.field(DELETED.getPreferredName(), deleted);
}
return builder; return builder;
} }
@ -481,7 +493,8 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
&& Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays) && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays)
&& Objects.equals(this.customSettings, that.customSettings) && Objects.equals(this.customSettings, that.customSettings)
&& Objects.equals(this.modelSnapshotId, that.modelSnapshotId) && Objects.equals(this.modelSnapshotId, that.modelSnapshotId)
&& Objects.equals(this.indexName, that.indexName); && Objects.equals(this.indexName, that.indexName)
&& Objects.equals(this.deleted, that.deleted);
} }
@Override @Override
@ -489,7 +502,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
return Objects.hash(jobId, description, createTime, finishedTime, lastDataTime, analysisConfig, return Objects.hash(jobId, description, createTime, finishedTime, lastDataTime, analysisConfig,
analysisLimits, dataDescription, modelDebugConfig, renormalizationWindowDays, analysisLimits, dataDescription, modelDebugConfig, renormalizationWindowDays,
backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, ignoreDowntime, customSettings, backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, ignoreDowntime, customSettings,
modelSnapshotId, indexName); modelSnapshotId, indexName, deleted);
} }
// Class already extends from AbstractDiffable, so copied from ToXContentToBytes#toString() // Class already extends from AbstractDiffable, so copied from ToXContentToBytes#toString()
@ -524,6 +537,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
private Map<String, Object> customSettings; private Map<String, Object> customSettings;
private String modelSnapshotId; private String modelSnapshotId;
private String indexName; private String indexName;
private boolean deleted;
public Builder() { public Builder() {
} }
@ -634,6 +648,10 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
this.indexName = indexName; this.indexName = indexName;
} }
public void setDeleted(boolean deleted) {
this.deleted = deleted;
}
public Job build() { public Job build() {
return build(false, null); return build(false, null);
} }
@ -665,8 +683,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
id, description, createTime, finishedTime, lastDataTime, analysisConfig, analysisLimits, id, description, createTime, finishedTime, lastDataTime, analysisConfig, analysisLimits,
dataDescription, modelDebugConfig, ignoreDowntime, renormalizationWindowDays, dataDescription, modelDebugConfig, ignoreDowntime, renormalizationWindowDays,
backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, modelSnapshotId, backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, modelSnapshotId,
indexName indexName, deleted);
);
} }
} }
} }
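Because the new deleted field is gated behind the "all" param in toXContent, ordinary job listings are unchanged and callers must opt in explicitly. A small sketch (assuming Job#toXContent writes a self-contained object):

// Emits the "deleted" field only when the caller passes all=true.
Map<String, String> params = Collections.singletonMap("all", "true");
XContentBuilder builder = XContentFactory.jsonBuilder();
job.toXContent(builder, new ToXContent.MapParams(params));                 // includes "deleted"
job.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS);    // omits "deleted"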

View File

@ -7,7 +7,8 @@ package org.elasticsearch.xpack.ml.job.config;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.tasks.Task;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
@ -18,9 +19,11 @@ import java.util.Locale;
* When a job is created it is initialised to the state closed * When a job is created it is initialised to the state closed
* i.e. it is not running. * i.e. it is not running.
*/ */
public enum JobState implements Writeable { public enum JobState implements Task.Status {
CLOSING, CLOSED, OPENING, OPENED, FAILED, DELETING; CLOSING, CLOSED, OPENING, OPENED, FAILED;
public static final String NAME = "JobState";
public static JobState fromString(String name) { public static JobState fromString(String name) {
return valueOf(name.trim().toUpperCase(Locale.ROOT)); return valueOf(name.trim().toUpperCase(Locale.ROOT));
@ -34,11 +37,22 @@ public enum JobState implements Writeable {
return values()[ordinal]; return values()[ordinal];
} }
@Override
public String getWriteableName() {
return NAME;
}
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(ordinal()); out.writeVInt(ordinal());
} }
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.value(this.toString().toLowerCase(Locale.ROOT));
return builder;
}
/** /**
* @return {@code true} if state matches any of the given {@code candidates} * @return {@code true} if state matches any of the given {@code candidates}
*/ */
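A quick round-trip sketch of the dual serialization JobState now supports: the ordinal-based wire format for transport, and lowercase strings for JSON via the new toXContent above:

// Wire round trip (fromStream reads the ordinal written by writeTo).
BytesStreamOutput out = new BytesStreamOutput();
JobState.OPENED.writeTo(out);
JobState read = JobState.fromStream(out.bytes().streamInput());
assert read == JobState.OPENED;

// JSON rendering: a bare lowercase value, e.g. "opened".
XContentBuilder json = XContentFactory.jsonBuilder();
JobState.OPENED.toXContent(json, ToXContent.EMPTY_PARAMS);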

View File

@ -1,204 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.job.metadata;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState;
import java.io.IOException;
import java.util.Objects;
public class Allocation extends AbstractDiffable<Allocation> implements ToXContent {
private static final ParseField NODE_ID_FIELD = new ParseField("node_id");
private static final ParseField JOB_ID_FIELD = new ParseField("job_id");
private static final ParseField IGNORE_DOWNTIME_FIELD = new ParseField("ignore_downtime");
public static final ParseField STATE = new ParseField("state");
public static final ParseField STATE_REASON = new ParseField("state_reason");
static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("allocation", Builder::new);
static {
PARSER.declareString(Builder::setNodeId, NODE_ID_FIELD);
PARSER.declareString(Builder::setJobId, JOB_ID_FIELD);
PARSER.declareBoolean(Builder::setIgnoreDowntime, IGNORE_DOWNTIME_FIELD);
PARSER.declareField(Builder::setState, (p, c) -> JobState.fromString(p.text()), STATE, ObjectParser.ValueType.STRING);
PARSER.declareString(Builder::setStateReason, STATE_REASON);
}
private final String nodeId;
private final String jobId;
private final boolean ignoreDowntime;
private final JobState state;
private final String stateReason;
public Allocation(String nodeId, String jobId, boolean ignoreDowntime, JobState state, String stateReason) {
this.nodeId = nodeId;
this.jobId = jobId;
this.ignoreDowntime = ignoreDowntime;
this.state = state;
this.stateReason = stateReason;
}
public Allocation(StreamInput in) throws IOException {
this.nodeId = in.readOptionalString();
this.jobId = in.readString();
this.ignoreDowntime = in.readBoolean();
this.state = JobState.fromStream(in);
this.stateReason = in.readOptionalString();
}
public String getNodeId() {
return nodeId;
}
public String getJobId() {
return jobId;
}
/**
* @return Whether to ignore downtime at startup.
*
* When the job state is set to STARTED, ignoreDowntime will be set to false.
*/
public boolean isIgnoreDowntime() {
return ignoreDowntime;
}
public JobState getState() {
return state;
}
public String getStateReason() {
return stateReason;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(nodeId);
out.writeString(jobId);
out.writeBoolean(ignoreDowntime);
state.writeTo(out);
out.writeOptionalString(stateReason);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (nodeId != null) {
builder.field(NODE_ID_FIELD.getPreferredName(), nodeId);
}
builder.field(JOB_ID_FIELD.getPreferredName(), jobId);
builder.field(IGNORE_DOWNTIME_FIELD.getPreferredName(), ignoreDowntime);
builder.field(STATE.getPreferredName(), state);
if (stateReason != null) {
builder.field(STATE_REASON.getPreferredName(), stateReason);
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Allocation that = (Allocation) o;
return Objects.equals(nodeId, that.nodeId) &&
Objects.equals(jobId, that.jobId) &&
Objects.equals(ignoreDowntime, that.ignoreDowntime) &&
Objects.equals(state, that.state) &&
Objects.equals(stateReason, that.stateReason);
}
@Override
public int hashCode() {
return Objects.hash(nodeId, jobId, ignoreDowntime, state, stateReason);
}
// Class already extends from AbstractDiffable, so copied from ToXContentToBytes#toString()
@Override
public final String toString() {
return Strings.toString(this);
}
public static class Builder {
private String nodeId;
private String jobId;
private boolean ignoreDowntime;
private JobState state;
private String stateReason;
public Builder() {
}
public Builder(Job job) {
this.jobId = job.getId();
}
public Builder(Allocation allocation) {
this.nodeId = allocation.nodeId;
this.jobId = allocation.jobId;
this.ignoreDowntime = allocation.ignoreDowntime;
this.state = allocation.state;
this.stateReason = allocation.stateReason;
}
public void setNodeId(String nodeId) {
this.nodeId = nodeId;
}
public void setJobId(String jobId) {
this.jobId = jobId;
}
public void setIgnoreDowntime(boolean ignoreDownTime) {
this.ignoreDowntime = ignoreDownTime;
}
@SuppressWarnings("incomplete-switch")
public void setState(JobState newState) {
if (this.state != null) {
switch (newState) {
case CLOSING:
if (this.state != JobState.OPENED) {
throw new IllegalArgumentException("[" + jobId + "] expected state [" + JobState.OPENED
+ "], but got [" + state +"]");
}
break;
case OPENING:
if (this.state.isAnyOf(JobState.CLOSED, JobState.FAILED) == false) {
throw new IllegalArgumentException("[" + jobId + "] expected state [" + JobState.CLOSED
+ "] or [" + JobState.FAILED + "], but got [" + state +"]");
}
break;
case OPENED:
ignoreDowntime = false;
break;
}
}
this.state = newState;
}
public void setStateReason(String stateReason) {
this.stateReason = stateReason;
}
public Allocation build() {
return new Allocation(nodeId, jobId, ignoreDowntime, state, stateReason);
}
}
}

View File

@ -5,7 +5,6 @@
*/ */
package org.elasticsearch.xpack.ml.job.metadata; package org.elasticsearch.xpack.ml.job.metadata;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.cluster.AbstractDiffable;
@@ -22,7 +21,7 @@ import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.xpack.ml.action.OpenJobAction;
 import org.elasticsearch.xpack.ml.action.StartDatafeedAction;
 import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
 import org.elasticsearch.xpack.ml.datafeed.DatafeedJobValidator;
@@ -37,7 +36,6 @@ import org.elasticsearch.xpack.persistent.PersistentTasksInProgress.PersistentTa
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.Date;
 import java.util.EnumSet;
 import java.util.Map;
 import java.util.Objects;
@@ -49,30 +47,22 @@ import java.util.function.Predicate;
 public class MlMetadata implements MetaData.Custom {

     private static final ParseField JOBS_FIELD = new ParseField("jobs");
-    private static final ParseField ALLOCATIONS_FIELD = new ParseField("allocations");
     private static final ParseField DATAFEEDS_FIELD = new ParseField("datafeeds");

     public static final String TYPE = "ml";
-    public static final MlMetadata EMPTY_METADATA = new MlMetadata(Collections.emptySortedMap(),
-            Collections.emptySortedMap(), Collections.emptySortedMap());
+    public static final MlMetadata EMPTY_METADATA = new MlMetadata(Collections.emptySortedMap(), Collections.emptySortedMap());

-    public static final ObjectParser<Builder, Void> ML_METADATA_PARSER = new ObjectParser<>("ml_metadata",
-            Builder::new);
+    public static final ObjectParser<Builder, Void> ML_METADATA_PARSER = new ObjectParser<>("ml_metadata", Builder::new);

     static {
         ML_METADATA_PARSER.declareObjectArray(Builder::putJobs, (p, c) -> Job.PARSER.apply(p, c).build(), JOBS_FIELD);
-        ML_METADATA_PARSER.declareObjectArray(Builder::putAllocations, Allocation.PARSER, ALLOCATIONS_FIELD);
         ML_METADATA_PARSER.declareObjectArray(Builder::putDatafeeds, (p, c) -> DatafeedConfig.PARSER.apply(p, c).build(), DATAFEEDS_FIELD);
     }

     private final SortedMap<String, Job> jobs;
-    private final SortedMap<String, Allocation> allocations;
     private final SortedMap<String, DatafeedConfig> datafeeds;

-    private MlMetadata(SortedMap<String, Job> jobs, SortedMap<String, Allocation> allocations,
-                       SortedMap<String, DatafeedConfig> datafeeds) {
+    private MlMetadata(SortedMap<String, Job> jobs, SortedMap<String, DatafeedConfig> datafeeds) {
         this.jobs = Collections.unmodifiableSortedMap(jobs);
-        this.allocations = Collections.unmodifiableSortedMap(allocations);
         this.datafeeds = Collections.unmodifiableSortedMap(datafeeds);
     }

@@ -80,10 +70,6 @@ public class MlMetadata implements MetaData.Custom {
         return jobs;
     }

-    public SortedMap<String, Allocation> getAllocations() {
-        return allocations;
-    }
-
     public SortedMap<String, DatafeedConfig> getDatafeeds() {
         return datafeeds;
     }

@@ -117,12 +103,6 @@ public class MlMetadata implements MetaData.Custom {
         }
         this.jobs = jobs;
         size = in.readVInt();
-        TreeMap<String, Allocation> allocations = new TreeMap<>();
-        for (int i = 0; i < size; i++) {
-            allocations.put(in.readString(), new Allocation(in));
-        }
-        this.allocations = allocations;
-        size = in.readVInt();
         TreeMap<String, DatafeedConfig> datafeeds = new TreeMap<>();
         for (int i = 0; i < size; i++) {
             datafeeds.put(in.readString(), new DatafeedConfig(in));

@@ -133,7 +113,6 @@ public class MlMetadata implements MetaData.Custom {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         writeMap(jobs, out);
-        writeMap(allocations, out);
         writeMap(datafeeds, out);
     }

@@ -148,7 +127,6 @@ public class MlMetadata implements MetaData.Custom {
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         mapValuesToXContent(JOBS_FIELD, jobs, builder, params);
-        mapValuesToXContent(ALLOCATIONS_FIELD, allocations, builder, params);
         mapValuesToXContent(DATAFEEDS_FIELD, datafeeds, builder, params);
         return builder;
     }

@@ -165,20 +143,16 @@ public class MlMetadata implements MetaData.Custom {
     public static class MlMetadataDiff implements NamedDiff<MetaData.Custom> {

         final Diff<Map<String, Job>> jobs;
-        final Diff<Map<String, Allocation>> allocations;
         final Diff<Map<String, DatafeedConfig>> datafeeds;

         MlMetadataDiff(MlMetadata before, MlMetadata after) {
             this.jobs = DiffableUtils.diff(before.jobs, after.jobs, DiffableUtils.getStringKeySerializer());
-            this.allocations = DiffableUtils.diff(before.allocations, after.allocations, DiffableUtils.getStringKeySerializer());
             this.datafeeds = DiffableUtils.diff(before.datafeeds, after.datafeeds, DiffableUtils.getStringKeySerializer());
         }

         public MlMetadataDiff(StreamInput in) throws IOException {
             this.jobs = DiffableUtils.readJdkMapDiff(in, DiffableUtils.getStringKeySerializer(), Job::new,
                     MlMetadataDiff::readJobDiffFrom);
-            this.allocations = DiffableUtils.readJdkMapDiff(in, DiffableUtils.getStringKeySerializer(), Allocation::new,
-                    MlMetadataDiff::readAllocationDiffFrom);
             this.datafeeds = DiffableUtils.readJdkMapDiff(in, DiffableUtils.getStringKeySerializer(), DatafeedConfig::new,
                     MlMetadataDiff::readSchedulerDiffFrom);
         }

@@ -186,15 +160,13 @@ public class MlMetadata implements MetaData.Custom {
         @Override
         public MetaData.Custom apply(MetaData.Custom part) {
             TreeMap<String, Job> newJobs = new TreeMap<>(jobs.apply(((MlMetadata) part).jobs));
-            TreeMap<String, Allocation> newAllocations = new TreeMap<>(allocations.apply(((MlMetadata) part).allocations));
             TreeMap<String, DatafeedConfig> newDatafeeds = new TreeMap<>(datafeeds.apply(((MlMetadata) part).datafeeds));
-            return new MlMetadata(newJobs, newAllocations, newDatafeeds);
+            return new MlMetadata(newJobs, newDatafeeds);
         }

         @Override
         public void writeTo(StreamOutput out) throws IOException {
             jobs.writeTo(out);
-            allocations.writeTo(out);
             datafeeds.writeTo(out);
         }

@@ -207,10 +179,6 @@ public class MlMetadata implements MetaData.Custom {
             return AbstractDiffable.readDiffFrom(Job::new, in);
         }

-        static Diff<Allocation> readAllocationDiffFrom(StreamInput in) throws IOException {
-            return AbstractDiffable.readDiffFrom(Allocation::new, in);
-        }
-
         static Diff<DatafeedConfig> readSchedulerDiffFrom(StreamInput in) throws IOException {
             return AbstractDiffable.readDiffFrom(DatafeedConfig::new, in);
         }

@@ -224,7 +192,6 @@ public class MlMetadata implements MetaData.Custom {
             return false;
         MlMetadata that = (MlMetadata) o;
         return Objects.equals(jobs, that.jobs) &&
-                Objects.equals(allocations, that.allocations) &&
                 Objects.equals(datafeeds, that.datafeeds);
     }

@@ -235,24 +202,21 @@ public class MlMetadata implements MetaData.Custom {
     @Override
     public int hashCode() {
-        return Objects.hash(jobs, allocations, datafeeds);
+        return Objects.hash(jobs, datafeeds);
     }

     public static class Builder {

         private TreeMap<String, Job> jobs;
-        private TreeMap<String, Allocation> allocations;
         private TreeMap<String, DatafeedConfig> datafeeds;

         public Builder() {
             this.jobs = new TreeMap<>();
-            this.allocations = new TreeMap<>();
             this.datafeeds = new TreeMap<>();
         }

         public Builder(MlMetadata previous) {
             jobs = new TreeMap<>(previous.jobs);
-            allocations = new TreeMap<>(previous.allocations);
             datafeeds = new TreeMap<>(previous.datafeeds);
         }

@@ -261,39 +225,27 @@ public class MlMetadata implements MetaData.Custom {
                 throw ExceptionsHelper.jobAlreadyExists(job.getId());
             }
             this.jobs.put(job.getId(), job);
-            Allocation allocation = allocations.get(job.getId());
-            if (allocation == null) {
-                Allocation.Builder builder = new Allocation.Builder(job);
-                builder.setState(JobState.CLOSED);
-                allocations.put(job.getId(), builder.build());
-            }
             return this;
         }

-        public Builder deleteJob(String jobId) {
-            Job job = jobs.remove(jobId);
-            if (job == null) {
-                throw new ResourceNotFoundException("job [" + jobId + "] does not exist");
-            }
+        public Builder deleteJob(String jobId, PersistentTasksInProgress tasks) {
             Optional<DatafeedConfig> datafeed = getDatafeedByJobId(jobId);
             if (datafeed.isPresent()) {
                 throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] while datafeed ["
                         + datafeed.get().getId() + "] refers to it");
             }
-
-            Allocation previousAllocation = this.allocations.remove(jobId);
-            if (previousAllocation != null) {
-                if (!previousAllocation.getState().equals(JobState.DELETING)) {
-                    throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] because it is in ["
-                            + previousAllocation.getState() + "] state. Must be in [" + JobState.DELETING + "] state.");
-                }
-            } else {
-                throw new ResourceNotFoundException("No Cluster State found for job [" + jobId + "]");
+            JobState jobState = MlMetadata.getJobState(jobId, tasks);
+            if (jobState.isAnyOf(JobState.CLOSED, JobState.FAILED) == false) {
+                throw ExceptionsHelper.conflictStatusException("Unexpected job state [" + jobState + "], expected [" +
+                        JobState.CLOSED + "]");
+            }
+            Job job = jobs.remove(jobId);
+            if (job == null) {
+                throw new ResourceNotFoundException("job [" + jobId + "] does not exist");
+            }
+            if (job.isDeleted() == false) {
+                throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] because it hasn't been marked as deleted");
             }
             return this;
         }

@@ -341,15 +293,6 @@ public class MlMetadata implements MetaData.Custom {
             return datafeeds.values().stream().filter(s -> s.getJobId().equals(jobId)).findFirst();
         }

-        // only for parsing
-        private Builder putAllocations(Collection<Allocation.Builder> allocations) {
-            for (Allocation.Builder allocationBuilder : allocations) {
-                Allocation allocation = allocationBuilder.build();
-                this.allocations.put(allocation.getJobId(), allocation);
-            }
-            return this;
-        }
-
         private Builder putJobs(Collection<Job> jobs) {
             for (Job job : jobs) {
                 putJob(job, true);

@@ -365,76 +308,55 @@ public class MlMetadata implements MetaData.Custom {
         }

         public MlMetadata build() {
-            return new MlMetadata(jobs, allocations, datafeeds);
+            return new MlMetadata(jobs, datafeeds);
         }

-        public Builder assignToNode(String jobId, String nodeId) {
-            Allocation allocation = allocations.get(jobId);
-            if (allocation == null) {
-                throw new IllegalStateException("[" + jobId + "] no allocation to assign to node [" + nodeId + "]");
-            }
-            Allocation.Builder builder = new Allocation.Builder(allocation);
-            builder.setNodeId(nodeId);
-            allocations.put(jobId, builder.build());
-            return this;
-        }
-
-        public Builder updateState(String jobId, JobState jobState, @Nullable String reason) {
-            if (jobs.containsKey(jobId) == false) {
-                throw ExceptionsHelper.missingJobException(jobId);
-            }
-
-            Allocation previous = allocations.get(jobId);
-            if (previous == null) {
-                throw new IllegalStateException("[" + jobId + "] no allocation exist to update the state to [" + jobState + "]");
-            }
-
-            // Cannot update the state to DELETING if there are datafeeds attached
-            if (jobState.equals(JobState.DELETING)) {
-                Optional<DatafeedConfig> datafeed = getDatafeedByJobId(jobId);
-                if (datafeed.isPresent()) {
-                    throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] while datafeed ["
-                            + datafeed.get().getId() + "] refers to it");
-                }
-            }
-
-            if (previous.getState().equals(JobState.DELETING)) {
-                // If we're already Deleting there's nothing to do
-                if (jobState.equals(JobState.DELETING)) {
-                    return this;
-                }
-
-                // Once a job goes into Deleting, it cannot be changed
-                throw new ElasticsearchStatusException("Cannot change state of job [" + jobId + "] to [" + jobState + "] because " +
-                        "it is currently in [" + JobState.DELETING + "] state.", RestStatus.CONFLICT);
-            }
-            Allocation.Builder builder = new Allocation.Builder(previous);
-            builder.setState(jobState);
-            if (reason != null) {
-                builder.setStateReason(reason);
-            }
-            if (previous.getState() != jobState && jobState == JobState.CLOSED) {
-                Job.Builder job = new Job.Builder(this.jobs.get(jobId));
-                job.setFinishedTime(new Date());
-                this.jobs.put(job.getId(), job.build());
-            }
-            allocations.put(jobId, builder.build());
-            return this;
-        }
-
-        public Builder setIgnoreDowntime(String jobId) {
-            if (jobs.containsKey(jobId) == false) {
-                throw ExceptionsHelper.missingJobException(jobId);
-            }
-
-            Allocation allocation = allocations.get(jobId);
-            if (allocation == null) {
-                throw new IllegalStateException("[" + jobId + "] no allocation to ignore downtime");
-            }
-            Allocation.Builder builder = new Allocation.Builder(allocation);
-            builder.setIgnoreDowntime(true);
-            allocations.put(jobId, builder.build());
-            return this;
+        public void markJobAsDeleted(String jobId, PersistentTasksInProgress tasks) {
+            Job job = jobs.get(jobId);
+            if (job == null) {
+                throw ExceptionsHelper.missingJobException(jobId);
+            }
+            if (job.isDeleted()) {
+                // Job still exists
+                return;
+            }
+            Optional<DatafeedConfig> datafeed = getDatafeedByJobId(jobId);
+            if (datafeed.isPresent()) {
+                throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] while datafeed ["
+                        + datafeed.get().getId() + "] refers to it");
+            }
+            JobState jobState = getJobState(jobId, tasks);
+            if (jobState.isAnyOf(JobState.CLOSED, JobState.FAILED) == false) {
+                throw ExceptionsHelper.conflictStatusException("Unexpected job state [" + jobState + "], expected [" +
+                        JobState.CLOSED + "]");
+            }
+            Job.Builder jobBuilder = new Job.Builder(job);
+            jobBuilder.setDeleted(true);
+            putJob(jobBuilder.build(), true);
         }
     }

+    public static PersistentTasksInProgress.PersistentTaskInProgress<?> getTask(String jobId, @Nullable PersistentTasksInProgress tasks) {
+        if (tasks != null) {
+            Predicate<PersistentTasksInProgress.PersistentTaskInProgress<?>> p = t -> {
+                OpenJobAction.Request storedRequest = (OpenJobAction.Request) t.getRequest();
+                return storedRequest.getJobId().equals(jobId);
+            };
+            for (PersistentTasksInProgress.PersistentTaskInProgress<?> task : tasks.findTasks(OpenJobAction.NAME, p)) {
+                return task;
+            }
+        }
+        return null;
+    }
+
+    public static JobState getJobState(String jobId, @Nullable PersistentTasksInProgress tasks) {
+        PersistentTasksInProgress.PersistentTaskInProgress<?> task = getTask(jobId, tasks);
+        if (task != null && task.getStatus() != null) {
+            return (JobState) task.getStatus();
+        } else {
+            // If we haven't opened the job then there will be no persistent task, which is the same as if the job was closed
+            return JobState.CLOSED;
+        }
+    }
 }
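Editor's note: with allocations gone from MlMetadata, callers derive a job's state from the persistent task in cluster state. A minimal sketch of the lookup (not part of this commit; it only assumes the getJobState helper added above):

import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;

class JobStateLookup {
    // Jobs without a persistent task report CLOSED, mirroring MlMetadata.getJobState above
    static JobState stateOf(String jobId, ClusterState state) {
        PersistentTasksInProgress tasks = state.custom(PersistentTasksInProgress.TYPE);
        return MlMetadata.getJobState(jobId, tasks);
    }
}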

View File: JobProvider.java

@@ -23,10 +23,12 @@ import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.routing.UnassignedInfo;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -116,10 +118,14 @@ public class JobProvider {
     private final Client client;
     private final int numberOfReplicas;

+    // Allows us in test mode to disable the delay of shard allocation, so that in tests we don't have to wait
+    // for at least a minute for shards to get allocated.
+    private final TimeValue delayedNodeTimeOutSetting;

-    public JobProvider(Client client, int numberOfReplicas) {
+    public JobProvider(Client client, int numberOfReplicas, TimeValue delayedNodeTimeOutSetting) {
         this.client = Objects.requireNonNull(client);
         this.numberOfReplicas = numberOfReplicas;
+        this.delayedNodeTimeOutSetting = delayedNodeTimeOutSetting;
     }

     /**
@@ -172,6 +178,7 @@ public class JobProvider {
                 // least possible burden on Elasticsearch
                 .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                 .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, numberOfReplicas)
+                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting)
                 // Sacrifice durability for performance: in the event of power
                 // failure we can lose the last 5 seconds of changes, but it's
                 // much faster
@@ -200,6 +207,7 @@ public class JobProvider {
                 // least possible burden on Elasticsearch
                 .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                 .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, numberOfReplicas)
+                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting)
                 // Sacrifice durability for performance: in the event of power
                 // failure we can lose the last 5 seconds of changes, but it's
                 // much faster
@@ -218,6 +226,7 @@ public class JobProvider {
                 // least possible burden on Elasticsearch
                 .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                 .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, numberOfReplicas)
+                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting)
                 // We need to allow fields not mentioned in the mappings to
                 // pick up default mappings and be used in queries
                 .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey(), true);
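Editor's note: all three index-creation sites gain the same delayed-allocation setting. A hedged sketch of the shared shape, with mlIndexSettings as an illustrative name (tests can pass TimeValue.ZERO to skip the default one-minute delayed-allocation wait):

import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;

class IndexSettingsSketch {
    // Base settings every ML index in this class starts from
    static Settings.Builder mlIndexSettings(int numberOfReplicas, TimeValue delayedNodeTimeOut) {
        return Settings.builder()
                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, numberOfReplicas)
                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOut);
    }
}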

View File: AutodetectCommunicator.java

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.job.process.autodetect;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.common.CheckedSupplier;
 import org.elasticsearch.common.logging.Loggers;
@@ -44,6 +45,7 @@ public class AutodetectCommunicator implements Closeable {
     private static final Logger LOGGER = Loggers.getLogger(AutodetectCommunicator.class);
     private static final Duration FLUSH_PROCESS_CHECK_FREQUENCY = Duration.ofSeconds(1);

+    private final long taskId;
     private final Job job;
     private final DataCountsReporter dataCountsReporter;
     private final AutodetectProcess autodetectProcess;
@@ -52,8 +54,9 @@ public class AutodetectCommunicator implements Closeable {
     final AtomicReference<CountDownLatch> inUse = new AtomicReference<>();

-    public AutodetectCommunicator(Job job, AutodetectProcess process, DataCountsReporter dataCountsReporter,
+    public AutodetectCommunicator(long taskId, Job job, AutodetectProcess process, DataCountsReporter dataCountsReporter,
                                   AutoDetectResultProcessor autoDetectResultProcessor, Consumer<Exception> handler) {
+        this.taskId = taskId;
         this.job = job;
         this.autodetectProcess = process;
         this.dataCountsReporter = dataCountsReporter;
@@ -86,11 +89,15 @@ public class AutodetectCommunicator implements Closeable {
     @Override
     public void close() throws IOException {
+        close(null);
+    }
+
+    public void close(String errorReason) throws IOException {
         checkAndRun(() -> Messages.getMessage(Messages.JOB_DATA_CONCURRENT_USE_CLOSE, job.getId()), () -> {
             dataCountsReporter.close();
             autodetectProcess.close();
             autoDetectResultProcessor.awaitCompletion();
-            handler.accept(null);
+            handler.accept(errorReason != null ? new ElasticsearchException(errorReason) : null);
             return null;
         }, true);
     }
@@ -162,6 +169,10 @@ public class AutodetectCommunicator implements Closeable {
         return dataCountsReporter.runningTotalStats();
     }

+    public long getTaskId() {
+        return taskId;
+    }
+
     private <T> T checkAndRun(Supplier<String> errorMessage, CheckedSupplier<T, IOException> callback, boolean wait) throws IOException {
         CountDownLatch latch = new CountDownLatch(1);
         if (inUse.compareAndSet(null, latch)) {

View File: AutodetectProcessManager.java

@@ -9,6 +9,7 @@ import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.client.Client;
+import org.elasticsearch.common.CheckedConsumer;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.settings.Setting;
@@ -17,14 +18,12 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.ml.MlPlugin;
-import org.elasticsearch.xpack.ml.action.UpdateJobStateAction;
 import org.elasticsearch.xpack.ml.job.JobManager;
 import org.elasticsearch.xpack.ml.job.config.DetectionRule;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.config.JobState;
 import org.elasticsearch.xpack.ml.job.config.MlFilter;
 import org.elasticsearch.xpack.ml.job.config.ModelDebugConfig;
-import org.elasticsearch.xpack.ml.job.metadata.Allocation;
 import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister;
 import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
 import org.elasticsearch.xpack.ml.job.persistence.JobRenormalizedResultsPersister;
@@ -42,6 +41,7 @@ import org.elasticsearch.xpack.ml.job.process.normalizer.Renormalizer;
 import org.elasticsearch.xpack.ml.job.process.normalizer.ScoresUpdater;
 import org.elasticsearch.xpack.ml.job.process.normalizer.ShortCircuitingRenormalizer;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
+import org.elasticsearch.xpack.persistent.UpdatePersistentTaskStatusAction;

 import java.io.IOException;
 import java.io.InputStream;
@@ -116,15 +116,15 @@ public class AutodetectProcessManager extends AbstractComponent {
      * @return Count of records, fields, bytes, etc written
      */
     public DataCounts processData(String jobId, InputStream input, DataLoadParams params) {
-        Allocation allocation = jobManager.getJobAllocation(jobId);
-        if (allocation.getState() != JobState.OPENED) {
-            throw new IllegalArgumentException("job [" + jobId + "] state is [" + allocation.getState() + "], but must be ["
+        JobState jobState = jobManager.getJobState(jobId);
+        if (jobState != JobState.OPENED) {
+            throw new IllegalArgumentException("job [" + jobId + "] state is [" + jobState + "], but must be ["
                     + JobState.OPENED + "] for processing data");
         }

         AutodetectCommunicator communicator = autoDetectCommunicatorByJob.get(jobId);
         if (communicator == null) {
-            throw new IllegalStateException("job [" + jobId + "] with state [" + allocation.getState() + "] hasn't been started");
+            throw new IllegalStateException("job [" + jobId + "] with state [" + jobState + "] hasn't been started");
         }
         try {
             return communicator.writeToJob(input, params);
@@ -188,21 +188,22 @@ public class AutodetectProcessManager extends AbstractComponent {
         // TODO check for errors from autodetects
     }

-    public void openJob(String jobId, boolean ignoreDowntime, Consumer<Exception> handler) {
+    public void openJob(String jobId, long taskId, boolean ignoreDowntime, Consumer<Exception> handler) {
         gatherRequiredInformation(jobId, (dataCounts, modelSnapshot, quantiles, filters) -> {
-            autoDetectCommunicatorByJob.computeIfAbsent(jobId, id -> {
-                AutodetectCommunicator communicator =
-                        create(id, dataCounts, modelSnapshot, quantiles, filters, ignoreDowntime, handler);
-                try {
-                    communicator.writeJobInputHeader();
-                } catch (IOException ioe) {
-                    String msg = String.format(Locale.ROOT, "[%s] exception while opening job", jobId);
-                    logger.error(msg);
-                    throw ExceptionsHelper.serverError(msg, ioe);
-                }
-                setJobState(jobId, JobState.OPENED);
-                return communicator;
-            });
+            try {
+                AutodetectCommunicator communicator = autoDetectCommunicatorByJob.computeIfAbsent(jobId, id ->
+                        create(id, taskId, dataCounts, modelSnapshot, quantiles, filters, ignoreDowntime, handler));
+                communicator.writeJobInputHeader();
+                setJobState(taskId, jobId, JobState.OPENED);
+            } catch (Exception e1) {
+                if (e1 instanceof ElasticsearchStatusException) {
+                    logger.info(e1.getMessage());
+                } else {
+                    String msg = String.format(Locale.ROOT, "[%s] exception while opening job", jobId);
+                    logger.error(msg, e1);
+                }
+                setJobState(taskId, JobState.FAILED, e2 -> handler.accept(e1));
+            }
         }, handler);
     }
@@ -228,11 +229,11 @@ public class AutodetectProcessManager extends AbstractComponent {
     }

-    AutodetectCommunicator create(String jobId, DataCounts dataCounts, ModelSnapshot modelSnapshot, Quantiles quantiles,
+    AutodetectCommunicator create(String jobId, long taskId, DataCounts dataCounts, ModelSnapshot modelSnapshot, Quantiles quantiles,
                                   Set<MlFilter> filters, boolean ignoreDowntime, Consumer<Exception> handler) {
         if (autoDetectCommunicatorByJob.size() == maxAllowedRunningJobs) {
             throw new ElasticsearchStatusException("max running job capacity [" + maxAllowedRunningJobs + "] reached",
-                    RestStatus.CONFLICT);
+                    RestStatus.TOO_MANY_REQUESTS);
         }

         Job job = jobManager.getJobOrThrowIfUnknown(jobId);
@@ -261,26 +262,26 @@ public class AutodetectProcessManager extends AbstractComponent {
             }
             throw e;
         }
-        return new AutodetectCommunicator(job, process, dataCountsReporter, processor, handler);
+        return new AutodetectCommunicator(taskId, job, process, dataCountsReporter, processor, handler);
         }
     }

     /**
      * Stop the running job and mark it as finished.<br>
      * @param jobId The job to stop
-     *
+     * @param errorReason If caused by failure, the reason for closing the job
      */
-    public void closeJob(String jobId) {
-        logger.debug("Closing job {}", jobId);
+    public void closeJob(String jobId, String errorReason) {
+        logger.debug("Attempting to close job [{}], because [{}]", jobId, errorReason);
         AutodetectCommunicator communicator = autoDetectCommunicatorByJob.remove(jobId);
         if (communicator == null) {
             logger.debug("Cannot close: no active autodetect process for job {}", jobId);
             return;
         }
+        logger.info("Closing job [{}], because [{}]", jobId, errorReason);
         try {
-            communicator.close();
-            setJobState(jobId, JobState.CLOSED);
+            communicator.close(errorReason);
         } catch (Exception e) {
             logger.warn("Exception closing stopped process input stream", e);
             throw ExceptionsHelper.serverError("Exception closing stopped process input stream", e);
@@ -303,11 +304,11 @@ public class AutodetectProcessManager extends AbstractComponent {
         return Duration.between(communicator.getProcessStartTime(), ZonedDateTime.now());
     }

-    private void setJobState(String jobId, JobState state) {
-        UpdateJobStateAction.Request request = new UpdateJobStateAction.Request(jobId, state);
-        client.execute(UpdateJobStateAction.INSTANCE, request, new ActionListener<UpdateJobStateAction.Response>() {
+    private void setJobState(long taskId, String jobId, JobState state) {
+        UpdatePersistentTaskStatusAction.Request request = new UpdatePersistentTaskStatusAction.Request(taskId, state);
+        client.execute(UpdatePersistentTaskStatusAction.INSTANCE, request, new ActionListener<UpdatePersistentTaskStatusAction.Response>() {
             @Override
-            public void onResponse(UpdateJobStateAction.Response response) {
+            public void onResponse(UpdatePersistentTaskStatusAction.Response response) {
                 if (response.isAcknowledged()) {
                     logger.info("Successfully set job state to [{}] for job [{}]", state, jobId);
                 } else {
@@ -322,9 +323,16 @@ public class AutodetectProcessManager extends AbstractComponent {
         });
     }

-    public void setJobState(String jobId, JobState state, Consumer<Void> handler, Consumer<Exception> errorHandler) {
-        UpdateJobStateAction.Request request = new UpdateJobStateAction.Request(jobId, state);
-        client.execute(UpdateJobStateAction.INSTANCE, request, ActionListener.wrap(r -> handler.accept(null), errorHandler));
+    public void setJobState(long taskId, JobState state, CheckedConsumer<Exception, IOException> handler) {
+        UpdatePersistentTaskStatusAction.Request request = new UpdatePersistentTaskStatusAction.Request(taskId, state);
+        client.execute(UpdatePersistentTaskStatusAction.INSTANCE, request,
+                ActionListener.wrap(r -> handler.accept(null), e -> {
+                    try {
+                        handler.accept(e);
+                    } catch (IOException e1) {
+                        logger.warn("Error while delegating exception [" + e.getMessage() + "]", e1);
+                    }
+                }));
     }

     public Optional<Tuple<DataCounts, ModelSizeStats>> getStatistics(String jobId) {
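Editor's note: job state changes now ride on the generic persistent-task status update instead of the dedicated UpdateJobStateAction. A hedged sketch of the call shape used by the two setJobState variants above (the request constructor is as shown in this diff; markOpened is an illustrative name):

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.persistent.UpdatePersistentTaskStatusAction;

class StatusUpdateSketch {
    static void markOpened(Client client, long taskId) {
        UpdatePersistentTaskStatusAction.Request request =
                new UpdatePersistentTaskStatusAction.Request(taskId, JobState.OPENED);
        client.execute(UpdatePersistentTaskStatusAction.INSTANCE, request,
                ActionListener.wrap(
                        response -> { /* acknowledged: the task status in cluster state is now OPENED */ },
                        e -> { /* surface the failure to the caller */ }));
    }
}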

View File: BlackHoleAutodetectProcess.java

@@ -5,8 +5,6 @@
  */
 package org.elasticsearch.xpack.ml.job.process.autodetect;

-import org.apache.logging.log4j.Logger;
-import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.xpack.ml.job.config.DetectionRule;
 import org.elasticsearch.xpack.ml.job.config.ModelDebugConfig;
 import org.elasticsearch.xpack.ml.job.process.autodetect.output.FlushAcknowledgement;
@@ -30,8 +28,8 @@ import java.util.concurrent.BlockingQueue;
  */
 public class BlackHoleAutodetectProcess implements AutodetectProcess {

-    private static final Logger LOGGER = Loggers.getLogger(BlackHoleAutodetectProcess.class);
     private static final String FLUSH_ID = "flush-1";
+    private static final AutodetectResult EMPTY = new AutodetectResult(null, null, null, null, null, null, null, null, null);

     private final ZonedDateTime startTime;
@@ -76,6 +74,7 @@ public class BlackHoleAutodetectProcess implements AutodetectProcess {
     @Override
     public void close() throws IOException {
+        results.add(EMPTY);
     }

     @Override
@@ -89,10 +88,11 @@ public class BlackHoleAutodetectProcess implements AutodetectProcess {
             public boolean hasNext() {
                 try {
                     result = results.take();
+                    return result != EMPTY;
                 } catch (InterruptedException e) {
                     Thread.currentThread().interrupt();
+                    return false;
                 }
-                return true;
             }

     @Override
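Editor's note: the EMPTY sentinel above is a poison pill: close() enqueues it so a consumer blocked in take() wakes up and reports exhaustion instead of hanging forever. A self-contained sketch of the pattern (generic names, not this class's API):

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

class PoisonPillQueue<T> {
    private final T sentinel;
    private final BlockingQueue<T> queue = new ArrayBlockingQueue<>(100);

    PoisonPillQueue(T sentinel) {
        this.sentinel = sentinel;
    }

    void close() throws InterruptedException {
        queue.put(sentinel);   // wake up any blocked consumer
    }

    // Returns null once the queue has been closed
    T next() throws InterruptedException {
        T item = queue.take();
        return item == sentinel ? null : item;
    }
}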

View File: AutoDetectResultProcessor.java

@@ -173,7 +173,6 @@ public class AutoDetectResultProcessor {
             completionLatch.await();
         } catch (InterruptedException e) {
             Thread.currentThread().interrupt();
-            throw new RuntimeException(e);
         }
     }

View File: AutodetectResultsParser.java

@@ -46,7 +46,7 @@ public class AutodetectResultsParser extends AbstractComponent {
         }
     }

-    private void consumeAndCloseStream(InputStream in) {
+    static void consumeAndCloseStream(InputStream in) {
         try {
             // read anything left in the stream before
             // closing the stream otherwise if the process
@@ -58,7 +58,7 @@ public class AutodetectResultsParser extends AbstractComponent {
             }
             in.close();
         } catch (IOException e) {
-            logger.warn("Error closing result parser input stream", e);
+            throw new RuntimeException("Error closing result parser input stream", e);
         }
     }
@@ -79,13 +79,16 @@ public class AutodetectResultsParser extends AbstractComponent {
             try {
                 token = parser.nextToken();
             } catch (IOException e) {
-                throw new ElasticsearchParseException(e.getMessage(), e);
+                logger.debug("io error while parsing", e);
+                consumeAndCloseStream(in);
+                return false;
             }
             if (token == XContentParser.Token.END_ARRAY) {
                 consumeAndCloseStream(in);
                 return false;
             } else if (token != XContentParser.Token.START_OBJECT) {
                 logger.error("Expecting Json Field name token after the Start Object token");
+                consumeAndCloseStream(in);
                 throw new ElasticsearchParseException("unexpected token [" + token + "]");
             }
             return true;

View File: RestCloseJobAction.java

@@ -29,8 +29,8 @@ public class RestCloseJobAction extends BaseRestHandler {
     @Override
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
         CloseJobAction.Request request = new CloseJobAction.Request(restRequest.param(Job.ID.getPreferredName()));
-        if (restRequest.hasParam("close_timeout")) {
-            request.setCloseTimeout(TimeValue.parseTimeValue(restRequest.param("close_timeout"), "close_timeout"));
+        if (restRequest.hasParam("timeout")) {
+            request.setTimeout(TimeValue.parseTimeValue(restRequest.param("timeout"), "timeout"));
         }
         return channel -> client.execute(CloseJobAction.INSTANCE, request, new RestToXContentListener<>(channel));
     }

View File: RestOpenJobAction.java

@@ -8,14 +8,18 @@ package org.elasticsearch.xpack.ml.rest.job;
 import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.BytesRestResponse;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestRequest;
-import org.elasticsearch.rest.action.RestToXContentListener;
+import org.elasticsearch.rest.RestResponse;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.rest.action.RestBuilderListener;
 import org.elasticsearch.xpack.ml.MlPlugin;
 import org.elasticsearch.xpack.ml.action.OpenJobAction;
-import org.elasticsearch.xpack.ml.action.PostDataAction;
 import org.elasticsearch.xpack.ml.job.config.Job;
+import org.elasticsearch.xpack.persistent.PersistentActionResponse;

 import java.io.IOException;
@@ -29,14 +33,28 @@ public class RestOpenJobAction extends BaseRestHandler {
     @Override
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
-        OpenJobAction.Request request = new OpenJobAction.Request(restRequest.param(Job.ID.getPreferredName()));
-        request.setIgnoreDowntime(restRequest.paramAsBoolean(OpenJobAction.Request.IGNORE_DOWNTIME.getPreferredName(), false));
-        if (restRequest.hasParam("open_timeout")) {
-            TimeValue openTimeout = restRequest.paramAsTime("open_timeout", TimeValue.timeValueSeconds(30));
-            request.setOpenTimeout(openTimeout);
+        OpenJobAction.Request request;
+        if (restRequest.hasContentOrSourceParam()) {
+            request = OpenJobAction.Request.parseRequest(restRequest.param(Job.ID.getPreferredName()), restRequest.contentParser());
+        } else {
+            request = new OpenJobAction.Request(restRequest.param(Job.ID.getPreferredName()));
+            request.setIgnoreDowntime(restRequest.paramAsBoolean(OpenJobAction.Request.IGNORE_DOWNTIME.getPreferredName(), false));
+            if (restRequest.hasParam("timeout")) {
+                TimeValue openTimeout = restRequest.paramAsTime("timeout", TimeValue.timeValueSeconds(30));
+                request.setTimeout(openTimeout);
+            }
         }
         return channel -> {
-            client.execute(OpenJobAction.INSTANCE, request, new RestToXContentListener<>(channel));
+            client.execute(OpenJobAction.INSTANCE, request, new RestBuilderListener<PersistentActionResponse>(channel) {
+
+                @Override
+                public RestResponse buildResponse(PersistentActionResponse r, XContentBuilder builder) throws Exception {
+                    builder.startObject();
+                    builder.field("opened", true);
+                    builder.endObject();
+                    return new BytesRestResponse(RestStatus.OK, builder);
+                }
+            });
         };
     }
 }

View File: JobStateObserver.java

@@ -6,15 +6,17 @@
 package org.elasticsearch.xpack.ml.utils;

 import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ClusterStateObserver;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.ml.job.config.JobState;
-import org.elasticsearch.xpack.ml.job.metadata.Allocation;
 import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
+import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;

 import java.util.function.Consumer;
 import java.util.function.Predicate;
@@ -38,7 +40,12 @@ public class JobStateObserver {
         observer.waitForNextChange(new ClusterStateObserver.Listener() {
             @Override
             public void onNewClusterState(ClusterState state) {
-                handler.accept(null);
+                if (jobStatePredicate.failed) {
+                    handler.accept(new ElasticsearchStatusException("[" + jobId + "] expected state [" + JobState.OPENED +
+                            "] but got [" + JobState.FAILED + "]", RestStatus.CONFLICT));
+                } else {
+                    handler.accept(null);
+                }
             }

             @Override
@@ -51,7 +58,12 @@ public class JobStateObserver {
             @Override
             public void onTimeout(TimeValue timeout) {
                 if (jobStatePredicate.test(clusterService.state())) {
-                    handler.accept(null);
+                    if (jobStatePredicate.failed) {
+                        handler.accept(new ElasticsearchStatusException("[" + jobId + "] expected state [" + JobState.OPENED +
+                                "] but got [" + JobState.FAILED + "]", RestStatus.CONFLICT));
+                    } else {
+                        handler.accept(null);
+                    }
                 } else {
                     Exception e = new IllegalArgumentException("Timeout expired while waiting for job state to change to ["
                             + expectedState + "]");
@@ -66,6 +78,8 @@ public class JobStateObserver {
         private final String jobId;
         private final JobState expectedState;

+        private volatile boolean failed;
+
         JobStatePredicate(String jobId, JobState expectedState) {
             this.jobId = jobId;
             this.expectedState = expectedState;
@@ -73,14 +87,14 @@ public class JobStateObserver {
         @Override
         public boolean test(ClusterState newState) {
-            MlMetadata metadata = newState.getMetaData().custom(MlMetadata.TYPE);
-            if (metadata != null) {
-                Allocation allocation = metadata.getAllocations().get(jobId);
-                if (allocation != null) {
-                    return allocation.getState() == expectedState;
-                }
+            PersistentTasksInProgress tasks = newState.custom(PersistentTasksInProgress.TYPE);
+            JobState jobState = MlMetadata.getJobState(jobId, tasks);
+            if (jobState == JobState.FAILED) {
+                failed = true;
+                return true;
+            } else {
+                return jobState == expectedState;
             }
-            return false;
         }
     }
 }
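Editor's note: the predicate now returns true for FAILED as well, so the observer wakes up either way, and the volatile failed flag tells the listener afterwards whether to deliver success or a conflict error. A self-contained sketch of that latch-in-a-predicate pattern (hypothetical names):

import java.util.Objects;
import java.util.function.Predicate;

class TerminalStatePredicate<S> implements Predicate<S> {
    private final S expected;
    private final S failure;
    volatile boolean failed;

    TerminalStatePredicate(S expected, S failure) {
        this.expected = expected;
        this.failure = failure;
    }

    @Override
    public boolean test(S state) {
        if (Objects.equals(state, failure)) {
            failed = true;    // remember why we stopped waiting
            return true;      // wake the waiter even on failure
        }
        return Objects.equals(state, expected);
    }
}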

View File: PersistentActionCoordinator.java

@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.persistent;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
 import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
 import org.elasticsearch.cluster.ClusterChangedEvent;
 import org.elasticsearch.cluster.ClusterStateListener;
@@ -19,11 +20,11 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.xpack.persistent.PersistentTasksInProgress.PersistentTaskInProgress;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskCancelledException;
 import org.elasticsearch.tasks.TaskManager;
 import org.elasticsearch.transport.TransportResponse.Empty;
+import org.elasticsearch.xpack.persistent.PersistentTasksInProgress.PersistentTaskInProgress;

 import java.io.IOException;
 import java.util.HashMap;
@@ -211,7 +212,11 @@ public class PersistentActionCoordinator extends AbstractComponent implements Cl
                     "cancelled task {} failed with an exception, cancellation reason [{}]",
                     task.getId(), task.getTask().getReasonCancelled()), e);
         }
-        startCompletionNotification(task, null);
+        if (CancelTasksRequest.DEFAULT_REASON.equals(task.getTask().getReasonCancelled())) {
+            startCompletionNotification(task, null);
+        } else {
+            startCompletionNotification(task, e);
+        }
     } else {
         startCompletionNotification(task, e);
     }

View File: PersistentActionService.java

@@ -13,8 +13,10 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.AbstractRunnable;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskId;
+import org.elasticsearch.threadpool.ThreadPool;

 /**
  * Service responsible for executing restartable actions that can survive disappearance of a coordinating and executor nodes.
@@ -22,11 +24,13 @@ import org.elasticsearch.tasks.TaskId;
 public class PersistentActionService extends AbstractComponent {

     private final Client client;
+    private final ThreadPool threadPool;
     private final ClusterService clusterService;

-    public PersistentActionService(Settings settings, ClusterService clusterService, Client client) {
+    public PersistentActionService(Settings settings, ThreadPool threadPool, ClusterService clusterService, Client client) {
         super(settings);
         this.client = client;
+        this.threadPool = threadPool;
         this.clusterService = clusterService;
     }
@@ -43,8 +47,20 @@ public class PersistentActionService extends AbstractComponent {
     public void sendCompletionNotification(long taskId, Exception failure,
                                            ActionListener<CompletionPersistentTaskAction.Response> listener) {
         CompletionPersistentTaskAction.Request restartRequest = new CompletionPersistentTaskAction.Request(taskId, failure);
+        // Need to fork otherwise: java.lang.AssertionError: should not be called by a cluster state applier.
+        // reason [the applied cluster state is not yet available])
         try {
-            client.execute(CompletionPersistentTaskAction.INSTANCE, restartRequest, listener);
+            threadPool.executor(ThreadPool.Names.GENERIC).execute(new AbstractRunnable() {
+                @Override
+                public void onFailure(Exception e) {
+                    listener.onFailure(e);
+                }
+
+                @Override
+                protected void doRun() throws Exception {
+                    client.execute(CompletionPersistentTaskAction.INSTANCE, restartRequest, listener);
+                }
+            });
         } catch (Exception e) {
             listener.onFailure(e);
         }
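Editor's note: sendCompletionNotification can run on a cluster-state applier thread, where issuing a transport action trips the assertion quoted in the comment above, hence the fork to the GENERIC pool. A hedged sketch of the pattern, with forkThen as an illustrative name:

import java.util.function.Consumer;

import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.threadpool.ThreadPool;

class ForkSketch {
    // Hand work to the GENERIC pool so it never runs on the applier thread
    static void forkThen(ThreadPool threadPool, Runnable work, Consumer<Exception> onFailure) {
        threadPool.executor(ThreadPool.Names.GENERIC).execute(new AbstractRunnable() {
            @Override
            public void onFailure(Exception e) {
                onFailure.accept(e);
            }

            @Override
            protected void doRun() throws Exception {
                work.run();   // safe to issue transport actions from here
            }
        });
    }
}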

View File: PersistentTask.java

@@ -7,7 +7,6 @@ package org.elasticsearch.xpack.persistent;

 import org.elasticsearch.common.inject.Provider;
 import org.elasticsearch.tasks.CancellableTask;
-import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskId;

 /**
@@ -27,6 +26,15 @@ public class PersistentTask extends CancellableTask {
         return true;
     }

+    // Persistent tasks always need to return `false` here, because the parent of a persistent task
+    // isn't a task in the task manager but an entry in cluster state. This instructs the task manager
+    // not to try to kill this persistent task when it cannot find the fake parent node id "cluster"
+    // in the cluster state.
+    @Override
+    public final boolean cancelOnParentLeaving() {
+        return false;
+    }
+
     @Override
     public Status getStatus() {
         Provider<Status> statusProvider = this.statusProvider;

View File: PersistentTasksInProgress.java

@@ -109,7 +109,7 @@ public final class PersistentTasksInProgress extends AbstractNamedDiffable<Clust
     public PersistentTaskInProgress(PersistentTaskInProgress<Request> persistentTaskInProgress, String newExecutorNode) {
         this(persistentTaskInProgress.id, persistentTaskInProgress.allocationId + 1L,
-                persistentTaskInProgress.action, persistentTaskInProgress.request, null, newExecutorNode);
+                persistentTaskInProgress.action, persistentTaskInProgress.request, persistentTaskInProgress.status, newExecutorNode);
     }

     public PersistentTaskInProgress(PersistentTaskInProgress<Request> persistentTaskInProgress, Status status) {

View File: CloseJobActionRequestTests.java

@@ -6,16 +6,17 @@
 package org.elasticsearch.xpack.ml.action;

 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.xpack.ml.action.CloseJobAction.Request;
-import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
+import org.elasticsearch.xpack.ml.support.AbstractStreamableXContentTestCase;

-public class CloseJobActionRequestTests extends AbstractStreamableTestCase<Request> {
+public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCase<Request> {

     @Override
     protected Request createTestInstance() {
         Request request = new Request(randomAsciiOfLengthBetween(1, 20));
         if (randomBoolean()) {
-            request.setCloseTimeout(TimeValue.timeValueMillis(randomNonNegativeLong()));
+            request.setTimeout(TimeValue.timeValueMillis(randomNonNegativeLong()));
         }
         return request;
     }
@@ -24,4 +25,9 @@ public class CloseJobActionRequestTests extends AbstractStreamableTestCase<Reque
     protected Request createBlankInstance() {
         return new Request();
     }
+
+    @Override
+    protected Request parseInstance(XContentParser parser) {
+        return Request.parseRequest(null, parser);
+    }
 }

View File: CloseJobActionTests.java

@@ -0,0 +1,73 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;
import org.elasticsearch.xpack.persistent.PersistentTasksInProgress.PersistentTaskInProgress;
import java.util.Collections;
import static org.elasticsearch.xpack.ml.job.config.JobTests.buildJobBuilder;
public class CloseJobActionTests extends ESTestCase {
public void testMoveJobToClosingState() {
MlMetadata.Builder mlBuilder = new MlMetadata.Builder();
mlBuilder.putJob(buildJobBuilder("job_id").build(), false);
PersistentTaskInProgress<OpenJobAction.Request> task =
new PersistentTaskInProgress<>(1L, OpenJobAction.NAME, new OpenJobAction.Request("job_id"), null);
task = new PersistentTaskInProgress<>(task, JobState.OPENED);
ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name"))
.metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlBuilder.build()))
.putCustom(PersistentTasksInProgress.TYPE, new PersistentTasksInProgress(1L, Collections.singletonMap(1L, task)));
ClusterState result = CloseJobAction.moveJobToClosingState("job_id", csBuilder.build());
PersistentTasksInProgress actualTasks = result.custom(PersistentTasksInProgress.TYPE);
assertEquals(JobState.CLOSING, actualTasks.getTask(1L).getStatus());
MlMetadata actualMetadata = result.metaData().custom(MlMetadata.TYPE);
assertNotNull(actualMetadata.getJobs().get("job_id").getFinishedTime());
}
public void testMoveJobToClosingState_jobMissing() {
MlMetadata.Builder mlBuilder = new MlMetadata.Builder();
ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name"))
.metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlBuilder.build()))
.putCustom(PersistentTasksInProgress.TYPE, new PersistentTasksInProgress(1L, Collections.emptyMap()));
expectThrows(ResourceNotFoundException.class, () -> CloseJobAction.moveJobToClosingState("job_id", csBuilder.build()));
}
public void testMoveJobToClosingState_unexpectedJobState() {
MlMetadata.Builder mlBuilder = new MlMetadata.Builder();
mlBuilder.putJob(buildJobBuilder("job_id").build(), false);
PersistentTaskInProgress<OpenJobAction.Request> task =
new PersistentTaskInProgress<>(1L, OpenJobAction.NAME, new OpenJobAction.Request("job_id"), null);
task = new PersistentTaskInProgress<>(task, JobState.OPENING);
ClusterState.Builder csBuilder1 = ClusterState.builder(new ClusterName("_name"))
.metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlBuilder.build()))
.putCustom(PersistentTasksInProgress.TYPE, new PersistentTasksInProgress(1L, Collections.singletonMap(1L, task)));
ElasticsearchStatusException result =
expectThrows(ElasticsearchStatusException.class, () -> CloseJobAction.moveJobToClosingState("job_id", csBuilder1.build()));
assertEquals("cannot close job, expected job state [opened], but got [opening]", result.getMessage());
ClusterState.Builder csBuilder2 = ClusterState.builder(new ClusterName("_name"))
.metaData(new MetaData.Builder().putCustom(MlMetadata.TYPE, mlBuilder.build()))
.putCustom(PersistentTasksInProgress.TYPE, new PersistentTasksInProgress(1L, Collections.emptyMap()));
result = expectThrows(ElasticsearchStatusException.class, () -> CloseJobAction.moveJobToClosingState("job_id", csBuilder2.build()));
assertEquals("cannot close job, expected job state [opened], but got [closed]", result.getMessage());
}
}
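These three cases all hinge on one idiom from the persistent tasks framework: a job's lifecycle state is the status carried by its persistent task, and a state change is a new immutable snapshot of that task. A minimal sketch of the idiom, using only the constructors exercised by the tests above (the final assertion is illustrative, not code from this change):

    // Sketch: job state lives on the persistent task rather than in MlMetadata.
    PersistentTaskInProgress<OpenJobAction.Request> task =
            new PersistentTaskInProgress<>(1L, OpenJobAction.NAME, new OpenJobAction.Request("job_id"), null);
    // The copy constructor produces a new snapshot of the same task with an updated status:
    task = new PersistentTaskInProgress<>(task, JobState.OPENED);
    PersistentTasksInProgress tasks = new PersistentTasksInProgress(1L, Collections.singletonMap(1L, task));
    assertEquals(JobState.OPENED, tasks.getTask(1L).getStatus());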

View File

@ -5,7 +5,6 @@
*/ */
package org.elasticsearch.xpack.ml.action; package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkRequestBuilder;
@ -13,19 +12,9 @@ import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.SecurityIntegTestCase;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedState; import org.elasticsearch.xpack.ml.datafeed.DatafeedState;
import org.elasticsearch.xpack.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.ml.job.config.AnalysisConfig;
@ -33,52 +22,32 @@ import org.elasticsearch.xpack.ml.job.config.DataDescription;
import org.elasticsearch.xpack.ml.job.config.Detector; import org.elasticsearch.xpack.ml.job.config.Detector;
import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState; import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.security.authc.support.SecuredString; import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase;
import org.elasticsearch.xpack.persistent.PersistentActionResponse; import org.elasticsearch.xpack.persistent.PersistentActionResponse;
import org.elasticsearch.xpack.persistent.RemovePersistentTaskAction; import org.elasticsearch.xpack.persistent.RemovePersistentTaskAction;
import org.junit.After; import org.junit.After;
import org.junit.Before;
import java.io.IOException; import java.io.IOException;
import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import static org.elasticsearch.xpack.ml.integration.TooManyJobsIT.ensureClusterStateConsistencyWorkAround;
import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
@ESIntegTestCase.ClusterScope(numDataNodes = 1) public class DatafeedJobsIT extends BaseMlIntegTestCase {
public class DatafeedJobsIT extends SecurityIntegTestCase {
@Override @Before
protected Settings externalClusterClientSettings() { public void startNode() {
return Settings.builder().put(super.externalClusterClientSettings()).put("transport.type", "security4") internalCluster().ensureAtLeastNumDataNodes(1);
.put(MlPlugin.ML_ENABLED.getKey(), true)
.put(ThreadContext.PREFIX + ".Authorization", basicAuthHeaderValue("elastic", new SecuredString("changeme".toCharArray())))
.build();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(XPackPlugin.class);
}
@Override
protected Collection<Class<? extends Plugin>> transportClientPlugins() {
return nodePlugins();
} }
@After @After
public void clearMlMetadata() throws Exception { public void stopNode() throws Exception {
clearMlMetadata(client()); cleanupWorkaround(1);
} }
public void testLookbackOnly() throws Exception { public void testLookbackOnly() throws Exception {
@ -97,11 +66,11 @@ public class DatafeedJobsIT extends SecurityIntegTestCase {
long numDocs2 = randomIntBetween(32, 2048); long numDocs2 = randomIntBetween(32, 2048);
indexDocs("data-2", numDocs2, oneWeekAgo, now); indexDocs("data-2", numDocs2, oneWeekAgo, now);
Job.Builder job = createJob("lookback-job"); Job.Builder job = createScheduledJob("lookback-job");
PutJobAction.Request putJobRequest = new PutJobAction.Request(job.build(true, job.getId())); PutJobAction.Request putJobRequest = new PutJobAction.Request(job.build(true, job.getId()));
PutJobAction.Response putJobResponse = client().execute(PutJobAction.INSTANCE, putJobRequest).get(); PutJobAction.Response putJobResponse = client().execute(PutJobAction.INSTANCE, putJobRequest).get();
assertTrue(putJobResponse.isAcknowledged()); assertTrue(putJobResponse.isAcknowledged());
client().execute(InternalOpenJobAction.INSTANCE, new InternalOpenJobAction.Request(job.getId())); client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(job.getId()));
assertBusy(() -> { assertBusy(() -> {
GetJobsStatsAction.Response statsResponse = GetJobsStatsAction.Response statsResponse =
client().execute(GetJobsStatsAction.INSTANCE, new GetJobsStatsAction.Request(job.getId())).actionGet(); client().execute(GetJobsStatsAction.INSTANCE, new GetJobsStatsAction.Request(job.getId())).actionGet();
@ -137,11 +106,11 @@ public class DatafeedJobsIT extends SecurityIntegTestCase {
long lastWeek = now - 604800000; long lastWeek = now - 604800000;
indexDocs("data", numDocs1, lastWeek, now); indexDocs("data", numDocs1, lastWeek, now);
Job.Builder job = createJob("realtime-job"); Job.Builder job = createScheduledJob("realtime-job");
PutJobAction.Request putJobRequest = new PutJobAction.Request(job.build(true, job.getId())); PutJobAction.Request putJobRequest = new PutJobAction.Request(job.build(true, job.getId()));
PutJobAction.Response putJobResponse = client().execute(PutJobAction.INSTANCE, putJobRequest).get(); PutJobAction.Response putJobResponse = client().execute(PutJobAction.INSTANCE, putJobRequest).get();
assertTrue(putJobResponse.isAcknowledged()); assertTrue(putJobResponse.isAcknowledged());
client().execute(InternalOpenJobAction.INSTANCE, new InternalOpenJobAction.Request(job.getId())); client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(job.getId()));
assertBusy(() -> { assertBusy(() -> {
GetJobsStatsAction.Response statsResponse = GetJobsStatsAction.Response statsResponse =
client().execute(GetJobsStatsAction.INSTANCE, new GetJobsStatsAction.Request(job.getId())).actionGet(); client().execute(GetJobsStatsAction.INSTANCE, new GetJobsStatsAction.Request(job.getId())).actionGet();
@ -207,7 +176,7 @@ public class DatafeedJobsIT extends SecurityIntegTestCase {
logger.info("Indexed [{}] documents", numDocs); logger.info("Indexed [{}] documents", numDocs);
} }
private Job.Builder createJob(String jobId) { private Job.Builder createScheduledJob(String jobId) {
DataDescription.Builder dataDescription = new DataDescription.Builder(); DataDescription.Builder dataDescription = new DataDescription.Builder();
dataDescription.setFormat(DataDescription.DataFormat.JSON); dataDescription.setFormat(DataDescription.DataFormat.JSON);
dataDescription.setTimeFormat("yyyy-MM-dd HH:mm:ss"); dataDescription.setTimeFormat("yyyy-MM-dd HH:mm:ss");
@ -246,61 +215,4 @@ public class DatafeedJobsIT extends SecurityIntegTestCase {
} }
} }
public static void clearMlMetadata(Client client) throws Exception {
deleteAllDatafeeds(client);
deleteAllJobs(client);
}
private static void deleteAllDatafeeds(Client client) throws Exception {
MetaData metaData = client.admin().cluster().prepareState().get().getState().getMetaData();
MlMetadata mlMetadata = metaData.custom(MlMetadata.TYPE);
for (DatafeedConfig datafeed : mlMetadata.getDatafeeds().values()) {
String datafeedId = datafeed.getId();
try {
RemovePersistentTaskAction.Response stopResponse =
client.execute(StopDatafeedAction.INSTANCE, new StopDatafeedAction.Request(datafeedId)).get();
assertTrue(stopResponse.isAcknowledged());
} catch (ExecutionException e) {
// CONFLICT is ok, as it means the datafeed has already stopped, which isn't an issue at all.
if (RestStatus.CONFLICT != ExceptionsHelper.status(e.getCause())) {
throw new RuntimeException(e);
}
}
assertBusy(() -> {
try {
GetDatafeedsStatsAction.Request request = new GetDatafeedsStatsAction.Request(datafeedId);
GetDatafeedsStatsAction.Response r = client.execute(GetDatafeedsStatsAction.INSTANCE, request).get();
assertThat(r.getResponse().results().get(0).getDatafeedState(), equalTo(DatafeedState.STOPPED));
} catch (InterruptedException | ExecutionException e) {
throw new RuntimeException(e);
}
});
DeleteDatafeedAction.Response deleteResponse =
client.execute(DeleteDatafeedAction.INSTANCE, new DeleteDatafeedAction.Request(datafeedId)).get();
assertTrue(deleteResponse.isAcknowledged());
}
}
public static void deleteAllJobs(Client client) throws Exception {
MetaData metaData = client.admin().cluster().prepareState().get().getState().getMetaData();
MlMetadata mlMetadata = metaData.custom(MlMetadata.TYPE);
for (Map.Entry<String, Job> entry : mlMetadata.getJobs().entrySet()) {
String jobId = entry.getKey();
try {
CloseJobAction.Response response =
client.execute(CloseJobAction.INSTANCE, new CloseJobAction.Request(jobId)).get();
assertTrue(response.isClosed());
} catch (Exception e) {
// ignore
}
DeleteJobAction.Response response =
client.execute(DeleteJobAction.INSTANCE, new DeleteJobAction.Request(jobId)).get();
assertTrue(response.isAcknowledged());
}
}
@Override
protected void ensureClusterStateConsistency() throws IOException {
ensureClusterStateConsistencyWorkAround();
}
} }

View File

@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.ml.action;

 import org.elasticsearch.xpack.ml.action.GetJobsAction.Response;
+import org.elasticsearch.xpack.ml.action.util.QueryPage;
 import org.elasticsearch.xpack.ml.job.config.AnalysisConfig;
 import org.elasticsearch.xpack.ml.job.config.AnalysisLimits;
 import org.elasticsearch.xpack.ml.job.config.DataDescription;
@@ -13,7 +14,6 @@ import org.elasticsearch.xpack.ml.job.config.Detector;
 import org.elasticsearch.xpack.ml.job.config.IgnoreDowntime;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.config.ModelDebugConfig;
-import org.elasticsearch.xpack.ml.action.util.QueryPage;
 import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;

 import java.util.ArrayList;
@@ -54,7 +54,7 @@ public class GetJobsActionResponseTests extends AbstractStreamableTestCase<GetJobsAction.Response> {
                 Job job = new Job(jobId, description, createTime, finishedTime, lastDataTime,
                         analysisConfig, analysisLimits, dataDescription,
                         modelDebugConfig, ignoreDowntime, normalizationWindowDays, backgroundPersistInterval,
-                        modelSnapshotRetentionDays, resultsRetentionDays, customConfig, modelSnapshotId, indexName);
+                        modelSnapshotRetentionDays, resultsRetentionDays, customConfig, modelSnapshotId, indexName, randomBoolean());
                 jobList.add(job);
             }

View File

@@ -6,16 +6,17 @@
 package org.elasticsearch.xpack.ml.action;

 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.xpack.ml.action.OpenJobAction.Request;
-import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
+import org.elasticsearch.xpack.ml.support.AbstractStreamableXContentTestCase;

-public class OpenJobActionRequestTests extends AbstractStreamableTestCase<Request> {
+public class OpenJobActionRequestTests extends AbstractStreamableXContentTestCase<Request> {

     @Override
     protected Request createTestInstance() {
         Request request = new Request(randomAsciiOfLengthBetween(1, 20));
         if (randomBoolean()) {
-            request.setOpenTimeout(TimeValue.timeValueMillis(randomNonNegativeLong()));
+            request.setTimeout(TimeValue.timeValueMillis(randomNonNegativeLong()));
         }
         return request;
     }

@@ -24,4 +25,9 @@ public class OpenJobActionRequestTests extends AbstractStreamableTestCase<Request> {
     protected Request createBlankInstance() {
         return new Request();
     }
+
+    @Override
+    protected Request parseInstance(XContentParser parser) {
+        return Request.parseRequest(null, parser);
+    }
 }

View File

@@ -1,22 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.ml.action;
-
-import org.elasticsearch.xpack.ml.action.OpenJobAction.Response;
-import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
-
-public class OpenJobActionResponseTests extends AbstractStreamableTestCase<Response> {
-
-    @Override
-    protected Response createTestInstance() {
-        return new Response(randomBoolean());
-    }
-
-    @Override
-    protected Response createBlankInstance() {
-        return new Response();
-    }
-}

View File

@@ -0,0 +1,95 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ml.action;
+
+import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.ml.job.config.Job;
+import org.elasticsearch.xpack.ml.job.config.JobState;
+import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
+import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;
+import org.elasticsearch.xpack.persistent.PersistentTasksInProgress.PersistentTaskInProgress;
+
+import java.net.InetAddress;
+import java.util.Collections;
+
+import static org.elasticsearch.xpack.ml.job.config.JobTests.buildJobBuilder;
+
+public class OpenJobActionTests extends ESTestCase {
+
+    public void testValidate() {
+        MlMetadata.Builder mlBuilder = new MlMetadata.Builder();
+        mlBuilder.putJob(buildJobBuilder("job_id").build(), false);
+        DiscoveryNodes nodes = DiscoveryNodes.builder()
+                .add(new DiscoveryNode("_node_name", "_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9300),
+                        Collections.emptyMap(), Collections.emptySet(), Version.CURRENT))
+                .build();
+
+        PersistentTaskInProgress<OpenJobAction.Request> task =
+                new PersistentTaskInProgress<>(1L, OpenJobAction.NAME, new OpenJobAction.Request("job_id"), "_node_id");
+        task = new PersistentTaskInProgress<>(task, randomFrom(JobState.CLOSED, JobState.FAILED));
+        PersistentTasksInProgress tasks = new PersistentTasksInProgress(1L, Collections.singletonMap(1L, task));
+
+        OpenJobAction.validate("job_id", mlBuilder.build(), tasks, nodes);
+        OpenJobAction.validate("job_id", mlBuilder.build(), new PersistentTasksInProgress(1L, Collections.emptyMap()), nodes);
+        OpenJobAction.validate("job_id", mlBuilder.build(), null, nodes);
+
+        task = new PersistentTaskInProgress<>(1L, OpenJobAction.NAME, new OpenJobAction.Request("job_id"), "_other_node_id");
+        task = new PersistentTaskInProgress<>(task, JobState.OPENED);
+        tasks = new PersistentTasksInProgress(1L, Collections.singletonMap(1L, task));
+        OpenJobAction.validate("job_id", mlBuilder.build(), tasks, nodes);
+    }
+
+    public void testValidate_jobMissing() {
+        MlMetadata.Builder mlBuilder = new MlMetadata.Builder();
+        mlBuilder.putJob(buildJobBuilder("job_id1").build(), false);
+        expectThrows(ResourceNotFoundException.class, () -> OpenJobAction.validate("job_id2", mlBuilder.build(), null, null));
+    }
+
+    public void testValidate_jobMarkedAsDeleted() {
+        MlMetadata.Builder mlBuilder = new MlMetadata.Builder();
+        Job.Builder jobBuilder = buildJobBuilder("job_id");
+        jobBuilder.setDeleted(true);
+        mlBuilder.putJob(jobBuilder.build(), false);
+
+        Exception e = expectThrows(ElasticsearchStatusException.class,
+                () -> OpenJobAction.validate("job_id", mlBuilder.build(), null, null));
+        assertEquals("Cannot open job [job_id] because it has been marked as deleted", e.getMessage());
+    }
+
+    public void testValidate_unexpectedState() {
+        MlMetadata.Builder mlBuilder = new MlMetadata.Builder();
+        mlBuilder.putJob(buildJobBuilder("job_id").build(), false);
+        DiscoveryNodes nodes = DiscoveryNodes.builder()
+                .add(new DiscoveryNode("_node_name", "_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9300),
+                        Collections.emptyMap(), Collections.emptySet(), Version.CURRENT))
+                .build();
+
+        PersistentTaskInProgress<OpenJobAction.Request> task =
+                new PersistentTaskInProgress<>(1L, OpenJobAction.NAME, new OpenJobAction.Request("job_id"), "_node_id");
+        JobState jobState = randomFrom(JobState.OPENING, JobState.OPENED, JobState.CLOSING);
+        task = new PersistentTaskInProgress<>(task, jobState);
+        PersistentTasksInProgress tasks1 = new PersistentTasksInProgress(1L, Collections.singletonMap(1L, task));
+
+        Exception e = expectThrows(ElasticsearchStatusException.class,
+                () -> OpenJobAction.validate("job_id", mlBuilder.build(), tasks1, nodes));
+        assertEquals("[job_id] expected state [closed] or [failed], but got [" + jobState +"]", e.getMessage());
+
+        task = new PersistentTaskInProgress<>(1L, OpenJobAction.NAME, new OpenJobAction.Request("job_id"), "_other_node_id");
+        jobState = randomFrom(JobState.OPENING, JobState.CLOSING);
+        task = new PersistentTaskInProgress<>(task, jobState);
+        PersistentTasksInProgress tasks2 = new PersistentTasksInProgress(1L, Collections.singletonMap(1L, task));
+
+        e = expectThrows(ElasticsearchStatusException.class,
+                () -> OpenJobAction.validate("job_id", mlBuilder.build(), tasks2, nodes));
+        assertEquals("[job_id] expected state [closed] or [failed], but got [" + jobState +"]", e.getMessage());
+    }
+}
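Taken together, these cases pin down the contract of OpenJobAction.validate: no task list at all, a task in CLOSED or FAILED state, or a task allocated to a node that is no longer in the cluster all allow the job to be (re)opened, while OPENING, OPENED, or CLOSING on a live node is rejected. A sketch of the permissive path, reusing the fixtures built in the test above (mlBuilder and nodes as constructed there):

    // Sketch: a FAILED task no longer blocks reopening the job.
    PersistentTaskInProgress<OpenJobAction.Request> task =
            new PersistentTaskInProgress<>(1L, OpenJobAction.NAME, new OpenJobAction.Request("job_id"), "_node_id");
    task = new PersistentTaskInProgress<>(task, JobState.FAILED);
    PersistentTasksInProgress tasks = new PersistentTasksInProgress(1L, Collections.singletonMap(1L, task));
    OpenJobAction.validate("job_id", mlBuilder.build(), tasks, nodes); // expected to return without throwing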

View File

@@ -14,6 +14,10 @@ import org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunnerTests;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
 import org.elasticsearch.xpack.ml.support.AbstractStreamableXContentTestCase;
+import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;
+import org.elasticsearch.xpack.persistent.PersistentTasksInProgress.PersistentTaskInProgress;
+
+import java.util.Collections;

 import static org.hamcrest.Matchers.equalTo;
@@ -44,15 +48,19 @@ public class StartDatafeedActionRequestTests extends AbstractStreamableXContentTestCase<StartDatafeedAction.Request> {
                 .putJob(job1, false)
                 .build();
         Exception e = expectThrows(ResourceNotFoundException.class,
-                () -> StartDatafeedAction.validate("some-datafeed", mlMetadata1));
+                () -> StartDatafeedAction.validate("some-datafeed", mlMetadata1,
+                        new PersistentTasksInProgress(0L, Collections.emptyMap())));
         assertThat(e.getMessage(), equalTo("No datafeed with id [some-datafeed] exists"));

+        PersistentTaskInProgress<OpenJobAction.Request> task =
+                new PersistentTaskInProgress<>(0L, OpenJobAction.NAME, new OpenJobAction.Request("foo"), null);
+        PersistentTasksInProgress tasks = new PersistentTasksInProgress(0L, Collections.singletonMap(0L, task));
         DatafeedConfig datafeedConfig1 = DatafeedJobRunnerTests.createDatafeedConfig("foo-datafeed", "foo").build();
         MlMetadata mlMetadata2 = new MlMetadata.Builder(mlMetadata1)
                 .putDatafeed(datafeedConfig1)
                 .build();
         e = expectThrows(ElasticsearchStatusException.class,
-                () -> StartDatafeedAction.validate("foo-datafeed", mlMetadata2));
+                () -> StartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks));
         assertThat(e.getMessage(), equalTo("cannot start datafeed, expected job state [opened], but got [closed]"));
     }

View File

@@ -1,23 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.ml.action;
-
-import org.elasticsearch.xpack.ml.action.UpdateJobStateAction.Request;
-import org.elasticsearch.xpack.ml.job.config.JobState;
-import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
-
-public class UpdateJobStateRequestTests extends AbstractStreamableTestCase<Request> {
-
-    @Override
-    protected Request createTestInstance() {
-        return new Request(randomAsciiOfLengthBetween(1, 20), randomFrom(JobState.values()));
-    }
-
-    @Override
-    protected Request createBlankInstance() {
-        return new Request();
-    }
-}

View File

@@ -30,7 +30,6 @@ import org.elasticsearch.xpack.ml.job.config.AnalysisConfig;
 import org.elasticsearch.xpack.ml.job.config.DataDescription;
 import org.elasticsearch.xpack.ml.job.config.Detector;
 import org.elasticsearch.xpack.ml.job.config.Job;
-import org.elasticsearch.xpack.ml.job.config.JobState;
 import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
 import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
 import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;
@@ -47,7 +46,6 @@ import java.util.concurrent.ExecutorService;
 import java.util.function.Consumer;

 import static org.hamcrest.Matchers.instanceOf;
-import static org.hamcrest.Matchers.is;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Matchers.same;
@@ -122,7 +120,6 @@ public class DatafeedJobRunnerTests extends ESTestCase {
         MlMetadata mlMetadata = new MlMetadata.Builder()
                 .putJob(job, false)
                 .putDatafeed(datafeedConfig)
-                .updateState("foo", JobState.OPENED, null)
                 .build();
         when(clusterService.state()).thenReturn(ClusterState.builder(new ClusterName("_name"))
                 .metaData(MetaData.builder().putCustom(MlMetadata.TYPE, mlMetadata))
@@ -161,7 +158,6 @@ public class DatafeedJobRunnerTests extends ESTestCase {
         MlMetadata mlMetadata = new MlMetadata.Builder()
                 .putJob(job, false)
                 .putDatafeed(datafeedConfig)
-                .updateState("foo", JobState.OPENED, null)
                 .build();
         when(clusterService.state()).thenReturn(ClusterState.builder(new ClusterName("_name"))
                 .metaData(MetaData.builder().putCustom(MlMetadata.TYPE, mlMetadata))
@@ -190,7 +186,6 @@ public class DatafeedJobRunnerTests extends ESTestCase {
         MlMetadata mlMetadata = new MlMetadata.Builder()
                 .putJob(job, false)
                 .putDatafeed(datafeedConfig)
-                .updateState("foo", JobState.OPENED, null)
                 .build();
         when(clusterService.state()).thenReturn(ClusterState.builder(new ClusterName("_name"))
                 .metaData(MetaData.builder().putCustom(MlMetadata.TYPE, mlMetadata))

View File

@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.integration;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -42,8 +43,8 @@ import org.elasticsearch.xpack.ml.job.results.ModelDebugOutputTests;
 import org.junit.Before;

 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.Date;
 import java.util.EnumSet;
@@ -80,7 +81,7 @@ public class AutodetectResultProcessorIT extends ESSingleNodeTestCase {
     public void createComponents() {
         renormalizer = new NoOpRenormalizer();
         jobResultsPersister = new JobResultsPersister(nodeSettings(), client());
-        jobProvider = new JobProvider(client(), 1);
+        jobProvider = new JobProvider(client(), 1, TimeValue.timeValueSeconds(1));
     }

     public void testProcessResults() throws Exception {

View File

@@ -0,0 +1,54 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ml.integration;
+
+import org.elasticsearch.xpack.ml.action.GetJobsStatsAction;
+import org.elasticsearch.xpack.ml.action.OpenJobAction;
+import org.elasticsearch.xpack.ml.action.PutJobAction;
+import org.elasticsearch.xpack.ml.job.config.Job;
+import org.elasticsearch.xpack.ml.job.config.JobState;
+import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase;
+
+public class BasicDistributedJobsIT extends BaseMlIntegTestCase {
+
+    public void testFailOverBasics() throws Exception {
+        internalCluster().ensureAtLeastNumDataNodes(4);
+        ensureStableCluster(4);
+
+        Job.Builder job = createJob("job_id");
+        PutJobAction.Request putJobRequest = new PutJobAction.Request(job.build(true, job.getId()));
+        PutJobAction.Response putJobResponse = client().execute(PutJobAction.INSTANCE, putJobRequest).get();
+        assertTrue(putJobResponse.isAcknowledged());
+        ensureGreen();
+        OpenJobAction.Request openJobRequest = new OpenJobAction.Request(job.getId());
+        client().execute(OpenJobAction.INSTANCE, openJobRequest).get();
+        assertBusy(() -> {
+            GetJobsStatsAction.Response statsResponse =
+                    client().execute(GetJobsStatsAction.INSTANCE, new GetJobsStatsAction.Request(job.getId())).actionGet();
+            assertEquals(JobState.OPENED, statsResponse.getResponse().results().get(0).getState());
+        });
+
+        internalCluster().stopRandomDataNode();
+        ensureStableCluster(3);
+        ensureGreen();
+        assertBusy(() -> {
+            GetJobsStatsAction.Response statsResponse =
+                    client().execute(GetJobsStatsAction.INSTANCE, new GetJobsStatsAction.Request(job.getId())).actionGet();
+            assertEquals(JobState.OPENED, statsResponse.getResponse().results().get(0).getState());
+        });
+
+        internalCluster().stopRandomDataNode();
+        ensureStableCluster(2);
+        ensureGreen();
+        assertBusy(() -> {
+            GetJobsStatsAction.Response statsResponse =
+                    client().execute(GetJobsStatsAction.INSTANCE, new GetJobsStatsAction.Request(job.getId())).actionGet();
+            assertEquals(JobState.OPENED, statsResponse.getResponse().results().get(0).getState());
+        });
+        cleanupWorkaround(2);
+    }
+}
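The same stats-polling block appears three times in this test. A small helper along these lines could fold the repetition together; this is a hypothetical refactor sketched for clarity, not part of the commit:

    // Hypothetical helper: wait until the job's persistent task reports OPENED.
    private void assertJobOpened(Job.Builder job) throws Exception {
        assertBusy(() -> {
            GetJobsStatsAction.Response statsResponse =
                    client().execute(GetJobsStatsAction.INSTANCE, new GetJobsStatsAction.Request(job.getId())).actionGet();
            assertEquals(JobState.OPENED, statsResponse.getResponse().results().get(0).getState());
        });
    }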

View File

@@ -414,7 +414,7 @@ public class DatafeedJobIT extends ESRestTestCase {

     @After
     public void clearMlState() throws Exception {
-        new MlRestTestStateCleaner(client(), this).clearMlMetadata();
+        new MlRestTestStateCleaner(logger, client(), this).clearMlMetadata();
     }

     private static class DatafeedBuilder {

View File

@@ -361,6 +361,6 @@ public class MlJobIT extends ESRestTestCase {

     @After
     public void clearMlState() throws IOException {
-        new MlRestTestStateCleaner(client(), this).clearMlMetadata();
+        new MlRestTestStateCleaner(logger, client(), this).clearMlMetadata();
     }
 }

View File

@@ -5,6 +5,7 @@
  */
 package org.elasticsearch.xpack.ml.integration;

+import org.apache.logging.log4j.Logger;
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.test.rest.ESRestTestCase;
@@ -16,10 +17,12 @@ import java.util.Map;

 public class MlRestTestStateCleaner {

+    private final Logger logger;
     private final RestClient client;
     private final ESRestTestCase testCase;

-    public MlRestTestStateCleaner(RestClient client, ESRestTestCase testCase) {
+    public MlRestTestStateCleaner(Logger logger, RestClient client, ESRestTestCase testCase) {
+        this.logger = logger;
         this.client = client;
         this.testCase = testCase;
     }
@@ -44,7 +47,11 @@ public class MlRestTestStateCleaner {
             try {
                 client.performRequest("POST", "/_xpack/ml/datafeeds/" + datafeedId + "/_stop");
             } catch (Exception e) {
-                // ignore
+                if (e.getMessage().contains("datafeed already stopped, expected datafeed state [started], but got [stopped]")) {
+                    logger.debug("failed to stop datafeed [" + datafeedId + "]", e);
+                } else {
+                    logger.warn("failed to stop datafeed [" + datafeedId + "]", e);
+                }
             }
             client.performRequest("DELETE", "/_xpack/ml/datafeeds/" + datafeedId);
         }
@@ -65,7 +72,11 @@ public class MlRestTestStateCleaner {
             try {
                 client.performRequest("POST", "/_xpack/ml/anomaly_detectors/" + jobId + "/_close");
             } catch (Exception e) {
-                // ignore
+                if (e.getMessage().contains("cannot close job, expected job state [opened], but got [closed]")) {
+                    logger.debug("failed to close job [" + jobId + "]", e);
+                } else {
+                    logger.warn("failed to close job [" + jobId + "]", e);
+                }
             }
             client.performRequest("DELETE", "/_xpack/ml/anomaly_detectors/" + jobId);
         }

View File

@@ -5,79 +5,42 @@
  */
 package org.elasticsearch.xpack.ml.integration;

-import org.elasticsearch.client.Client;
-import org.elasticsearch.cluster.ClusterModule;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.metadata.MetaData;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.util.concurrent.ThreadContext;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.search.SearchModule;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.SecurityIntegTestCase;
-import org.elasticsearch.xpack.XPackPlugin;
-import org.elasticsearch.xpack.ml.MlPlugin;
-import org.elasticsearch.xpack.ml.action.DatafeedJobsIT;
+import org.elasticsearch.xpack.ml.action.CloseJobAction;
 import org.elasticsearch.xpack.ml.action.GetJobsStatsAction;
 import org.elasticsearch.xpack.ml.action.OpenJobAction;
 import org.elasticsearch.xpack.ml.action.PutJobAction;
-import org.elasticsearch.xpack.ml.action.StartDatafeedAction;
-import org.elasticsearch.xpack.ml.job.config.AnalysisConfig;
-import org.elasticsearch.xpack.ml.job.config.DataDescription;
-import org.elasticsearch.xpack.ml.job.config.Detector;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.config.JobState;
-import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
-import org.elasticsearch.xpack.security.authc.support.SecuredString;
 import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;
-import org.elasticsearch.xpack.persistent.PersistentActionRequest;
-import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;
-import org.junit.After;
+import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase;
+import org.elasticsearch.xpack.persistent.PersistentActionResponse;

-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
+import java.util.concurrent.ExecutionException;

-import static org.elasticsearch.test.XContentTestUtils.convertToMap;
-import static org.elasticsearch.test.XContentTestUtils.differenceBetweenMapsIgnoringArrayOrder;
-import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
-
-@ESIntegTestCase.ClusterScope(numDataNodes = 1)
-public class TooManyJobsIT extends SecurityIntegTestCase {
-
-    @Override
-    protected Settings externalClusterClientSettings() {
-        return Settings.builder().put(super.externalClusterClientSettings()).put("transport.type", "security4")
-                .put(MlPlugin.ML_ENABLED.getKey(), true)
-                .put(ThreadContext.PREFIX + ".Authorization",
-                        basicAuthHeaderValue("elastic", new SecuredString("changeme".toCharArray())))
-                .build();
-    }
-
-    @Override
-    protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Collections.singleton(XPackPlugin.class);
-    }
-
-    @Override
-    protected Collection<Class<? extends Plugin>> transportClientPlugins() {
-        return nodePlugins();
-    }
-
-    @After
-    public void clearMlMetadata() throws Exception {
-        DatafeedJobsIT.clearMlMetadata(client());
-    }
-
-    public void testCannotStartTooManyAnalyticalProcesses() throws Exception {
-        int maxRunningJobsPerNode = AutodetectProcessManager.MAX_RUNNING_JOBS_PER_NODE.getDefault(Settings.EMPTY);
-        logger.info("[{}] is [{}]", AutodetectProcessManager.MAX_RUNNING_JOBS_PER_NODE.getKey(), maxRunningJobsPerNode);
-        for (int i = 1; i <= (maxRunningJobsPerNode + 1); i++) {
+public class TooManyJobsIT extends BaseMlIntegTestCase {
+
+    public void testSingleNode() throws Exception {
+        verifyMaxNumberOfJobsLimit(1, randomIntBetween(1, 32));
+    }
+
+    public void testMultipleNodes() throws Exception {
+        verifyMaxNumberOfJobsLimit(3, randomIntBetween(1, 32));
+    }
+
+    private void verifyMaxNumberOfJobsLimit(int numNodes, int maxNumberOfJobsPerNode) throws Exception {
+        // clear all nodes, so that we can set max_running_jobs setting:
+        internalCluster().ensureAtMostNumDataNodes(0);
+        logger.info("[{}] is [{}]", AutodetectProcessManager.MAX_RUNNING_JOBS_PER_NODE.getKey(), maxNumberOfJobsPerNode);
+        for (int i = 0; i < numNodes; i++) {
+            internalCluster().startNode(Settings.builder()
+                    .put(AutodetectProcessManager.MAX_RUNNING_JOBS_PER_NODE.getKey(), maxNumberOfJobsPerNode));
+        }
+        logger.info("Started [{}] nodes", numNodes);
+
+        int clusterWideMaxNumberOfJobs = numNodes * maxNumberOfJobsPerNode;
+        for (int i = 1; i <= (clusterWideMaxNumberOfJobs + 1); i++) {
             Job.Builder job = createJob(Integer.toString(i));
             PutJobAction.Request putJobRequest = new PutJobAction.Request(job.build(true, job.getId()));
             PutJobAction.Response putJobResponse = client().execute(PutJobAction.INSTANCE, putJobRequest).get();
@@ -85,111 +48,34 @@ public class TooManyJobsIT extends SecurityIntegTestCase {
             try {
                 OpenJobAction.Request openJobRequest = new OpenJobAction.Request(job.getId());
-                OpenJobAction.Response openJobResponse = client().execute(OpenJobAction.INSTANCE, openJobRequest).get();
-                assertTrue(openJobResponse.isOpened());
+                PersistentActionResponse openJobResponse = client().execute(OpenJobAction.INSTANCE, openJobRequest).get();
                 assertBusy(() -> {
                     GetJobsStatsAction.Response statsResponse =
                             client().execute(GetJobsStatsAction.INSTANCE, new GetJobsStatsAction.Request(job.getId())).actionGet();
                     assertEquals(statsResponse.getResponse().results().get(0).getState(), JobState.OPENED);
                 });
                 logger.info("Opened {}th job", i);
-            } catch (Exception e) {
-                Throwable cause = e.getCause().getCause();
-                if (IllegalArgumentException.class.equals(cause.getClass()) == false) {
-                    logger.warn("Unexpected cause", e);
-                }
-                assertEquals(IllegalArgumentException.class, cause.getClass());
-                assertEquals("Timeout expired while waiting for job state to change to [opened]", cause.getMessage());
+            } catch (ExecutionException e) {
+                Exception cause = (Exception) e.getCause();
+                assertEquals(ElasticsearchStatusException.class, cause.getClass());
+                assertEquals("[" + i + "] expected state [" + JobState.OPENED + "] but got [" + JobState.FAILED +"]", cause.getMessage());
                 logger.info("good news everybody --> reached maximum number of allowed opened jobs, after trying to open the {}th job", i);
-                // now manually clean things up and see if we can succeed to run one new job
-                clearMlMetadata();
-                putJobResponse = client().execute(PutJobAction.INSTANCE, putJobRequest).get();
-                assertTrue(putJobResponse.isAcknowledged());
-                OpenJobAction.Response openJobResponse = client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(job.getId()))
-                        .get();
-                assertTrue(openJobResponse.isOpened());
+
+                // close the first job and check if the latest job gets opened:
+                CloseJobAction.Request closeRequest = new CloseJobAction.Request("1");
+                CloseJobAction.Response closeResponse = client().execute(CloseJobAction.INSTANCE, closeRequest).actionGet();
+                assertTrue(closeResponse.isClosed());
                 assertBusy(() -> {
                     GetJobsStatsAction.Response statsResponse =
                             client().execute(GetJobsStatsAction.INSTANCE, new GetJobsStatsAction.Request(job.getId())).actionGet();
                     assertEquals(statsResponse.getResponse().results().get(0).getState(), JobState.OPENED);
                 });
+                cleanupWorkaround(numNodes);
                 return;
             }
         }
-        fail("shouldn't be able to add more than [" + maxRunningJobsPerNode + "] jobs");
+        cleanupWorkaround(numNodes);
+        fail("shouldn't be able to add more than [" + clusterWideMaxNumberOfJobs + "] jobs");
     }
-
-    private Job.Builder createJob(String id) {
-        DataDescription.Builder dataDescription = new DataDescription.Builder();
-        dataDescription.setFormat(DataDescription.DataFormat.JSON);
-        dataDescription.setTimeFormat(DataDescription.EPOCH_MS);
-
-        Detector.Builder d = new Detector.Builder("count", null);
-        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(d.build()));
-
-        Job.Builder builder = new Job.Builder();
-        builder.setId(id);
-        builder.setAnalysisConfig(analysisConfig);
-        builder.setDataDescription(dataDescription);
-        return builder;
-    }
-
-    @Override
-    protected void ensureClusterStateConsistency() throws IOException {
-        ensureClusterStateConsistencyWorkAround();
-    }
-
-    // TODO: Fix in ES. In ESIntegTestCase we should get all NamedWriteableRegistry.Entry entries from ESIntegTestCase#nodePlugins()
-    public static void ensureClusterStateConsistencyWorkAround() throws IOException {
-        if (cluster() != null && cluster().size() > 0) {
-            List<NamedWriteableRegistry.Entry> namedWritables = new ArrayList<>(ClusterModule.getNamedWriteables());
-            SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
-            namedWritables.addAll(searchModule.getNamedWriteables());
-            namedWritables.add(new NamedWriteableRegistry.Entry(MetaData.Custom.class, "ml", MlMetadata::new));
-            namedWritables.add(new NamedWriteableRegistry.Entry(ClusterState.Custom.class, PersistentTasksInProgress.TYPE,
-                    PersistentTasksInProgress::new));
-            namedWritables.add(new NamedWriteableRegistry.Entry(PersistentActionRequest.class, StartDatafeedAction.NAME,
-                    StartDatafeedAction.Request::new));
-            final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(namedWritables);
-            ClusterState masterClusterState = client().admin().cluster().prepareState().all().get().getState();
-            byte[] masterClusterStateBytes = ClusterState.Builder.toBytes(masterClusterState);
-            // remove local node reference
-            masterClusterState = ClusterState.Builder.fromBytes(masterClusterStateBytes, null, namedWriteableRegistry);
-            Map<String, Object> masterStateMap = convertToMap(masterClusterState);
-            int masterClusterStateSize = ClusterState.Builder.toBytes(masterClusterState).length;
-            String masterId = masterClusterState.nodes().getMasterNodeId();
-            for (Client client : cluster().getClients()) {
-                ClusterState localClusterState = client.admin().cluster().prepareState().all().setLocal(true).get().getState();
-                byte[] localClusterStateBytes = ClusterState.Builder.toBytes(localClusterState);
-                // remove local node reference
-                localClusterState = ClusterState.Builder.fromBytes(localClusterStateBytes, null, namedWriteableRegistry);
-                final Map<String, Object> localStateMap = convertToMap(localClusterState);
-                final int localClusterStateSize = ClusterState.Builder.toBytes(localClusterState).length;
-                // Check that the non-master node has the same version of the cluster state as the master and
-                // that the master node matches the master (otherwise there is no requirement for the cluster state to match)
-                if (masterClusterState.version() == localClusterState.version() &&
-                        masterId.equals(localClusterState.nodes().getMasterNodeId())) {
-                    try {
-                        assertEquals("clusterstate UUID does not match", masterClusterState.stateUUID(),
-                                localClusterState.stateUUID());
-                        // We cannot compare serialization bytes since serialization order of maps is not guaranteed
-                        // but we can compare serialization sizes - they should be the same
-                        assertEquals("clusterstate size does not match", masterClusterStateSize, localClusterStateSize);
-                        // Compare JSON serialization
-                        assertNull("clusterstate JSON serialization does not match",
-                                differenceBetweenMapsIgnoringArrayOrder(masterStateMap, localStateMap));
-                    } catch (AssertionError error) {
-                        fail("Cluster state from master:\n" + masterClusterState.toString() + "\nLocal cluster state:\n" +
-                                localClusterState.toString());
-                        throw error;
-                    }
-                }
-            }
-        }
-    }
 }
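The limit this test exercises is simple arithmetic: the cluster admits at most numNodes * maxNumberOfJobsPerNode open jobs, so with, say, 3 nodes and max_running_jobs set to 10 per node, 30 jobs can open and the 31st open request is the one expected to surface the FAILED-state message asserted above.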

View File

@@ -22,18 +22,17 @@ import org.elasticsearch.index.Index;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.ml.action.PutJobAction;
 import org.elasticsearch.xpack.ml.action.util.QueryPage;
-import org.elasticsearch.xpack.ml.notifications.Auditor;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
 import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex;
 import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
 import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister;
+import org.elasticsearch.xpack.ml.notifications.Auditor;
 import org.junit.Before;

 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Set;
-import java.util.function.BiConsumer;
 import java.util.stream.Collectors;

 import static org.elasticsearch.xpack.ml.job.config.JobTests.buildJobBuilder;
@@ -43,8 +42,6 @@ import static org.mockito.Matchers.anyString;
 import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;

 public class JobManagerTests extends ESTestCase {
@@ -96,21 +93,6 @@ public class JobManagerTests extends ESTestCase {
         assertEquals(job, jobManager.getJobOrThrowIfUnknown(cs, "foo"));
     }

-    public void tesGetJobAllocation() {
-        JobManager jobManager = createJobManager();
-        Job job = buildJobBuilder("foo").build();
-        MlMetadata mlMetadata = new MlMetadata.Builder()
-                .putJob(job, false)
-                .assignToNode("foo", "nodeId")
-                .build();
-        ClusterState cs = ClusterState.builder(new ClusterName("_name"))
-                .metaData(MetaData.builder().putCustom(MlMetadata.TYPE, mlMetadata)).build();
-        when(clusterService.state()).thenReturn(cs);
-
-        assertEquals("nodeId", jobManager.getJobAllocation("foo").getNodeId());
-        expectThrows(ResourceNotFoundException.class, () -> jobManager.getJobAllocation("bar"));
-    }
-
     public void testGetJob_GivenJobIdIsAll() {
         MlMetadata.Builder mlMetadata = new MlMetadata.Builder();
         for (int i = 0; i < 3; i++) {

View File

@@ -1,43 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.ml.job.metadata;
-
-import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.xpack.ml.job.config.JobState;
-import org.elasticsearch.xpack.ml.support.AbstractSerializingTestCase;
-
-public class AllocationTests extends AbstractSerializingTestCase<Allocation> {
-
-    @Override
-    protected Allocation createTestInstance() {
-        String nodeId = randomBoolean() ? randomAsciiOfLength(10) : null;
-        String jobId = randomAsciiOfLength(10);
-        boolean ignoreDowntime = randomBoolean();
-        JobState jobState = randomFrom(JobState.values());
-        String stateReason = randomBoolean() ? randomAsciiOfLength(10) : null;
-        return new Allocation(nodeId, jobId, ignoreDowntime, jobState, stateReason);
-    }
-
-    @Override
-    protected Writeable.Reader<Allocation> instanceReader() {
-        return Allocation::new;
-    }
-
-    @Override
-    protected Allocation parseInstance(XContentParser parser) {
-        return Allocation.PARSER.apply(parser, null).build();
-    }
-
-    public void testUnsetIgnoreDownTime() {
-        Allocation allocation = new Allocation("_node_id", "_job_id", true, JobState.OPENING, null);
-        assertTrue(allocation.isIgnoreDowntime());
-        Allocation.Builder builder = new Allocation.Builder(allocation);
-        builder.setState(JobState.OPENED);
-        allocation = builder.build();
-        assertFalse(allocation.isIgnoreDowntime());
-    }
-}
}

View File

@ -15,6 +15,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.ml.action.OpenJobAction;
import org.elasticsearch.xpack.ml.action.StartDatafeedAction; import org.elasticsearch.xpack.ml.action.StartDatafeedAction;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfigTests; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfigTests;
@ -24,6 +25,7 @@ import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.config.JobTests; import org.elasticsearch.xpack.ml.job.config.JobTests;
import org.elasticsearch.xpack.ml.support.AbstractSerializingTestCase; import org.elasticsearch.xpack.ml.support.AbstractSerializingTestCase;
import org.elasticsearch.xpack.persistent.PersistentTasksInProgress; import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;
import org.elasticsearch.xpack.persistent.PersistentTasksInProgress.PersistentTaskInProgress;
import java.io.IOException; import java.io.IOException;
import java.util.Collections; import java.util.Collections;
@ -32,7 +34,6 @@ import static org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunnerTests.createD
import static org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunnerTests.createDatafeedJob; import static org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunnerTests.createDatafeedJob;
import static org.elasticsearch.xpack.ml.job.config.JobTests.buildJobBuilder; import static org.elasticsearch.xpack.ml.job.config.JobTests.buildJobBuilder;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance; import static org.hamcrest.Matchers.sameInstance;
@ -58,15 +59,6 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
} else { } else {
builder.putJob(job, false); builder.putJob(job, false);
} }
if (randomBoolean()) {
builder.updateState(job.getId(), JobState.OPENING, randomBoolean() ? "first reason" : null);
if (randomBoolean()) {
builder.updateState(job.getId(), JobState.OPENED, randomBoolean() ? "second reason" : null);
if (randomBoolean()) {
builder.updateState(job.getId(), JobState.CLOSING, randomBoolean() ? "third reason" : null);
}
}
}
} }
return builder.build(); return builder.build();
} }
@ -105,10 +97,8 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
        MlMetadata result = builder.build();
        assertThat(result.getJobs().get("1"), sameInstance(job1));
-        assertThat(result.getAllocations().get("1").getState(), equalTo(JobState.CLOSED));
        assertThat(result.getDatafeeds().get("1"), nullValue());
        assertThat(result.getJobs().get("2"), sameInstance(job2));
-        assertThat(result.getAllocations().get("2").getState(), equalTo(JobState.CLOSED));
        assertThat(result.getDatafeeds().get("2"), nullValue());

        builder = new MlMetadata.Builder(result);
@ -126,25 +116,23 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
    }

    public void testRemoveJob() {
-        Job job1 = buildJobBuilder("1").build();
+        Job.Builder jobBuilder = buildJobBuilder("1");
+        jobBuilder.setDeleted(true);
+        Job job1 = jobBuilder.build();
        MlMetadata.Builder builder = new MlMetadata.Builder();
        builder.putJob(job1, false);

        MlMetadata result = builder.build();
        assertThat(result.getJobs().get("1"), sameInstance(job1));
-        assertThat(result.getAllocations().get("1").getState(), equalTo(JobState.CLOSED));
        assertThat(result.getDatafeeds().get("1"), nullValue());

        builder = new MlMetadata.Builder(result);
-        builder.updateState("1", JobState.DELETING, null);
        assertThat(result.getJobs().get("1"), sameInstance(job1));
-        assertThat(result.getAllocations().get("1").getState(), equalTo(JobState.CLOSED));
        assertThat(result.getDatafeeds().get("1"), nullValue());

-        builder.deleteJob("1");
+        builder.deleteJob("1", new PersistentTasksInProgress(0L, Collections.emptyMap()));
        result = builder.build();
        assertThat(result.getJobs().get("1"), nullValue());
-        assertThat(result.getAllocations().get("1"), nullValue());
        assertThat(result.getDatafeeds().get("1"), nullValue());
    }
@ -152,16 +140,19 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
        Job job1 = buildJobBuilder("1").build();
        MlMetadata.Builder builder1 = new MlMetadata.Builder();
        builder1.putJob(job1, false);
-        builder1.updateState("1", JobState.OPENING, null);
-        builder1.updateState("1", JobState.OPENED, null);

        MlMetadata result = builder1.build();
        assertThat(result.getJobs().get("1"), sameInstance(job1));
-        assertThat(result.getAllocations().get("1").getState(), equalTo(JobState.OPENED));
        assertThat(result.getDatafeeds().get("1"), nullValue());

+        PersistentTaskInProgress<OpenJobAction.Request> task =
+                new PersistentTaskInProgress<>(
+                        new PersistentTaskInProgress<>(0L, OpenJobAction.NAME, new OpenJobAction.Request("1"), null),
+                        JobState.CLOSED
+                );
        MlMetadata.Builder builder2 = new MlMetadata.Builder(result);
-        ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder2.deleteJob("1"));
+        ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
+                () -> builder2.deleteJob("1", new PersistentTasksInProgress(0L, Collections.singletonMap(0L, task))));
        assertThat(e.status(), equalTo(RestStatus.CONFLICT));
    }
@ -172,7 +163,8 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
        builder.putJob(job1, false);
        builder.putDatafeed(datafeedConfig1);

-        ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder.deleteJob(job1.getId()));
+        ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
+                () -> builder.deleteJob(job1.getId(), new PersistentTasksInProgress(0L, Collections.emptyMap())));
        assertThat(e.status(), equalTo(RestStatus.CONFLICT));
        String expectedMsg = "Cannot delete job [" + job1.getId() + "] while datafeed [" + datafeedConfig1.getId() + "] refers to it";
        assertThat(e.getMessage(), equalTo(expectedMsg));
@ -180,7 +172,8 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
    public void testRemoveJob_failBecauseJobDoesNotExist() {
        MlMetadata.Builder builder1 = new MlMetadata.Builder();
-        expectThrows(ResourceNotFoundException.class, () -> builder1.deleteJob("1"));
+        expectThrows(ResourceNotFoundException.class,
+                () -> builder1.deleteJob("1", new PersistentTasksInProgress(0L, Collections.emptyMap())));
    }

    public void testCrudDatafeed() {
@ -192,14 +185,12 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
        MlMetadata result = builder.build();
        assertThat(result.getJobs().get("foo"), sameInstance(job1));
-        assertThat(result.getAllocations().get("foo").getState(), equalTo(JobState.CLOSED));
        assertThat(result.getDatafeeds().get("datafeed1"), sameInstance(datafeedConfig1));

        builder = new MlMetadata.Builder(result);
        builder.removeDatafeed("datafeed1", new PersistentTasksInProgress(0, Collections.emptyMap()));
        result = builder.build();
        assertThat(result.getJobs().get("foo"), sameInstance(job1));
-        assertThat(result.getAllocations().get("foo").getState(), equalTo(JobState.CLOSED));
        assertThat(result.getDatafeeds().get("datafeed1"), nullValue());
    }
@ -251,17 +242,14 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
        MlMetadata.Builder builder = new MlMetadata.Builder();
        builder.putJob(job1, false);
        builder.putDatafeed(datafeedConfig1);
-        builder.updateState("foo", JobState.OPENING, null);
-        builder.updateState("foo", JobState.OPENED, null);

        MlMetadata result = builder.build();
        assertThat(result.getJobs().get("foo"), sameInstance(job1));
-        assertThat(result.getAllocations().get("foo").getState(), equalTo(JobState.OPENED));
        assertThat(result.getDatafeeds().get("datafeed1"), sameInstance(datafeedConfig1));

        StartDatafeedAction.Request request = new StartDatafeedAction.Request("datafeed1", 0L);
-        PersistentTasksInProgress.PersistentTaskInProgress<StartDatafeedAction.Request> taskInProgress =
-                new PersistentTasksInProgress.PersistentTaskInProgress<>(0, StartDatafeedAction.NAME, request, null);
+        PersistentTaskInProgress<StartDatafeedAction.Request> taskInProgress =
+                new PersistentTaskInProgress<>(0, StartDatafeedAction.NAME, request, null);
        PersistentTasksInProgress tasksInProgress =
                new PersistentTasksInProgress(1, Collections.singletonMap(taskInProgress.getId(), taskInProgress));
@ -271,27 +259,4 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
        assertThat(e.status(), equalTo(RestStatus.CONFLICT));
    }

-    public void testUpdateAllocation_setFinishedTime() {
-        MlMetadata.Builder builder = new MlMetadata.Builder();
-        builder.putJob(buildJobBuilder("my_job_id").build(), false);
-        builder.updateState("my_job_id", JobState.OPENING, null);
-        builder.updateState("my_job_id", JobState.OPENED, null);
-        MlMetadata mlMetadata = builder.build();
-        assertThat(mlMetadata.getJobs().get("my_job_id").getFinishedTime(), nullValue());
-        builder.updateState("my_job_id", JobState.CLOSED, null);
-        mlMetadata = builder.build();
-        assertThat(mlMetadata.getJobs().get("my_job_id").getFinishedTime(), notNullValue());
-    }
-
-    public void testUpdateState_failBecauseJobDoesNotExist() {
-        MlMetadata.Builder builder = new MlMetadata.Builder();
-        expectThrows(ResourceNotFoundException.class, () -> builder.updateState("missing-job", JobState.CLOSED, "for testting"));
-    }
-
-    public void testSetIgnoreDowntime_failBecauseJobDoesNotExist() {
-        MlMetadata.Builder builder = new MlMetadata.Builder();
-        expectThrows(ResourceNotFoundException.class, () -> builder.setIgnoreDowntime("missing-job"));
-    }
}

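The tests above pin down the new contract: MlMetadata.Builder#deleteJob now takes the cluster's PersistentTasksInProgress and only removes a job that has no running persistent task. A minimal sketch of the happy path, reusing the buildJobBuilder fixture from JobTests; the final assertion is illustrative only:

    Job.Builder jobBuilder = buildJobBuilder("1");
    jobBuilder.setDeleted(true);                    // DELETING is now a boolean flag on the job config
    MlMetadata.Builder mlBuilder = new MlMetadata.Builder();
    mlBuilder.putJob(jobBuilder.build(), false);
    // An empty task map models "no persistent task holds this job open", so the
    // delete is allowed; a task entry for the job would raise a CONFLICT instead.
    mlBuilder.deleteJob("1", new PersistentTasksInProgress(0L, Collections.emptyMap()));
    assertThat(mlBuilder.build().getJobs().get("1"), nullValue());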
View File

@ -5,7 +5,6 @@
 */
package org.elasticsearch.xpack.ml.job.persistence;

-import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
@ -18,6 +17,7 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
+import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.SearchHit;
@ -27,14 +27,12 @@ import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ml.action.DeleteJobAction;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.config.AnalysisLimits;
+import org.elasticsearch.xpack.ml.job.config.Job;
+import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.CategorizerState;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;
-import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelState;
-import org.elasticsearch.xpack.ml.notifications.AuditActivity;
-import org.elasticsearch.xpack.ml.notifications.AuditMessage;
-import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.Quantiles;
import org.elasticsearch.xpack.ml.job.results.AnomalyRecord;
import org.elasticsearch.xpack.ml.job.results.Bucket;
@ -42,6 +40,8 @@ import org.elasticsearch.xpack.ml.job.results.CategoryDefinition;
import org.elasticsearch.xpack.ml.job.results.Influencer;
import org.elasticsearch.xpack.ml.job.results.PerPartitionMaxProbabilities;
import org.elasticsearch.xpack.ml.job.results.Result;
+import org.elasticsearch.xpack.ml.notifications.AuditActivity;
+import org.elasticsearch.xpack.ml.notifications.AuditMessage;
import org.elasticsearch.xpack.ml.notifications.Auditor;
import org.mockito.ArgumentCaptor;
@ -1150,7 +1150,7 @@ public class JobProviderTests extends ESTestCase {
    }

    private JobProvider createProvider(Client client) {
-        return new JobProvider(client, 0);
+        return new JobProvider(client, 0, TimeValue.timeValueSeconds(1));
    }

    private static GetResponse createGetResponse(boolean exists, Map<String, Object> source) throws IOException {
@ -1173,7 +1173,7 @@ public class JobProviderTests extends ESTestCase {
            SearchHit hit = new SearchHit(123, String.valueOf(map.hashCode()), new Text("foo"), fields)
                    .sourceRef(XContentFactory.jsonBuilder().map(_source).bytes());
            list.add(hit);
        }
        SearchHits hits = new SearchHits(list.toArray(new SearchHit[0]), source.size(), 1);

View File

@ -146,7 +146,8 @@ public class AutodetectCommunicatorTests extends ESTestCase {
            return null;
        }).when(executorService).execute(any(Runnable.class));
        DataCountsReporter dataCountsReporter = mock(DataCountsReporter.class);
-        return new AutodetectCommunicator(createJobDetails(), autodetectProcess, dataCountsReporter, autoDetectResultProcessor, e -> {});
+        return new AutodetectCommunicator(0L, createJobDetails(), autodetectProcess, dataCountsReporter, autoDetectResultProcessor,
+                e -> {});
    }

    public void testWriteToJobInUse() throws IOException {

View File

@ -6,15 +6,14 @@
package org.elasticsearch.xpack.ml.job.process.autodetect;

import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.client.Client;
+import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.ml.MlPlugin;
-import org.elasticsearch.xpack.ml.action.UpdateJobStateAction;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.config.AnalysisConfig;
@ -25,7 +24,6 @@ import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.config.MlFilter;
import org.elasticsearch.xpack.ml.job.config.ModelDebugConfig;
-import org.elasticsearch.xpack.ml.job.metadata.Allocation;
import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister;
@ -36,6 +34,7 @@ import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.Quantiles;
import org.elasticsearch.xpack.ml.job.process.normalizer.NormalizerFactory;
+import org.elasticsearch.xpack.persistent.UpdatePersistentTaskStatusAction;
import org.junit.Before;
import org.mockito.Mockito;
@ -62,6 +61,7 @@ import static org.hamcrest.core.IsEqual.equalTo;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt;
+import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
@ -128,11 +128,11 @@ public class AutodetectProcessManagerTests extends ESTestCase {
        when(jobManager.getJobOrThrowIfUnknown("foo")).thenReturn(createJobDetails("foo"));
        AutodetectProcessManager manager = createManager(communicator, client);

-        manager.openJob("foo", false, e -> {});
+        manager.openJob("foo", 1L, false, e -> {});
        assertEquals(1, manager.numberOfOpenJobs());
        assertTrue(manager.jobHasActiveAutodetectProcess("foo"));
-        UpdateJobStateAction.Request expectedRequest = new UpdateJobStateAction.Request("foo", JobState.OPENED);
-        verify(client).execute(eq(UpdateJobStateAction.INSTANCE), eq(expectedRequest), any());
+        UpdatePersistentTaskStatusAction.Request expectedRequest = new UpdatePersistentTaskStatusAction.Request(1L, JobState.OPENED);
+        verify(client).execute(eq(UpdatePersistentTaskStatusAction.INSTANCE), eq(expectedRequest), any());
    }

    public void testOpenJob_exceedMaxNumJobs() {
@ -173,18 +173,26 @@ public class AutodetectProcessManagerTests extends ESTestCase {
            consumer.accept(dataCounts, modelSnapshot, quantiles, filters);
            return null;
        }).when(manager).gatherRequiredInformation(any(), any(), any());
+        doAnswer(invocationOnMock -> {
+            @SuppressWarnings("unchecked")
+            CheckedConsumer<Exception, IOException> consumer = (CheckedConsumer<Exception, IOException>) invocationOnMock.getArguments()[2];
+            consumer.accept(null);
+            return null;
+        }).when(manager).setJobState(anyLong(), eq(JobState.FAILED), any());

-        manager.openJob("foo", false, e -> {});
-        manager.openJob("bar", false, e -> {});
-        manager.openJob("baz", false, e -> {});
+        manager.openJob("foo", 1L, false, e -> {});
+        manager.openJob("bar", 2L, false, e -> {});
+        manager.openJob("baz", 3L, false, e -> {});
        assertEquals(3, manager.numberOfOpenJobs());

-        Exception e = expectThrows(ElasticsearchStatusException.class, () -> manager.openJob("foobar", false, e1 -> {}));
+        Exception[] holder = new Exception[1];
+        manager.openJob("foobar", 4L, false, e -> holder[0] = e);
+        Exception e = holder[0];
        assertEquals("max running job capacity [3] reached", e.getMessage());

-        manager.closeJob("baz");
+        manager.closeJob("baz", null);
        assertEquals(2, manager.numberOfOpenJobs());
-        manager.openJob("foobar", false, e1 -> {});
+        manager.openJob("foobar", 4L, false, e1 -> {});
        assertEquals(3, manager.numberOfOpenJobs());
    }
@ -194,7 +202,7 @@ public class AutodetectProcessManagerTests extends ESTestCase {
        assertEquals(0, manager.numberOfOpenJobs());

        DataLoadParams params = new DataLoadParams(TimeRange.builder().build(), Optional.empty());
-        manager.openJob("foo", false, e -> {});
+        manager.openJob("foo", 1L, false, e -> {});
        manager.processData("foo", createInputStream(""), params);
        assertEquals(1, manager.numberOfOpenJobs());
    }
@ -207,7 +215,7 @@ public class AutodetectProcessManagerTests extends ESTestCase {
        InputStream inputStream = createInputStream("");
        doThrow(new IOException("blah")).when(communicator).writeToJob(inputStream, params);

-        manager.openJob("foo", false, e -> {});
+        manager.openJob("foo", 1L, false, e -> {});
        ESTestCase.expectThrows(ElasticsearchException.class,
                () -> manager.processData("foo", inputStream, params));
    }
@ -217,12 +225,12 @@ public class AutodetectProcessManagerTests extends ESTestCase {
        AutodetectProcessManager manager = createManager(communicator);
        assertEquals(0, manager.numberOfOpenJobs());

-        manager.openJob("foo", false, e -> {});
+        manager.openJob("foo", 1L, false, e -> {});
        manager.processData("foo", createInputStream(""), mock(DataLoadParams.class));

        // job is created
        assertEquals(1, manager.numberOfOpenJobs());
-        manager.closeJob("foo");
+        manager.closeJob("foo", null);
        assertEquals(0, manager.numberOfOpenJobs());
    }
@ -232,7 +240,7 @@ public class AutodetectProcessManagerTests extends ESTestCase {
        DataLoadParams params = new DataLoadParams(TimeRange.builder().startTime("1000").endTime("2000").build(), Optional.empty());
        InputStream inputStream = createInputStream("");
-        manager.openJob("foo", false, e -> {});
+        manager.openJob("foo", 1L, false, e -> {});
        manager.processData("foo", inputStream, params);
        verify(communicator).writeToJob(inputStream, params);
    }
@ -242,7 +250,7 @@ public class AutodetectProcessManagerTests extends ESTestCase {
        AutodetectProcessManager manager = createManager(communicator);
        InputStream inputStream = createInputStream("");
-        manager.openJob("foo", false, e -> {});
+        manager.openJob("foo", 1L, false, e -> {});
        manager.processData("foo", inputStream, mock(DataLoadParams.class));

        InterimResultsParams params = InterimResultsParams.builder().build();
@ -283,7 +291,7 @@ public class AutodetectProcessManagerTests extends ESTestCase {
        AutodetectProcessManager manager = createManager(communicator);
        assertFalse(manager.jobHasActiveAutodetectProcess("foo"));

-        manager.openJob("foo", false, e -> {});
+        manager.openJob("foo", 1L, false, e -> {});
        manager.processData("foo", createInputStream(""), mock(DataLoadParams.class));

        assertTrue(manager.jobHasActiveAutodetectProcess("foo"));
@ -297,7 +305,7 @@ public class AutodetectProcessManagerTests extends ESTestCase {
        givenAllocationWithState(JobState.OPENED);

        InputStream inputStream = createInputStream("");
-        manager.openJob("foo", false, e -> {});
+        manager.openJob("foo", 1L, false, e -> {});
        DataCounts dataCounts = manager.processData("foo", inputStream, mock(DataLoadParams.class));

        assertThat(dataCounts, equalTo(new DataCounts("foo")));
@ -325,14 +333,12 @@ public class AutodetectProcessManagerTests extends ESTestCase {
                jobResultsPersister, jobDataCountsPersister, autodetectProcessFactory, normalizerFactory);

        expectThrows(EsRejectedExecutionException.class,
-                () -> manager.create("my_id", dataCounts, modelSnapshot, quantiles, filters, false, e -> {}));
+                () -> manager.create("my_id", 1L, dataCounts, modelSnapshot, quantiles, filters, false, e -> {}));
        verify(autodetectProcess, times(1)).close();
    }

    private void givenAllocationWithState(JobState state) {
-        Allocation.Builder allocation = new Allocation.Builder();
-        allocation.setState(state);
-        when(jobManager.getJobAllocation("foo")).thenReturn(allocation.build());
+        when(jobManager.getJobState("foo")).thenReturn(state);
    }

    private AutodetectProcessManager createManager(AutodetectCommunicator communicator) {
@ -347,13 +353,13 @@ public class AutodetectProcessManagerTests extends ESTestCase {
                jobResultsPersister, jobDataCountsPersister, autodetectProcessFactory, normalizerFactory);
        manager = spy(manager);
        doReturn(communicator).when(manager)
-                .create(any(), eq(dataCounts), eq(modelSnapshot), eq(quantiles), eq(filters), anyBoolean(), any());
+                .create(any(), anyLong(), eq(dataCounts), eq(modelSnapshot), eq(quantiles), eq(filters), anyBoolean(), any());
        return manager;
    }

    private AutodetectProcessManager createManagerAndCallProcessData(AutodetectCommunicator communicator, String jobId) {
        AutodetectProcessManager manager = createManager(communicator);
-        manager.openJob(jobId, false, e -> {});
+        manager.openJob(jobId, 1L, false, e -> {});
        manager.processData(jobId, createInputStream(""), mock(DataLoadParams.class));
        return manager;
    }

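Taken together, these tests show the shape of the reworked AutodetectProcessManager API: openJob now carries the id of the persistent task backing the job, and state changes are reported through UpdatePersistentTaskStatusAction keyed by that id rather than through the removed UpdateJobStateAction. A hypothetical caller, sketched only from the signatures exercised above (the error handling is illustrative):

    long taskId = 1L;                                // id of the persistent task backing the job
    manager.openJob("foo", taskId, false, error -> {
        // invoked with null on success, or with the failure that prevented the open
    });
    manager.processData("foo", createInputStream(""), new DataLoadParams(TimeRange.builder().build(), Optional.empty()));
    manager.closeJob("foo", null);                   // the tests above pass null as the new second argument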
View File

@ -0,0 +1,135 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.support;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.SecurityIntegTestCase;
import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.CloseJobAction;
import org.elasticsearch.xpack.ml.action.DeleteDatafeedAction;
import org.elasticsearch.xpack.ml.action.DeleteJobAction;
import org.elasticsearch.xpack.ml.action.GetDatafeedsStatsAction;
import org.elasticsearch.xpack.ml.action.StopDatafeedAction;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedState;
import org.elasticsearch.xpack.ml.job.config.AnalysisConfig;
import org.elasticsearch.xpack.ml.job.config.DataDescription;
import org.elasticsearch.xpack.ml.job.config.Detector;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.persistent.RemovePersistentTaskAction;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import static org.hamcrest.Matchers.equalTo;
/**
 * A base class for testing datafeed and job lifecycle specifics.
 *
 * Note: for other types of integration tests you should use the external test cluster created by the Gradle integTest task.
 * For example, tests extending this base class run with the non-native autodetect process.
 */
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0,
        transportClientRatio = 0, supportsDedicatedMasters = false)
public abstract class BaseMlIntegTestCase extends SecurityIntegTestCase {

    @Override
    protected boolean ignoreExternalCluster() {
        return true;
    }

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        Settings.Builder settings = Settings.builder().put(super.nodeSettings(nodeOrdinal));
        settings.put(MlPlugin.USE_NATIVE_PROCESS_OPTION.getKey(), false);
        settings.put(MlPlugin.ML_ENABLED.getKey(), true);
        return settings.build();
    }

    protected Job.Builder createJob(String id) {
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setFormat(DataDescription.DataFormat.JSON);
        dataDescription.setTimeFormat(DataDescription.EPOCH_MS);

        Detector.Builder d = new Detector.Builder("count", null);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(d.build()));

        Job.Builder builder = new Job.Builder();
        builder.setId(id);
        builder.setAnalysisConfig(analysisConfig);
        builder.setDataDescription(dataDescription);
        return builder;
    }
    // The ml plugin creates the state, notifications and meta indices automatically.
    // When the test framework then removes all indices, the ml plugin adds them back,
    // causing validation to fail. We should move to index templates instead, as that
    // will fix this test problem.
    protected void cleanupWorkaround(int numNodes) throws Exception {
        deleteAllDatafeeds(client());
        deleteAllJobs(client());
        for (int i = 0; i < numNodes; i++) {
            internalCluster().stopRandomDataNode();
        }
        internalCluster().startNode(Settings.builder().put(MlPlugin.ML_ENABLED.getKey(), false));
    }
    private void deleteAllDatafeeds(Client client) throws Exception {
        MetaData metaData = client.admin().cluster().prepareState().get().getState().getMetaData();
        MlMetadata mlMetadata = metaData.custom(MlMetadata.TYPE);
        for (DatafeedConfig datafeed : mlMetadata.getDatafeeds().values()) {
            String datafeedId = datafeed.getId();
            try {
                RemovePersistentTaskAction.Response stopResponse =
                        client.execute(StopDatafeedAction.INSTANCE, new StopDatafeedAction.Request(datafeedId)).get();
                assertTrue(stopResponse.isAcknowledged());
            } catch (ExecutionException e) {
                // CONFLICT is ok, as it means the datafeed has already stopped, which isn't an issue at all.
                if (RestStatus.CONFLICT != ExceptionsHelper.status(e.getCause())) {
                    throw new RuntimeException(e);
                }
            }
            assertBusy(() -> {
                try {
                    GetDatafeedsStatsAction.Request request = new GetDatafeedsStatsAction.Request(datafeedId);
                    GetDatafeedsStatsAction.Response r = client.execute(GetDatafeedsStatsAction.INSTANCE, request).get();
                    assertThat(r.getResponse().results().get(0).getDatafeedState(), equalTo(DatafeedState.STOPPED));
                } catch (InterruptedException | ExecutionException e) {
                    throw new RuntimeException(e);
                }
            });
            DeleteDatafeedAction.Response deleteResponse =
                    client.execute(DeleteDatafeedAction.INSTANCE, new DeleteDatafeedAction.Request(datafeedId)).get();
            assertTrue(deleteResponse.isAcknowledged());
        }
    }
    private void deleteAllJobs(Client client) throws Exception {
        MetaData metaData = client.admin().cluster().prepareState().get().getState().getMetaData();
        MlMetadata mlMetadata = metaData.custom(MlMetadata.TYPE);
        for (Map.Entry<String, Job> entry : mlMetadata.getJobs().entrySet()) {
            String jobId = entry.getKey();
            try {
                CloseJobAction.Response response =
                        client.execute(CloseJobAction.INSTANCE, new CloseJobAction.Request(jobId)).get();
                assertTrue(response.isClosed());
            } catch (Exception e) {
                logger.warn("Job [" + jobId + "] couldn't be closed", e);
            }
            DeleteJobAction.Response response =
                    client.execute(DeleteJobAction.INSTANCE, new DeleteJobAction.Request(jobId)).get();
            assertTrue(response.isAcknowledged());
        }
    }
}

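A sketch of how a concrete test might build on this base class; the commented placeholder stands in for the job registration and open calls, which this commit does not prescribe:

    public class JobLifecycleIT extends BaseMlIntegTestCase {

        public void testOpenAndCleanUp() throws Exception {
            internalCluster().ensureAtLeastNumDataNodes(1);
            Job.Builder job = createJob("lifecycle-test");   // helper from the base class
            // ... put the job config and open it via the ML actions ...
            cleanupWorkaround(1);   // closes jobs, deletes datafeeds, and bounces the data node
        }
    }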
View File

@ -16,14 +16,14 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.xpack.persistent.PersistentTasksInProgress.PersistentTaskInProgress;
-import org.elasticsearch.xpack.persistent.CompletionPersistentTaskAction.Response;
-import org.elasticsearch.xpack.persistent.TestPersistentActionPlugin.TestRequest;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportResponse.Empty;
+import org.elasticsearch.xpack.persistent.CompletionPersistentTaskAction.Response;
+import org.elasticsearch.xpack.persistent.PersistentTasksInProgress.PersistentTaskInProgress;
+import org.elasticsearch.xpack.persistent.TestPersistentActionPlugin.TestRequest;

import java.io.IOException;
import java.util.ArrayList;
@ -152,7 +152,7 @@ public class PersistentActionCoordinatorTests extends ESTestCase {
        ClusterService clusterService = createClusterService();
        AtomicLong capturedTaskId = new AtomicLong();
        AtomicReference<ActionListener<CancelTasksResponse>> capturedListener = new AtomicReference<>();
-        PersistentActionService persistentActionService = new PersistentActionService(Settings.EMPTY, null, null) {
+        PersistentActionService persistentActionService = new PersistentActionService(Settings.EMPTY, null, null, null) {
            @Override
            public void sendCancellation(long taskId, ActionListener<CancelTasksResponse> listener) {
                capturedTaskId.set(taskId);
@ -224,7 +224,8 @@ public class PersistentActionCoordinatorTests extends ESTestCase {
        AtomicLong capturedTaskId = new AtomicLong(-1L);
        AtomicReference<Exception> capturedException = new AtomicReference<>();
        AtomicReference<ActionListener<Response>> capturedListener = new AtomicReference<>();
-        PersistentActionService persistentActionService = new PersistentActionService(Settings.EMPTY, clusterService, null) {
+        PersistentActionService persistentActionService =
+                new PersistentActionService(Settings.EMPTY, mock(ThreadPool.class), clusterService, null) {
            @Override
            public void sendCompletionNotification(long taskId, Exception failure, ActionListener<Response> listener) {
                capturedTaskId.set(taskId);

View File

@ -42,7 +42,6 @@ import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskCancelledException;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportResponse.Empty;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.watcher.ResourceWatcherService;
@ -57,7 +56,6 @@ import java.util.Objects;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;

import static java.util.Objects.requireNonNull;
import static org.elasticsearch.test.ESTestCase.awaitBusy;
@ -87,7 +85,7 @@ public class TestPersistentActionPlugin extends Plugin implements ActionPlugin {
                                               ResourceWatcherService resourceWatcherService, ScriptService scriptService,
                                               NamedXContentRegistry xContentRegistry) {
-        PersistentActionService persistentActionService = new PersistentActionService(Settings.EMPTY, clusterService, client);
+        PersistentActionService persistentActionService = new PersistentActionService(Settings.EMPTY, threadPool, clusterService, client);
        PersistentActionRegistry persistentActionRegistry = new PersistentActionRegistry(Settings.EMPTY);
        return Arrays.asList(
                persistentActionService,
@ -100,7 +98,7 @@ public class TestPersistentActionPlugin extends Plugin implements ActionPlugin {
    public List<NamedWriteableRegistry.Entry> getNamedWriteables() {
        return Arrays.asList(
                new NamedWriteableRegistry.Entry(PersistentActionRequest.class, TestPersistentAction.NAME, TestRequest::new),
-                new NamedWriteableRegistry.Entry(PersistentActionCoordinator.Status.class,
+                new NamedWriteableRegistry.Entry(Task.Status.class,
                        PersistentActionCoordinator.Status.NAME, PersistentActionCoordinator.Status::new),
                new NamedWriteableRegistry.Entry(ClusterState.Custom.class, PersistentTasksInProgress.TYPE, PersistentTasksInProgress::new),
                new NamedWriteableRegistry.Entry(NamedDiff.class, PersistentTasksInProgress.TYPE, PersistentTasksInProgress::readDiffFrom),

View File

@ -26,7 +26,7 @@ public class XPackRestIT extends XPackRestTestCase {
    @After
    public void clearMlState() throws IOException {
-        new MlRestTestStateCleaner(client(), this).clearMlMetadata();
+        new MlRestTestStateCleaner(logger, client(), this).clearMlMetadata();
    }

    public XPackRestIT(ClientYamlTestCandidate testCandidate) {

View File

@ -106,7 +106,6 @@ cluster:admin/ingest/pipeline/get
cluster:admin/ingest/pipeline/put
cluster:admin/ingest/pipeline/simulate
cluster:admin/ml/filters/get
-cluster:admin/ml/anomaly_detectors/internal_open
cluster:admin/ml/anomaly_detectors/results/categories/get
cluster:admin/ml/anomaly_detectors/stats/get
cluster:admin/ml/anomaly_detectors/results/buckets/get
@ -133,7 +132,6 @@ cluster:admin/ml/datafeeds/stats/get
cluster:admin/ml/datafeeds/stop
cluster:admin/ml/datafeeds/start
cluster:admin/ml/anomaly_detectors/open
-cluster:admin/ml/anomaly_detectors/state/update
cluster:admin/ml/job/update
indices:data/write/delete/mlbyquery
cluster:admin/ml/job/update/process

View File

@ -10,7 +10,7 @@
"required": true, "required": true,
"description": "The name of the job to close" "description": "The name of the job to close"
}, },
"close_timeout": { "timeout": {
"type": "time", "type": "time",
"description": "Controls the time to wait until a job has closed. Default to 30 minutes" "description": "Controls the time to wait until a job has closed. Default to 30 minutes"
} }

View File

@ -14,7 +14,7 @@
"type": "boolean", "type": "boolean",
"description": "Controls if gaps in data are treated as anomalous or as a maintenance window after a job re-start" "description": "Controls if gaps in data are treated as anomalous or as a maintenance window after a job re-start"
}, },
"open_timeout": { "timeout": {
"type": "time", "type": "time",
"description": "Controls the time to wait until a job has opened. Default to 30 minutes" "description": "Controls the time to wait until a job has opened. Default to 30 minutes"
} }

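With both specs renaming their parameter to plain timeout, callers adjust the query string accordingly. A sketch using the low-level RestClient; the endpoint paths are assumptions based on the 5.x ML REST layout, not taken from this commit:

    Map<String, String> params = Collections.singletonMap("timeout", "30m");
    // formerly open_timeout:
    restClient.performRequest("POST", "/_xpack/ml/anomaly_detectors/my-job/_open", params);
    // formerly close_timeout:
    restClient.performRequest("POST", "/_xpack/ml/anomaly_detectors/my-job/_close", params);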
View File

@ -88,6 +88,11 @@ setup:
{"airline":"AAL","responsetime":"132.2046","time":"1403481600"} {"airline":"AAL","responsetime":"132.2046","time":"1403481600"}
{"airline":"JZA","responsetime":"990.4628","time":"1403481600"} {"airline":"JZA","responsetime":"990.4628","time":"1403481600"}
- do:
xpack.ml.flush_job:
job_id: job-stats-test
- match: { flushed: true }
- do: - do:
xpack.ml.close_job: xpack.ml.close_job:
job_id: job-stats-test job_id: job-stats-test