Rename scheduler/scheduled to datafeed (elastic/elasticsearch#755)

Relates elastic/elasticsearch#630

The more subtle changes to the datafeed endpoints required by elastic/elasticsearch#630
are NOT in this commit, as they would be drowned out by the rename.

Original commit: elastic/x-pack-elasticsearch@3318971da9
David Roberts 2017-01-19 13:44:19 +00:00 committed by GitHub
parent 10441a3e38
commit 36bdcaff5d
90 changed files with 1612 additions and 1612 deletions
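
For orientation, the cluster action name constants renamed in the files shown below (old name on the left, new name on the right):

    cluster:admin/ml/scheduler/delete          -> cluster:admin/ml/datafeed/delete
    cluster:admin/ml/schedulers/get            -> cluster:admin/ml/datafeeds/get
    cluster:admin/ml/schedulers/stats/get      -> cluster:admin/ml/datafeeds/stats/get
    cluster:admin/ml/scheduler/internal_start  -> cluster:admin/ml/datafeed/internal_start
    cluster:admin/ml/scheduler/put             -> cluster:admin/ml/datafeed/put
    cluster:admin/ml/scheduler/start           -> cluster:admin/ml/datafeed/start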

View File

@@ -31,7 +31,7 @@ import org.elasticsearch.xpack.ml.action.CloseJobAction;
 import org.elasticsearch.xpack.ml.action.DeleteJobAction;
 import org.elasticsearch.xpack.ml.action.DeleteListAction;
 import org.elasticsearch.xpack.ml.action.DeleteModelSnapshotAction;
-import org.elasticsearch.xpack.ml.action.DeleteSchedulerAction;
+import org.elasticsearch.xpack.ml.action.DeleteDatafeedAction;
 import org.elasticsearch.xpack.ml.action.FlushJobAction;
 import org.elasticsearch.xpack.ml.action.GetBucketsAction;
 import org.elasticsearch.xpack.ml.action.GetCategoriesAction;
@@ -41,20 +41,20 @@ import org.elasticsearch.xpack.ml.action.GetJobsStatsAction;
 import org.elasticsearch.xpack.ml.action.GetListAction;
 import org.elasticsearch.xpack.ml.action.GetModelSnapshotsAction;
 import org.elasticsearch.xpack.ml.action.GetRecordsAction;
-import org.elasticsearch.xpack.ml.action.GetSchedulersAction;
-import org.elasticsearch.xpack.ml.action.GetSchedulersStatsAction;
-import org.elasticsearch.xpack.ml.action.InternalStartSchedulerAction;
+import org.elasticsearch.xpack.ml.action.GetDatafeedsAction;
+import org.elasticsearch.xpack.ml.action.GetDatafeedsStatsAction;
+import org.elasticsearch.xpack.ml.action.InternalStartDatafeedAction;
 import org.elasticsearch.xpack.ml.action.OpenJobAction;
 import org.elasticsearch.xpack.ml.action.PostDataAction;
 import org.elasticsearch.xpack.ml.action.PutJobAction;
 import org.elasticsearch.xpack.ml.action.PutListAction;
-import org.elasticsearch.xpack.ml.action.PutSchedulerAction;
+import org.elasticsearch.xpack.ml.action.PutDatafeedAction;
 import org.elasticsearch.xpack.ml.action.RevertModelSnapshotAction;
-import org.elasticsearch.xpack.ml.action.StartSchedulerAction;
-import org.elasticsearch.xpack.ml.action.StopSchedulerAction;
+import org.elasticsearch.xpack.ml.action.StartDatafeedAction;
+import org.elasticsearch.xpack.ml.action.StopDatafeedAction;
 import org.elasticsearch.xpack.ml.action.UpdateJobStatusAction;
 import org.elasticsearch.xpack.ml.action.UpdateModelSnapshotAction;
-import org.elasticsearch.xpack.ml.action.UpdateSchedulerStatusAction;
+import org.elasticsearch.xpack.ml.action.UpdateDatafeedStatusAction;
 import org.elasticsearch.xpack.ml.action.ValidateDetectorAction;
 import org.elasticsearch.xpack.ml.action.ValidateTransformAction;
 import org.elasticsearch.xpack.ml.action.ValidateTransformsAction;
@@ -101,16 +101,16 @@ import org.elasticsearch.xpack.ml.rest.results.RestGetBucketsAction;
 import org.elasticsearch.xpack.ml.rest.results.RestGetCategoriesAction;
 import org.elasticsearch.xpack.ml.rest.results.RestGetInfluencersAction;
 import org.elasticsearch.xpack.ml.rest.results.RestGetRecordsAction;
-import org.elasticsearch.xpack.ml.rest.schedulers.RestDeleteSchedulerAction;
-import org.elasticsearch.xpack.ml.rest.schedulers.RestGetSchedulersAction;
-import org.elasticsearch.xpack.ml.rest.schedulers.RestGetSchedulersStatsAction;
-import org.elasticsearch.xpack.ml.rest.schedulers.RestPutSchedulerAction;
-import org.elasticsearch.xpack.ml.rest.schedulers.RestStartSchedulerAction;
-import org.elasticsearch.xpack.ml.rest.schedulers.RestStopSchedulerAction;
+import org.elasticsearch.xpack.ml.rest.datafeeds.RestDeleteDatafeedAction;
+import org.elasticsearch.xpack.ml.rest.datafeeds.RestGetDatafeedsAction;
+import org.elasticsearch.xpack.ml.rest.datafeeds.RestGetDatafeedsStatsAction;
+import org.elasticsearch.xpack.ml.rest.datafeeds.RestPutDatafeedAction;
+import org.elasticsearch.xpack.ml.rest.datafeeds.RestStartDatafeedAction;
+import org.elasticsearch.xpack.ml.rest.datafeeds.RestStopDatafeedAction;
 import org.elasticsearch.xpack.ml.rest.validate.RestValidateDetectorAction;
 import org.elasticsearch.xpack.ml.rest.validate.RestValidateTransformAction;
 import org.elasticsearch.xpack.ml.rest.validate.RestValidateTransformsAction;
-import org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunner;
+import org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunner;
 import org.elasticsearch.xpack.ml.utils.NamedPipeHelper;

 import java.io.IOException;
@@ -124,7 +124,7 @@ public class MlPlugin extends Plugin implements ActionPlugin {
     public static final String NAME = "ml";
     public static final String BASE_PATH = "/_xpack/ml/";
     public static final String THREAD_POOL_NAME = NAME;
-    public static final String SCHEDULED_RUNNER_THREAD_POOL_NAME = NAME + "_scheduled_runner";
+    public static final String DATAFEED_RUNNER_THREAD_POOL_NAME = NAME + "_datafeed_runner";
     public static final String AUTODETECT_PROCESS_THREAD_POOL_NAME = NAME + "_autodetect_process";
     // NORELEASE - temporary solution
@@ -199,7 +199,7 @@ public class MlPlugin extends Plugin implements ActionPlugin {
         DataProcessor dataProcessor = new AutodetectProcessManager(settings, client, threadPool, jobManager, jobProvider,
                 jobResultsPersister, jobRenormalizedResultsPersister, jobDataCountsPersister, autodetectResultsParser,
                 autodetectProcessFactory, normalizerFactory);
-        ScheduledJobRunner scheduledJobRunner = new ScheduledJobRunner(threadPool, client, clusterService, jobProvider,
+        DatafeedJobRunner datafeedJobRunner = new DatafeedJobRunner(threadPool, client, clusterService, jobProvider,
                 System::currentTimeMillis);
         JobLifeCycleService jobLifeCycleService =
@@ -225,7 +225,7 @@ public class MlPlugin extends Plugin implements ActionPlugin {
                 dataProcessor,
                 new MlInitializationService(settings, threadPool, clusterService, jobProvider),
                 jobDataCountsPersister,
-                scheduledJobRunner
+                datafeedJobRunner
         );
     }
@@ -253,12 +253,12 @@ public class MlPlugin extends Plugin implements ActionPlugin {
                 RestGetModelSnapshotsAction.class,
                 RestRevertModelSnapshotAction.class,
                 RestUpdateModelSnapshotAction.class,
-                RestGetSchedulersAction.class,
-                RestGetSchedulersStatsAction.class,
-                RestPutSchedulerAction.class,
-                RestDeleteSchedulerAction.class,
-                RestStartSchedulerAction.class,
-                RestStopSchedulerAction.class,
+                RestGetDatafeedsAction.class,
+                RestGetDatafeedsStatsAction.class,
+                RestPutDatafeedAction.class,
+                RestDeleteDatafeedAction.class,
+                RestStartDatafeedAction.class,
+                RestStopDatafeedAction.class,
                 RestDeleteModelSnapshotAction.class
         );
     }
@@ -272,7 +272,7 @@ public class MlPlugin extends Plugin implements ActionPlugin {
                 new ActionHandler<>(DeleteJobAction.INSTANCE, DeleteJobAction.TransportAction.class),
                 new ActionHandler<>(OpenJobAction.INSTANCE, OpenJobAction.TransportAction.class),
                 new ActionHandler<>(UpdateJobStatusAction.INSTANCE, UpdateJobStatusAction.TransportAction.class),
-                new ActionHandler<>(UpdateSchedulerStatusAction.INSTANCE, UpdateSchedulerStatusAction.TransportAction.class),
+                new ActionHandler<>(UpdateDatafeedStatusAction.INSTANCE, UpdateDatafeedStatusAction.TransportAction.class),
                 new ActionHandler<>(GetListAction.INSTANCE, GetListAction.TransportAction.class),
                 new ActionHandler<>(PutListAction.INSTANCE, PutListAction.TransportAction.class),
                 new ActionHandler<>(DeleteListAction.INSTANCE, DeleteListAction.TransportAction.class),
@@ -289,13 +289,13 @@ public class MlPlugin extends Plugin implements ActionPlugin {
                 new ActionHandler<>(GetModelSnapshotsAction.INSTANCE, GetModelSnapshotsAction.TransportAction.class),
                 new ActionHandler<>(RevertModelSnapshotAction.INSTANCE, RevertModelSnapshotAction.TransportAction.class),
                 new ActionHandler<>(UpdateModelSnapshotAction.INSTANCE, UpdateModelSnapshotAction.TransportAction.class),
-                new ActionHandler<>(GetSchedulersAction.INSTANCE, GetSchedulersAction.TransportAction.class),
-                new ActionHandler<>(GetSchedulersStatsAction.INSTANCE, GetSchedulersStatsAction.TransportAction.class),
-                new ActionHandler<>(PutSchedulerAction.INSTANCE, PutSchedulerAction.TransportAction.class),
-                new ActionHandler<>(DeleteSchedulerAction.INSTANCE, DeleteSchedulerAction.TransportAction.class),
-                new ActionHandler<>(StartSchedulerAction.INSTANCE, StartSchedulerAction.TransportAction.class),
-                new ActionHandler<>(InternalStartSchedulerAction.INSTANCE, InternalStartSchedulerAction.TransportAction.class),
-                new ActionHandler<>(StopSchedulerAction.INSTANCE, StopSchedulerAction.TransportAction.class),
+                new ActionHandler<>(GetDatafeedsAction.INSTANCE, GetDatafeedsAction.TransportAction.class),
+                new ActionHandler<>(GetDatafeedsStatsAction.INSTANCE, GetDatafeedsStatsAction.TransportAction.class),
+                new ActionHandler<>(PutDatafeedAction.INSTANCE, PutDatafeedAction.TransportAction.class),
+                new ActionHandler<>(DeleteDatafeedAction.INSTANCE, DeleteDatafeedAction.TransportAction.class),
+                new ActionHandler<>(StartDatafeedAction.INSTANCE, StartDatafeedAction.TransportAction.class),
+                new ActionHandler<>(InternalStartDatafeedAction.INSTANCE, InternalStartDatafeedAction.TransportAction.class),
+                new ActionHandler<>(StopDatafeedAction.INSTANCE, StopDatafeedAction.TransportAction.class),
                 new ActionHandler<>(DeleteModelSnapshotAction.INSTANCE, DeleteModelSnapshotAction.TransportAction.class)
         );
     }
@@ -310,15 +310,15 @@ public class MlPlugin extends Plugin implements ActionPlugin {
         FixedExecutorBuilder ml = new FixedExecutorBuilder(settings, THREAD_POOL_NAME,
                 maxNumberOfJobs * 2, 1000, "xpack.ml.thread_pool");
-        // fail quick to run autodetect process / scheduler, so no queues
+        // fail quick to run autodetect process / datafeed, so no queues
         // 4 threads: for c++ logging, result processing, state processing and restore state
         FixedExecutorBuilder autoDetect = new FixedExecutorBuilder(settings, AUTODETECT_PROCESS_THREAD_POOL_NAME,
                 maxNumberOfJobs * 4, 4, "xpack.ml.autodetect_process_thread_pool");
-        // TODO: if scheduled and non scheduled jobs are considered more equal and the scheduler and
+        // TODO: if datafeed and non datafeed jobs are considered more equal and the datafeed and
         // autodetect process are created at the same time then these two different TPs can merge.
-        FixedExecutorBuilder scheduler = new FixedExecutorBuilder(settings, SCHEDULED_RUNNER_THREAD_POOL_NAME,
-                maxNumberOfJobs, 1, "xpack.ml.scheduler_thread_pool");
-        return Arrays.asList(ml, autoDetect, scheduler);
+        FixedExecutorBuilder datafeed = new FixedExecutorBuilder(settings, DATAFEED_RUNNER_THREAD_POOL_NAME,
+                maxNumberOfJobs, 1, "xpack.ml.datafeed_thread_pool");
+        return Arrays.asList(ml, autoDetect, datafeed);
     }
 }

View File

@@ -29,20 +29,20 @@ import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
-import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
+import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
 import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

 import java.io.IOException;
 import java.util.Objects;

-public class DeleteSchedulerAction extends Action<DeleteSchedulerAction.Request, DeleteSchedulerAction.Response,
-        DeleteSchedulerAction.RequestBuilder> {
+public class DeleteDatafeedAction extends Action<DeleteDatafeedAction.Request, DeleteDatafeedAction.Response,
+        DeleteDatafeedAction.RequestBuilder> {

-    public static final DeleteSchedulerAction INSTANCE = new DeleteSchedulerAction();
-    public static final String NAME = "cluster:admin/ml/scheduler/delete";
+    public static final DeleteDatafeedAction INSTANCE = new DeleteDatafeedAction();
+    public static final String NAME = "cluster:admin/ml/datafeed/delete";

-    private DeleteSchedulerAction() {
+    private DeleteDatafeedAction() {
         super(NAME);
     }
@@ -58,17 +58,17 @@ public class DeleteSchedulerAction extends Action<DeleteSchedulerAction.Request,
     public static class Request extends AcknowledgedRequest<Request> implements ToXContent {

-        private String schedulerId;
+        private String datafeedId;

-        public Request(String schedulerId) {
-            this.schedulerId = ExceptionsHelper.requireNonNull(schedulerId, SchedulerConfig.ID.getPreferredName());
+        public Request(String datafeedId) {
+            this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName());
         }

         Request() {
         }

-        public String getSchedulerId() {
-            return schedulerId;
+        public String getDatafeedId() {
+            return datafeedId;
         }

         @Override
@@ -79,18 +79,18 @@ public class DeleteSchedulerAction extends Action<DeleteSchedulerAction.Request,
         @Override
         public void readFrom(StreamInput in) throws IOException {
             super.readFrom(in);
-            schedulerId = in.readString();
+            datafeedId = in.readString();
         }

         @Override
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
-            out.writeString(schedulerId);
+            out.writeString(datafeedId);
         }

         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.field(SchedulerConfig.ID.getPreferredName(), schedulerId);
+            builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
             return builder;
         }
@@ -99,18 +99,18 @@ public class DeleteSchedulerAction extends Action<DeleteSchedulerAction.Request,
             if (this == o) return true;
             if (o == null || getClass() != o.getClass()) return false;
             Request request = (Request) o;
-            return Objects.equals(schedulerId, request.schedulerId);
+            return Objects.equals(datafeedId, request.datafeedId);
         }

         @Override
         public int hashCode() {
-            return Objects.hash(schedulerId);
+            return Objects.hash(datafeedId);
         }
     }

     public static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> {
-        public RequestBuilder(ElasticsearchClient client, DeleteSchedulerAction action) {
+        public RequestBuilder(ElasticsearchClient client, DeleteDatafeedAction action) {
             super(client, action, new Request());
         }
     }
@@ -142,7 +142,7 @@ public class DeleteSchedulerAction extends Action<DeleteSchedulerAction.Request,
         @Inject
         public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
                 ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
-            super(settings, DeleteSchedulerAction.NAME, transportService, clusterService, threadPool, actionFilters,
+            super(settings, DeleteDatafeedAction.NAME, transportService, clusterService, threadPool, actionFilters,
                     indexNameExpressionResolver, Request::new);
         }
@@ -158,7 +158,7 @@ public class DeleteSchedulerAction extends Action<DeleteSchedulerAction.Request,
         @Override
         protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
-            clusterService.submitStateUpdateTask("delete-scheduler-" + request.getSchedulerId(),
+            clusterService.submitStateUpdateTask("delete-datafeed-" + request.getDatafeedId(),
                     new AckedClusterStateUpdateTask<Response>(request, listener) {

                 @Override
@@ -170,7 +170,7 @@ public class DeleteSchedulerAction extends Action<DeleteSchedulerAction.Request,
                 public ClusterState execute(ClusterState currentState) throws Exception {
                     MlMetadata currentMetadata = state.getMetaData().custom(MlMetadata.TYPE);
                     MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata)
-                            .removeScheduler(request.getSchedulerId()).build();
+                            .removeDatafeed(request.getDatafeedId()).build();
                     return ClusterState.builder(state).metaData(
                             MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, newMetadata).build())
                             .build();
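
A caller-side sketch of the renamed action, not part of this commit: it assumes `client` is an org.elasticsearch.client.Client handle and that "my-datafeed" is a hypothetical datafeed id.

    // Hypothetical usage sketch: the Request/Response shapes are unchanged by the
    // rename; only the Scheduler* names became Datafeed*.
    DeleteDatafeedAction.Request request = new DeleteDatafeedAction.Request("my-datafeed");
    DeleteDatafeedAction.Response response =
            client.execute(DeleteDatafeedAction.INSTANCE, request).actionGet();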

View File

@@ -30,8 +30,8 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
 import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
-import org.elasticsearch.xpack.ml.scheduler.Scheduler;
-import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
+import org.elasticsearch.xpack.ml.datafeed.Datafeed;
+import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

 import java.io.IOException;
@@ -40,15 +40,15 @@ import java.util.List;
 import java.util.Objects;
 import java.util.stream.Collectors;

-public class GetSchedulersAction extends Action<GetSchedulersAction.Request, GetSchedulersAction.Response,
-        GetSchedulersAction.RequestBuilder> {
+public class GetDatafeedsAction extends Action<GetDatafeedsAction.Request, GetDatafeedsAction.Response,
+        GetDatafeedsAction.RequestBuilder> {

-    public static final GetSchedulersAction INSTANCE = new GetSchedulersAction();
-    public static final String NAME = "cluster:admin/ml/schedulers/get";
+    public static final GetDatafeedsAction INSTANCE = new GetDatafeedsAction();
+    public static final String NAME = "cluster:admin/ml/datafeeds/get";

     private static final String ALL = "_all";

-    private GetSchedulersAction() {
+    private GetDatafeedsAction() {
         super(NAME);
     }
@@ -64,16 +64,16 @@ public class GetSchedulersAction extends Action<GetSchedulersAction.Request, Get
     public static class Request extends MasterNodeReadRequest<Request> {

-        private String schedulerId;
+        private String datafeedId;

-        public Request(String schedulerId) {
-            this.schedulerId = ExceptionsHelper.requireNonNull(schedulerId, SchedulerConfig.ID.getPreferredName());
+        public Request(String datafeedId) {
+            this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName());
         }

         Request() {}

-        public String getSchedulerId() {
-            return schedulerId;
+        public String getDatafeedId() {
+            return datafeedId;
         }

         @Override
@@ -84,18 +84,18 @@ public class GetSchedulersAction extends Action<GetSchedulersAction.Request, Get
         @Override
         public void readFrom(StreamInput in) throws IOException {
             super.readFrom(in);
-            schedulerId = in.readString();
+            datafeedId = in.readString();
         }

         @Override
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
-            out.writeString(schedulerId);
+            out.writeString(datafeedId);
         }

         @Override
         public int hashCode() {
-            return Objects.hash(schedulerId);
+            return Objects.hash(datafeedId);
         }

         @Override
@@ -107,54 +107,54 @@ public class GetSchedulersAction extends Action<GetSchedulersAction.Request, Get
                 return false;
             }
             Request other = (Request) obj;
-            return Objects.equals(schedulerId, other.schedulerId);
+            return Objects.equals(datafeedId, other.datafeedId);
         }
     }

     public static class RequestBuilder extends MasterNodeReadOperationRequestBuilder<Request, Response, RequestBuilder> {
-        public RequestBuilder(ElasticsearchClient client, GetSchedulersAction action) {
+        public RequestBuilder(ElasticsearchClient client, GetDatafeedsAction action) {
             super(client, action, new Request());
         }
     }

     public static class Response extends ActionResponse implements ToXContentObject {

-        private QueryPage<SchedulerConfig> schedulers;
+        private QueryPage<DatafeedConfig> datafeeds;

-        public Response(QueryPage<SchedulerConfig> schedulers) {
-            this.schedulers = schedulers;
+        public Response(QueryPage<DatafeedConfig> datafeeds) {
+            this.datafeeds = datafeeds;
         }

         public Response() {}

-        public QueryPage<SchedulerConfig> getResponse() {
-            return schedulers;
+        public QueryPage<DatafeedConfig> getResponse() {
+            return datafeeds;
         }

         @Override
         public void readFrom(StreamInput in) throws IOException {
             super.readFrom(in);
-            schedulers = new QueryPage<>(in, SchedulerConfig::new);
+            datafeeds = new QueryPage<>(in, DatafeedConfig::new);
         }

         @Override
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
-            schedulers.writeTo(out);
+            datafeeds.writeTo(out);
         }

         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             builder.startObject();
-            schedulers.doXContentBody(builder, params);
+            datafeeds.doXContentBody(builder, params);
             builder.endObject();
             return builder;
         }

         @Override
         public int hashCode() {
-            return Objects.hash(schedulers);
+            return Objects.hash(datafeeds);
         }

         @Override
@@ -166,7 +166,7 @@ public class GetSchedulersAction extends Action<GetSchedulersAction.Request, Get
                 return false;
             }
             Response other = (Response) obj;
-            return Objects.equals(schedulers, other.schedulers);
+            return Objects.equals(datafeeds, other.datafeeds);
         }

         @SuppressWarnings("deprecation")
@@ -191,7 +191,7 @@ public class GetSchedulersAction extends Action<GetSchedulersAction.Request, Get
         @Inject
         public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
                 ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
-            super(settings, GetSchedulersAction.NAME, transportService, clusterService, threadPool, actionFilters,
+            super(settings, GetDatafeedsAction.NAME, transportService, clusterService, threadPool, actionFilters,
                     indexNameExpressionResolver, Request::new);
         }
@@ -207,20 +207,20 @@ public class GetSchedulersAction extends Action<GetSchedulersAction.Request, Get
         @Override
         protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
-            logger.debug("Get scheduler '{}'", request.getSchedulerId());
-            QueryPage<SchedulerConfig> response = null;
+            logger.debug("Get datafeed '{}'", request.getDatafeedId());
+            QueryPage<DatafeedConfig> response = null;
             MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE);
-            if (ALL.equals(request.getSchedulerId())) {
-                List<SchedulerConfig> schedulerConfigs = mlMetadata.getSchedulers().values().stream().map(
+            if (ALL.equals(request.getDatafeedId())) {
+                List<DatafeedConfig> datafeedConfigs = mlMetadata.getDatafeeds().values().stream().map(
                         s -> s.getConfig()).collect(Collectors.toList());
-                response = new QueryPage<>(schedulerConfigs, schedulerConfigs.size(), Scheduler.RESULTS_FIELD);
+                response = new QueryPage<>(datafeedConfigs, datafeedConfigs.size(), Datafeed.RESULTS_FIELD);
             } else {
-                Scheduler scheduler = mlMetadata.getScheduler(request.getSchedulerId());
-                if (scheduler == null) {
-                    throw ExceptionsHelper.missingSchedulerException(request.getSchedulerId());
+                Datafeed datafeed = mlMetadata.getDatafeed(request.getDatafeedId());
+                if (datafeed == null) {
+                    throw ExceptionsHelper.missingDatafeedException(request.getDatafeedId());
                 }
-                response = new QueryPage<>(Collections.singletonList(scheduler.getConfig()), 1, Scheduler.RESULTS_FIELD);
+                response = new QueryPage<>(Collections.singletonList(datafeed.getConfig()), 1, Datafeed.RESULTS_FIELD);
             }
             listener.onResponse(new Response(response));
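
The ALL constant above reserves "_all" as the id that expands to every configured datafeed. A caller-side sketch, not part of this commit, again assuming a `client` handle:

    // "_all" returns a page of every datafeed config in the ML metadata;
    // a concrete id returns a single-element page or a missing-datafeed error.
    GetDatafeedsAction.Request request = new GetDatafeedsAction.Request("_all");
    GetDatafeedsAction.Response response =
            client.execute(GetDatafeedsAction.INSTANCE, request).actionGet();
    QueryPage<DatafeedConfig> datafeeds = response.getResponse();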

View File

@@ -32,9 +32,9 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
 import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
-import org.elasticsearch.xpack.ml.scheduler.Scheduler;
-import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
-import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
+import org.elasticsearch.xpack.ml.datafeed.Datafeed;
+import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
+import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

 import java.io.IOException;
@@ -43,16 +43,16 @@ import java.util.Collection;
 import java.util.List;
 import java.util.Objects;

-public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Request, GetSchedulersStatsAction.Response,
-        GetSchedulersStatsAction.RequestBuilder> {
+public class GetDatafeedsStatsAction extends Action<GetDatafeedsStatsAction.Request, GetDatafeedsStatsAction.Response,
+        GetDatafeedsStatsAction.RequestBuilder> {

-    public static final GetSchedulersStatsAction INSTANCE = new GetSchedulersStatsAction();
-    public static final String NAME = "cluster:admin/ml/schedulers/stats/get";
+    public static final GetDatafeedsStatsAction INSTANCE = new GetDatafeedsStatsAction();
+    public static final String NAME = "cluster:admin/ml/datafeeds/stats/get";

     private static final String ALL = "_all";
     private static final String STATUS = "status";

-    private GetSchedulersStatsAction() {
+    private GetDatafeedsStatsAction() {
         super(NAME);
     }
@@ -68,16 +68,16 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
     public static class Request extends MasterNodeReadRequest<Request> {

-        private String schedulerId;
+        private String datafeedId;

-        public Request(String schedulerId) {
-            this.schedulerId = ExceptionsHelper.requireNonNull(schedulerId, SchedulerConfig.ID.getPreferredName());
+        public Request(String datafeedId) {
+            this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName());
         }

         Request() {}

-        public String getSchedulerId() {
-            return schedulerId;
+        public String getDatafeedId() {
+            return datafeedId;
         }

         @Override
@@ -88,18 +88,18 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
         @Override
         public void readFrom(StreamInput in) throws IOException {
             super.readFrom(in);
-            schedulerId = in.readString();
+            datafeedId = in.readString();
         }

         @Override
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
-            out.writeString(schedulerId);
+            out.writeString(datafeedId);
         }

         @Override
         public int hashCode() {
-            return Objects.hash(schedulerId);
+            return Objects.hash(datafeedId);
         }

         @Override
@@ -111,47 +111,47 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
                 return false;
             }
             Request other = (Request) obj;
-            return Objects.equals(schedulerId, other.schedulerId);
+            return Objects.equals(datafeedId, other.datafeedId);
         }
     }

     public static class RequestBuilder extends MasterNodeReadOperationRequestBuilder<Request, Response, RequestBuilder> {
-        public RequestBuilder(ElasticsearchClient client, GetSchedulersStatsAction action) {
+        public RequestBuilder(ElasticsearchClient client, GetDatafeedsStatsAction action) {
             super(client, action, new Request());
         }
     }

     public static class Response extends ActionResponse implements ToXContentObject {

-        public static class SchedulerStats implements ToXContent, Writeable {
+        public static class DatafeedStats implements ToXContent, Writeable {

-            private final String schedulerId;
-            private final SchedulerStatus schedulerStatus;
+            private final String datafeedId;
+            private final DatafeedStatus datafeedStatus;

-            SchedulerStats(String schedulerId, SchedulerStatus schedulerStatus) {
-                this.schedulerId = Objects.requireNonNull(schedulerId);
-                this.schedulerStatus = Objects.requireNonNull(schedulerStatus);
+            DatafeedStats(String datafeedId, DatafeedStatus datafeedStatus) {
+                this.datafeedId = Objects.requireNonNull(datafeedId);
+                this.datafeedStatus = Objects.requireNonNull(datafeedStatus);
             }

-            SchedulerStats(StreamInput in) throws IOException {
-                schedulerId = in.readString();
-                schedulerStatus = SchedulerStatus.fromStream(in);
+            DatafeedStats(StreamInput in) throws IOException {
+                datafeedId = in.readString();
+                datafeedStatus = DatafeedStatus.fromStream(in);
             }

-            public String getSchedulerId() {
-                return schedulerId;
+            public String getDatafeedId() {
+                return datafeedId;
             }

-            public SchedulerStatus getSchedulerStatus() {
-                return schedulerStatus;
+            public DatafeedStatus getDatafeedStatus() {
+                return datafeedStatus;
             }

             @Override
             public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
                 builder.startObject();
-                builder.field(SchedulerConfig.ID.getPreferredName(), schedulerId);
-                builder.field(STATUS, schedulerStatus);
+                builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
+                builder.field(STATUS, datafeedStatus);
                 builder.endObject();
                 return builder;
@@ -159,13 +159,13 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
             @Override
             public void writeTo(StreamOutput out) throws IOException {
-                out.writeString(schedulerId);
-                schedulerStatus.writeTo(out);
+                out.writeString(datafeedId);
+                datafeedStatus.writeTo(out);
             }

             @Override
             public int hashCode() {
-                return Objects.hash(schedulerId, schedulerStatus);
+                return Objects.hash(datafeedId, datafeedStatus);
             }

             @Override
@@ -176,46 +176,46 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
                 if (getClass() != obj.getClass()) {
                     return false;
                 }
-                GetSchedulersStatsAction.Response.SchedulerStats other = (GetSchedulersStatsAction.Response.SchedulerStats) obj;
-                return Objects.equals(schedulerId, other.schedulerId) && Objects.equals(this.schedulerStatus, other.schedulerStatus);
+                GetDatafeedsStatsAction.Response.DatafeedStats other = (GetDatafeedsStatsAction.Response.DatafeedStats) obj;
+                return Objects.equals(datafeedId, other.datafeedId) && Objects.equals(this.datafeedStatus, other.datafeedStatus);
             }
         }

-        private QueryPage<SchedulerStats> schedulersStats;
+        private QueryPage<DatafeedStats> datafeedsStats;

-        public Response(QueryPage<SchedulerStats> schedulersStats) {
-            this.schedulersStats = schedulersStats;
+        public Response(QueryPage<DatafeedStats> datafeedsStats) {
+            this.datafeedsStats = datafeedsStats;
         }

         public Response() {}

-        public QueryPage<SchedulerStats> getResponse() {
-            return schedulersStats;
+        public QueryPage<DatafeedStats> getResponse() {
+            return datafeedsStats;
         }

         @Override
         public void readFrom(StreamInput in) throws IOException {
             super.readFrom(in);
-            schedulersStats = new QueryPage<>(in, SchedulerStats::new);
+            datafeedsStats = new QueryPage<>(in, DatafeedStats::new);
         }

         @Override
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
-            schedulersStats.writeTo(out);
+            datafeedsStats.writeTo(out);
         }

         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             builder.startObject();
-            schedulersStats.doXContentBody(builder, params);
+            datafeedsStats.doXContentBody(builder, params);
             builder.endObject();
             return builder;
         }

         @Override
         public int hashCode() {
-            return Objects.hash(schedulersStats);
+            return Objects.hash(datafeedsStats);
         }

         @Override
@@ -227,7 +227,7 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
                 return false;
             }
             Response other = (Response) obj;
-            return Objects.equals(schedulersStats, other.schedulersStats);
+            return Objects.equals(datafeedsStats, other.datafeedsStats);
         }

         @SuppressWarnings("deprecation")
@@ -252,7 +252,7 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
         @Inject
         public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
                 ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
-            super(settings, GetSchedulersStatsAction.NAME, transportService, clusterService, threadPool, actionFilters,
+            super(settings, GetDatafeedsStatsAction.NAME, transportService, clusterService, threadPool, actionFilters,
                     indexNameExpressionResolver, Request::new);
         }
@@ -268,24 +268,24 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
         @Override
         protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
-            logger.debug("Get stats for scheduler '{}'", request.getSchedulerId());
-            List<Response.SchedulerStats> stats = new ArrayList<>();
+            logger.debug("Get stats for datafeed '{}'", request.getDatafeedId());
+            List<Response.DatafeedStats> stats = new ArrayList<>();
             MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE);
-            if (ALL.equals(request.getSchedulerId())) {
-                Collection<Scheduler> schedulers = mlMetadata.getSchedulers().values();
-                for (Scheduler scheduler : schedulers) {
-                    stats.add(new Response.SchedulerStats(scheduler.getId(), scheduler.getStatus()));
+            if (ALL.equals(request.getDatafeedId())) {
+                Collection<Datafeed> datafeeds = mlMetadata.getDatafeeds().values();
+                for (Datafeed datafeed : datafeeds) {
+                    stats.add(new Response.DatafeedStats(datafeed.getId(), datafeed.getStatus()));
                 }
             } else {
-                Scheduler scheduler = mlMetadata.getScheduler(request.getSchedulerId());
-                if (scheduler == null) {
-                    throw ExceptionsHelper.missingSchedulerException(request.getSchedulerId());
+                Datafeed datafeed = mlMetadata.getDatafeed(request.getDatafeedId());
+                if (datafeed == null) {
+                    throw ExceptionsHelper.missingDatafeedException(request.getDatafeedId());
                 }
-                stats.add(new Response.SchedulerStats(scheduler.getId(), scheduler.getStatus()));
+                stats.add(new Response.DatafeedStats(datafeed.getId(), datafeed.getStatus()));
             }
-            QueryPage<Response.SchedulerStats> statsPage = new QueryPage<>(stats, stats.size(), Scheduler.RESULTS_FIELD);
+            QueryPage<Response.DatafeedStats> statsPage = new QueryPage<>(stats, stats.size(), Datafeed.RESULTS_FIELD);
             listener.onResponse(new Response(statsPage));
         }

View File

@@ -20,15 +20,15 @@ import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
-import org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunner;
+import org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunner;

-public class InternalStartSchedulerAction extends
-        Action<InternalStartSchedulerAction.Request, InternalStartSchedulerAction.Response, InternalStartSchedulerAction.RequestBuilder> {
+public class InternalStartDatafeedAction extends
+        Action<InternalStartDatafeedAction.Request, InternalStartDatafeedAction.Response, InternalStartDatafeedAction.RequestBuilder> {

-    public static final InternalStartSchedulerAction INSTANCE = new InternalStartSchedulerAction();
-    public static final String NAME = "cluster:admin/ml/scheduler/internal_start";
+    public static final InternalStartDatafeedAction INSTANCE = new InternalStartDatafeedAction();
+    public static final String NAME = "cluster:admin/ml/datafeed/internal_start";

-    private InternalStartSchedulerAction() {
+    private InternalStartDatafeedAction() {
         super(NAME);
     }
@@ -42,10 +42,10 @@ public class InternalStartSchedulerAction extends
         return new Response();
     }

-    public static class Request extends StartSchedulerAction.Request {
+    public static class Request extends StartDatafeedAction.Request {

-        Request(String schedulerId, long startTime) {
-            super(schedulerId, startTime);
+        Request(String datafeedId, long startTime) {
+            super(datafeedId, startTime);
         }

         Request() {
@@ -53,13 +53,13 @@ public class InternalStartSchedulerAction extends
         @Override
         public Task createTask(long id, String type, String action, TaskId parentTaskId) {
-            return new SchedulerTask(id, type, action, parentTaskId, getSchedulerId());
+            return new DatafeedTask(id, type, action, parentTaskId, getDatafeedId());
         }
     }

     static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {
-        public RequestBuilder(ElasticsearchClient client, InternalStartSchedulerAction action) {
+        public RequestBuilder(ElasticsearchClient client, InternalStartDatafeedAction action) {
             super(client, action, new Request());
         }
     }
@@ -71,15 +71,15 @@ public class InternalStartSchedulerAction extends
     }

-    public static class SchedulerTask extends CancellableTask {
+    public static class DatafeedTask extends CancellableTask {

-        private volatile ScheduledJobRunner.Holder holder;
+        private volatile DatafeedJobRunner.Holder holder;

-        public SchedulerTask(long id, String type, String action, TaskId parentTaskId, String schedulerId) {
-            super(id, type, action, "scheduler-" + schedulerId, parentTaskId);
+        public DatafeedTask(long id, String type, String action, TaskId parentTaskId, String datafeedId) {
+            super(id, type, action, "datafeed-" + datafeedId, parentTaskId);
         }

-        public void setHolder(ScheduledJobRunner.Holder holder) {
+        public void setHolder(DatafeedJobRunner.Holder holder) {
             this.holder = holder;
         }
@@ -98,21 +98,21 @@ public class InternalStartSchedulerAction extends
     public static class TransportAction extends HandledTransportAction<Request, Response> {

-        private final ScheduledJobRunner scheduledJobRunner;
+        private final DatafeedJobRunner datafeedJobRunner;

         @Inject
         public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool,
                 ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
-                ScheduledJobRunner scheduledJobRunner) {
-            super(settings, InternalStartSchedulerAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
+                DatafeedJobRunner datafeedJobRunner) {
+            super(settings, InternalStartDatafeedAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
                     Request::new);
-            this.scheduledJobRunner = scheduledJobRunner;
+            this.datafeedJobRunner = datafeedJobRunner;
         }

         @Override
         protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
-            SchedulerTask schedulerTask = (SchedulerTask) task;
-            scheduledJobRunner.run(request.getSchedulerId(), request.getStartTime(), request.getEndTime(), schedulerTask, (error) -> {
+            DatafeedTask datafeedTask = (DatafeedTask) task;
+            datafeedJobRunner.run(request.getDatafeedId(), request.getStartTime(), request.getEndTime(), datafeedTask, (error) -> {
                 if (error != null) {
                     listener.onFailure(error);
                 } else {

View File

@ -32,17 +32,17 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata; import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.io.IOException; import java.io.IOException;
import java.util.Objects; import java.util.Objects;
public class PutSchedulerAction extends Action<PutSchedulerAction.Request, PutSchedulerAction.Response, PutSchedulerAction.RequestBuilder> { public class PutDatafeedAction extends Action<PutDatafeedAction.Request, PutDatafeedAction.Response, PutDatafeedAction.RequestBuilder> {
public static final PutSchedulerAction INSTANCE = new PutSchedulerAction(); public static final PutDatafeedAction INSTANCE = new PutDatafeedAction();
public static final String NAME = "cluster:admin/ml/scheduler/put"; public static final String NAME = "cluster:admin/ml/datafeed/put";
private PutSchedulerAction() { private PutDatafeedAction() {
super(NAME); super(NAME);
} }
@ -58,23 +58,23 @@ public class PutSchedulerAction extends Action<PutSchedulerAction.Request, PutSc
public static class Request extends AcknowledgedRequest<Request> implements ToXContent { public static class Request extends AcknowledgedRequest<Request> implements ToXContent {
public static Request parseRequest(String schedulerId, XContentParser parser) { public static Request parseRequest(String datafeedId, XContentParser parser) {
SchedulerConfig.Builder scheduler = SchedulerConfig.PARSER.apply(parser, null); DatafeedConfig.Builder datafeed = DatafeedConfig.PARSER.apply(parser, null);
scheduler.setId(schedulerId); datafeed.setId(datafeedId);
return new Request(scheduler.build()); return new Request(datafeed.build());
} }
private SchedulerConfig scheduler; private DatafeedConfig datafeed;
public Request(SchedulerConfig scheduler) { public Request(DatafeedConfig datafeed) {
this.scheduler = scheduler; this.datafeed = datafeed;
} }
Request() { Request() {
} }
public SchedulerConfig getScheduler() { public DatafeedConfig getDatafeed() {
return scheduler; return datafeed;
} }
@Override @Override
@ -85,18 +85,18 @@ public class PutSchedulerAction extends Action<PutSchedulerAction.Request, PutSc
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
super.readFrom(in); super.readFrom(in);
scheduler = new SchedulerConfig(in); datafeed = new DatafeedConfig(in);
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out); super.writeTo(out);
scheduler.writeTo(out); datafeed.writeTo(out);
} }
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
scheduler.toXContent(builder, params); datafeed.toXContent(builder, params);
return builder; return builder;
} }
@ -105,56 +105,56 @@ public class PutSchedulerAction extends Action<PutSchedulerAction.Request, PutSc
if (this == o) return true; if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false; if (o == null || getClass() != o.getClass()) return false;
Request request = (Request) o; Request request = (Request) o;
return Objects.equals(scheduler, request.scheduler); return Objects.equals(datafeed, request.datafeed);
} }
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(scheduler); return Objects.hash(datafeed);
} }
} }
public static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> { public static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> {
public RequestBuilder(ElasticsearchClient client, PutSchedulerAction action) { public RequestBuilder(ElasticsearchClient client, PutDatafeedAction action) {
super(client, action, new Request()); super(client, action, new Request());
} }
} }
public static class Response extends AcknowledgedResponse implements ToXContentObject { public static class Response extends AcknowledgedResponse implements ToXContentObject {
private SchedulerConfig scheduler; private DatafeedConfig datafeed;
public Response(boolean acked, SchedulerConfig scheduler) { public Response(boolean acked, DatafeedConfig datafeed) {
super(acked); super(acked);
this.scheduler = scheduler; this.datafeed = datafeed;
} }
Response() { Response() {
} }
public SchedulerConfig getResponse() { public DatafeedConfig getResponse() {
return scheduler; return datafeed;
} }
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
super.readFrom(in); super.readFrom(in);
readAcknowledged(in); readAcknowledged(in);
scheduler = new SchedulerConfig(in); datafeed = new DatafeedConfig(in);
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out); super.writeTo(out);
writeAcknowledged(out); writeAcknowledged(out);
scheduler.writeTo(out); datafeed.writeTo(out);
} }
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(); builder.startObject();
scheduler.doXContentBody(builder, params); datafeed.doXContentBody(builder, params);
builder.endObject(); builder.endObject();
return builder; return builder;
} }
@ -164,12 +164,12 @@ public class PutSchedulerAction extends Action<PutSchedulerAction.Request, PutSc
if (this == o) return true; if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false; if (o == null || getClass() != o.getClass()) return false;
Response response = (Response) o; Response response = (Response) o;
return Objects.equals(scheduler, response.scheduler); return Objects.equals(datafeed, response.datafeed);
} }
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(scheduler); return Objects.hash(datafeed);
} }
} }
@ -179,7 +179,7 @@ public class PutSchedulerAction extends Action<PutSchedulerAction.Request, PutSc
public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, ActionFilters actionFilters, ThreadPool threadPool, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver) { IndexNameExpressionResolver indexNameExpressionResolver) {
super(settings, PutSchedulerAction.NAME, transportService, clusterService, threadPool, actionFilters, super(settings, PutDatafeedAction.NAME, transportService, clusterService, threadPool, actionFilters,
indexNameExpressionResolver, Request::new); indexNameExpressionResolver, Request::new);
} }
@ -195,28 +195,28 @@ public class PutSchedulerAction extends Action<PutSchedulerAction.Request, PutSc
@Override @Override
protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception { protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
clusterService.submitStateUpdateTask("put-scheduler-" + request.getScheduler().getId(), clusterService.submitStateUpdateTask("put-datafeed-" + request.getDatafeed().getId(),
new AckedClusterStateUpdateTask<Response>(request, listener) { new AckedClusterStateUpdateTask<Response>(request, listener) {
@Override @Override
protected Response newResponse(boolean acknowledged) { protected Response newResponse(boolean acknowledged) {
if (acknowledged) { if (acknowledged) {
logger.info("Created scheduler [{}]", request.getScheduler().getId()); logger.info("Created datafeed [{}]", request.getDatafeed().getId());
} }
return new Response(acknowledged, request.getScheduler()); return new Response(acknowledged, request.getDatafeed());
} }
@Override @Override
public ClusterState execute(ClusterState currentState) throws Exception { public ClusterState execute(ClusterState currentState) throws Exception {
return putScheduler(request, currentState); return putDatafeed(request, currentState);
} }
}); });
} }
private ClusterState putScheduler(Request request, ClusterState clusterState) { private ClusterState putDatafeed(Request request, ClusterState clusterState) {
MlMetadata currentMetadata = clusterState.getMetaData().custom(MlMetadata.TYPE); MlMetadata currentMetadata = clusterState.getMetaData().custom(MlMetadata.TYPE);
MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata) MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata)
.putScheduler(request.getScheduler()).build(); .putDatafeed(request.getDatafeed()).build();
return ClusterState.builder(clusterState).metaData( return ClusterState.builder(clusterState).metaData(
MetaData.builder(clusterState.getMetaData()).putCustom(MlMetadata.TYPE, newMetadata).build()) MetaData.builder(clusterState.getMetaData()).putCustom(MlMetadata.TYPE, newMetadata).build())
.build(); .build();
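Stripped of the transport plumbing, putDatafeed() above is the standard immutable rebuild of a custom cluster-state entry. A minimal sketch of the same steps, assuming clusterState and a parsed datafeed are in scope (the class names are the ones this commit introduces):

    MlMetadata current = clusterState.getMetaData().custom(MlMetadata.TYPE);
    MlMetadata updated = new MlMetadata.Builder(current)   // copies the existing jobs and datafeeds
            .putDatafeed(datafeed)                         // registers the new datafeed
            .build();
    ClusterState newState = ClusterState.builder(clusterState)
            .metaData(MetaData.builder(clusterState.getMetaData())
                    .putCustom(MlMetadata.TYPE, updated)
                    .build())
            .build();                                      // published by the master if the task is accepted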
View File
@ -31,26 +31,26 @@ import org.elasticsearch.tasks.LoggingTaskListener;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata; import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunner; import org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunner;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus; import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.utils.SchedulerStatusObserver; import org.elasticsearch.xpack.ml.utils.DatafeedStatusObserver;
import java.io.IOException; import java.io.IOException;
import java.util.Objects; import java.util.Objects;
public class StartSchedulerAction public class StartDatafeedAction
extends Action<StartSchedulerAction.Request, StartSchedulerAction.Response, StartSchedulerAction.RequestBuilder> { extends Action<StartDatafeedAction.Request, StartDatafeedAction.Response, StartDatafeedAction.RequestBuilder> {
public static final ParseField START_TIME = new ParseField("start"); public static final ParseField START_TIME = new ParseField("start");
public static final ParseField END_TIME = new ParseField("end"); public static final ParseField END_TIME = new ParseField("end");
public static final ParseField START_TIMEOUT = new ParseField("start_timeout"); public static final ParseField START_TIMEOUT = new ParseField("start_timeout");
public static final StartSchedulerAction INSTANCE = new StartSchedulerAction(); public static final StartDatafeedAction INSTANCE = new StartDatafeedAction();
public static final String NAME = "cluster:admin/ml/scheduler/start"; public static final String NAME = "cluster:admin/ml/datafeed/start";
private StartSchedulerAction() { private StartDatafeedAction() {
super(NAME); super(NAME);
} }
@ -69,36 +69,36 @@ public class StartSchedulerAction
public static ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new); public static ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);
static { static {
PARSER.declareString((request, schedulerId) -> request.schedulerId = schedulerId, SchedulerConfig.ID); PARSER.declareString((request, datafeedId) -> request.datafeedId = datafeedId, DatafeedConfig.ID);
PARSER.declareLong((request, startTime) -> request.startTime = startTime, START_TIME); PARSER.declareLong((request, startTime) -> request.startTime = startTime, START_TIME);
PARSER.declareLong(Request::setEndTime, END_TIME); PARSER.declareLong(Request::setEndTime, END_TIME);
PARSER.declareString((request, val) -> request.setStartTimeout(TimeValue.parseTimeValue(val, PARSER.declareString((request, val) -> request.setStartTimeout(TimeValue.parseTimeValue(val,
START_TIME.getPreferredName())), START_TIMEOUT); START_TIME.getPreferredName())), START_TIMEOUT);
} }
public static Request parseRequest(String schedulerId, XContentParser parser) { public static Request parseRequest(String datafeedId, XContentParser parser) {
Request request = PARSER.apply(parser, null); Request request = PARSER.apply(parser, null);
if (schedulerId != null) { if (datafeedId != null) {
request.schedulerId = schedulerId; request.datafeedId = datafeedId;
} }
return request; return request;
} }
private String schedulerId; private String datafeedId;
private long startTime; private long startTime;
private Long endTime; private Long endTime;
private TimeValue startTimeout = TimeValue.timeValueSeconds(30); private TimeValue startTimeout = TimeValue.timeValueSeconds(30);
public Request(String schedulerId, long startTime) { public Request(String datafeedId, long startTime) {
this.schedulerId = ExceptionsHelper.requireNonNull(schedulerId, SchedulerConfig.ID.getPreferredName()); this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName());
this.startTime = startTime; this.startTime = startTime;
} }
Request() { Request() {
} }
public String getSchedulerId() { public String getDatafeedId() {
return schedulerId; return datafeedId;
} }
public long getStartTime() { public long getStartTime() {
@ -129,7 +129,7 @@ public class StartSchedulerAction
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
super.readFrom(in); super.readFrom(in);
schedulerId = in.readString(); datafeedId = in.readString();
startTime = in.readVLong(); startTime = in.readVLong();
endTime = in.readOptionalLong(); endTime = in.readOptionalLong();
startTimeout = new TimeValue(in.readVLong()); startTimeout = new TimeValue(in.readVLong());
@ -138,7 +138,7 @@ public class StartSchedulerAction
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out); super.writeTo(out);
out.writeString(schedulerId); out.writeString(datafeedId);
out.writeVLong(startTime); out.writeVLong(startTime);
out.writeOptionalLong(endTime); out.writeOptionalLong(endTime);
out.writeVLong(startTimeout.millis()); out.writeVLong(startTimeout.millis());
@ -147,7 +147,7 @@ public class StartSchedulerAction
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(); builder.startObject();
builder.field(SchedulerConfig.ID.getPreferredName(), schedulerId); builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
builder.field(START_TIME.getPreferredName(), startTime); builder.field(START_TIME.getPreferredName(), startTime);
if (endTime != null) { if (endTime != null) {
builder.field(END_TIME.getPreferredName(), endTime); builder.field(END_TIME.getPreferredName(), endTime);
@ -158,7 +158,7 @@ public class StartSchedulerAction
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(schedulerId, startTime, endTime); return Objects.hash(datafeedId, startTime, endTime);
} }
@Override @Override
@ -170,7 +170,7 @@ public class StartSchedulerAction
return false; return false;
} }
Request other = (Request) obj; Request other = (Request) obj;
return Objects.equals(schedulerId, other.schedulerId) && return Objects.equals(datafeedId, other.datafeedId) &&
Objects.equals(startTime, other.startTime) && Objects.equals(startTime, other.startTime) &&
Objects.equals(endTime, other.endTime); Objects.equals(endTime, other.endTime);
} }
@ -178,7 +178,7 @@ public class StartSchedulerAction
static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> { static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {
public RequestBuilder(ElasticsearchClient client, StartSchedulerAction action) { public RequestBuilder(ElasticsearchClient client, StartDatafeedAction action) {
super(client, action, new Request()); super(client, action, new Request());
} }
} }
@ -222,33 +222,33 @@ public class StartSchedulerAction
public static class TransportAction extends HandledTransportAction<Request, Response> { public static class TransportAction extends HandledTransportAction<Request, Response> {
private final ClusterService clusterService; private final ClusterService clusterService;
private final SchedulerStatusObserver schedulerStatusObserver; private final DatafeedStatusObserver datafeedStatusObserver;
private final InternalStartSchedulerAction.TransportAction transportAction; private final InternalStartDatafeedAction.TransportAction transportAction;
@Inject @Inject
public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool,
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
ClusterService clusterService, InternalStartSchedulerAction.TransportAction transportAction) { ClusterService clusterService, InternalStartDatafeedAction.TransportAction transportAction) {
super(settings, StartSchedulerAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, super(settings, StartDatafeedAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
Request::new); Request::new);
this.clusterService = clusterService; this.clusterService = clusterService;
this.schedulerStatusObserver = new SchedulerStatusObserver(threadPool, clusterService); this.datafeedStatusObserver = new DatafeedStatusObserver(threadPool, clusterService);
this.transportAction = transportAction; this.transportAction = transportAction;
} }
@Override @Override
protected void doExecute(Request request, ActionListener<Response> listener) { protected void doExecute(Request request, ActionListener<Response> listener) {
// This validation happens also in ScheduledJobRunner, the reason we do it here too is that if it fails there // This validation happens also in DatafeedJobRunner, the reason we do it here too is that if it fails there
// we are unable to provide the user immediate feedback. We would create the task and the validation would fail // we are unable to provide the user immediate feedback. We would create the task and the validation would fail
// in the background, whereas now the validation failure is part of the response being returned. // in the background, whereas now the validation failure is part of the response being returned.
MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE); MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
ScheduledJobRunner.validate(request.schedulerId, mlMetadata); DatafeedJobRunner.validate(request.datafeedId, mlMetadata);
InternalStartSchedulerAction.Request internalRequest = InternalStartDatafeedAction.Request internalRequest =
new InternalStartSchedulerAction.Request(request.schedulerId, request.startTime); new InternalStartDatafeedAction.Request(request.datafeedId, request.startTime);
internalRequest.setEndTime(request.endTime); internalRequest.setEndTime(request.endTime);
transportAction.execute(internalRequest, LoggingTaskListener.instance()); transportAction.execute(internalRequest, LoggingTaskListener.instance());
schedulerStatusObserver.waitForStatus(request.schedulerId, request.startTimeout, SchedulerStatus.STARTED, e -> { datafeedStatusObserver.waitForStatus(request.datafeedId, request.startTimeout, DatafeedStatus.STARTED, e -> {
if (e != null) { if (e != null) {
listener.onFailure(e); listener.onFailure(e);
} else { } else {
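For reference, the parser above accepts start, end and start_timeout in the request body, while the datafeed id normally arrives via the URL path and overrides anything in the body. A hypothetical caller, assuming parser is an XContentParser positioned on the JSON shown in the comment (the JSON body and the parser variable are illustrative, not part of this commit):

    // body: { "start": 1484827200000, "end": 1484913600000, "start_timeout": "1m" }
    StartDatafeedAction.Request request =
            StartDatafeedAction.Request.parseRequest("datafeed-1", parser);
    assert "datafeed-1".equals(request.getDatafeedId());  // the path id wins over any body id
    assert request.getStartTime() == 1484827200000L;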
View File
@ -35,22 +35,22 @@ import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata; import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.scheduler.Scheduler; import org.elasticsearch.xpack.ml.datafeed.Datafeed;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus; import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.utils.SchedulerStatusObserver; import org.elasticsearch.xpack.ml.utils.DatafeedStatusObserver;
import java.io.IOException; import java.io.IOException;
import java.util.Objects; import java.util.Objects;
public class StopSchedulerAction public class StopDatafeedAction
extends Action<StopSchedulerAction.Request, StopSchedulerAction.Response, StopSchedulerAction.RequestBuilder> { extends Action<StopDatafeedAction.Request, StopDatafeedAction.Response, StopDatafeedAction.RequestBuilder> {
public static final StopSchedulerAction INSTANCE = new StopSchedulerAction(); public static final StopDatafeedAction INSTANCE = new StopDatafeedAction();
public static final String NAME = "cluster:admin/ml/scheduler/stop"; public static final String NAME = "cluster:admin/ml/datafeed/stop";
private StopSchedulerAction() { private StopDatafeedAction() {
super(NAME); super(NAME);
} }
@ -66,18 +66,18 @@ public class StopSchedulerAction
public static class Request extends ActionRequest { public static class Request extends ActionRequest {
private String schedulerId; private String datafeedId;
private TimeValue stopTimeout = TimeValue.timeValueSeconds(30); private TimeValue stopTimeout = TimeValue.timeValueSeconds(30);
public Request(String jobId) { public Request(String jobId) {
this.schedulerId = ExceptionsHelper.requireNonNull(jobId, SchedulerConfig.ID.getPreferredName()); this.datafeedId = ExceptionsHelper.requireNonNull(jobId, DatafeedConfig.ID.getPreferredName());
} }
Request() { Request() {
} }
public String getSchedulerId() { public String getDatafeedId() {
return schedulerId; return datafeedId;
} }
public void setStopTimeout(TimeValue stopTimeout) { public void setStopTimeout(TimeValue stopTimeout) {
@ -92,18 +92,18 @@ public class StopSchedulerAction
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
super.readFrom(in); super.readFrom(in);
schedulerId = in.readString(); datafeedId = in.readString();
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out); super.writeTo(out);
out.writeString(schedulerId); out.writeString(datafeedId);
} }
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(schedulerId); return Objects.hash(datafeedId);
} }
@Override @Override
@ -115,13 +115,13 @@ public class StopSchedulerAction
return false; return false;
} }
Request other = (Request) obj; Request other = (Request) obj;
return Objects.equals(schedulerId, other.schedulerId); return Objects.equals(datafeedId, other.datafeedId);
} }
} }
static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> { static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {
public RequestBuilder(ElasticsearchClient client, StopSchedulerAction action) { public RequestBuilder(ElasticsearchClient client, StopDatafeedAction action) {
super(client, action, new Request()); super(client, action, new Request());
} }
} }
@ -148,34 +148,34 @@ public class StopSchedulerAction
private final ClusterService clusterService; private final ClusterService clusterService;
private final TransportListTasksAction listTasksAction; private final TransportListTasksAction listTasksAction;
private final TransportCancelTasksAction cancelTasksAction; private final TransportCancelTasksAction cancelTasksAction;
private final SchedulerStatusObserver schedulerStatusObserver; private final DatafeedStatusObserver datafeedStatusObserver;
@Inject @Inject
public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool,
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
ClusterService clusterService, TransportCancelTasksAction cancelTasksAction, ClusterService clusterService, TransportCancelTasksAction cancelTasksAction,
TransportListTasksAction listTasksAction) { TransportListTasksAction listTasksAction) {
super(settings, StopSchedulerAction.NAME, threadPool, transportService, actionFilters, super(settings, StopDatafeedAction.NAME, threadPool, transportService, actionFilters,
indexNameExpressionResolver, Request::new); indexNameExpressionResolver, Request::new);
this.clusterService = clusterService; this.clusterService = clusterService;
this.listTasksAction = listTasksAction; this.listTasksAction = listTasksAction;
this.cancelTasksAction = cancelTasksAction; this.cancelTasksAction = cancelTasksAction;
this.schedulerStatusObserver = new SchedulerStatusObserver(threadPool, clusterService); this.datafeedStatusObserver = new DatafeedStatusObserver(threadPool, clusterService);
} }
@Override @Override
protected void doExecute(Request request, ActionListener<Response> listener) { protected void doExecute(Request request, ActionListener<Response> listener) {
String schedulerId = request.getSchedulerId(); String datafeedId = request.getDatafeedId();
MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE); MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
validate(schedulerId, mlMetadata); validate(datafeedId, mlMetadata);
ListTasksRequest listTasksRequest = new ListTasksRequest(); ListTasksRequest listTasksRequest = new ListTasksRequest();
listTasksRequest.setActions(InternalStartSchedulerAction.NAME); listTasksRequest.setActions(InternalStartDatafeedAction.NAME);
listTasksRequest.setDetailed(true); listTasksRequest.setDetailed(true);
listTasksAction.execute(listTasksRequest, new ActionListener<ListTasksResponse>() { listTasksAction.execute(listTasksRequest, new ActionListener<ListTasksResponse>() {
@Override @Override
public void onResponse(ListTasksResponse listTasksResponse) { public void onResponse(ListTasksResponse listTasksResponse) {
String expectedJobDescription = "scheduler-" + schedulerId; String expectedJobDescription = "datafeed-" + datafeedId;
for (TaskInfo taskInfo : listTasksResponse.getTasks()) { for (TaskInfo taskInfo : listTasksResponse.getTasks()) {
if (expectedJobDescription.equals(taskInfo.getDescription())) { if (expectedJobDescription.equals(taskInfo.getDescription())) {
CancelTasksRequest cancelTasksRequest = new CancelTasksRequest(); CancelTasksRequest cancelTasksRequest = new CancelTasksRequest();
@ -183,7 +183,7 @@ public class StopSchedulerAction
cancelTasksAction.execute(cancelTasksRequest, new ActionListener<CancelTasksResponse>() { cancelTasksAction.execute(cancelTasksRequest, new ActionListener<CancelTasksResponse>() {
@Override @Override
public void onResponse(CancelTasksResponse cancelTasksResponse) { public void onResponse(CancelTasksResponse cancelTasksResponse) {
schedulerStatusObserver.waitForStatus(schedulerId, request.stopTimeout, SchedulerStatus.STOPPED, e -> { datafeedStatusObserver.waitForStatus(datafeedId, request.stopTimeout, DatafeedStatus.STOPPED, e -> {
if (e != null) { if (e != null) {
listener.onFailure(e); listener.onFailure(e);
} else { } else {
@ -200,7 +200,7 @@ public class StopSchedulerAction
return; return;
} }
} }
listener.onFailure(new ResourceNotFoundException("No scheduler [" + schedulerId + "] running")); listener.onFailure(new ResourceNotFoundException("No datafeed [" + datafeedId + "] running"));
} }
@Override @Override
@ -213,15 +213,15 @@ public class StopSchedulerAction
} }
static void validate(String schedulerId, MlMetadata mlMetadata) { static void validate(String datafeedId, MlMetadata mlMetadata) {
Scheduler scheduler = mlMetadata.getScheduler(schedulerId); Datafeed datafeed = mlMetadata.getDatafeed(datafeedId);
if (scheduler == null) { if (datafeed == null) {
throw new ResourceNotFoundException(Messages.getMessage(Messages.SCHEDULER_NOT_FOUND, schedulerId)); throw new ResourceNotFoundException(Messages.getMessage(Messages.DATAFEED_NOT_FOUND, datafeedId));
} }
if (scheduler.getStatus() == SchedulerStatus.STOPPED) { if (datafeed.getStatus() == DatafeedStatus.STOPPED) {
throw new ElasticsearchStatusException("scheduler already stopped, expected scheduler status [{}], but got [{}]", throw new ElasticsearchStatusException("datafeed already stopped, expected datafeed status [{}], but got [{}]",
RestStatus.CONFLICT, SchedulerStatus.STARTED, scheduler.getStatus()); RestStatus.CONFLICT, DatafeedStatus.STARTED, datafeed.getStatus());
} }
} }
} }
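Two details above are easy to miss: a running datafeed is located purely by task description ("datafeed-" + datafeedId on tasks spawned by InternalStartDatafeedAction), and stopping an already-stopped datafeed is a 409 CONFLICT rather than a no-op. An illustrative, test-style fragment (validate is package-private, so this assumes same-package access):

    // locating the task that backs a running datafeed
    String expectedDescription = "datafeed-" + datafeedId;
    boolean running = listTasksResponse.getTasks().stream()
            .anyMatch(task -> expectedDescription.equals(task.getDescription()));

    // stopping a datafeed that is already STOPPED
    try {
        StopDatafeedAction.TransportAction.validate("datafeed-1", mlMetadata);
    } catch (ElasticsearchStatusException e) {
        assert e.status() == RestStatus.CONFLICT;
    }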
View File
@ -25,21 +25,21 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus; import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.job.manager.JobManager; import org.elasticsearch.xpack.ml.job.manager.JobManager;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import java.io.IOException; import java.io.IOException;
import java.util.Objects; import java.util.Objects;
public class UpdateSchedulerStatusAction extends Action<UpdateSchedulerStatusAction.Request, public class UpdateDatafeedStatusAction extends Action<UpdateDatafeedStatusAction.Request,
UpdateSchedulerStatusAction.Response, UpdateSchedulerStatusAction.RequestBuilder> { UpdateDatafeedStatusAction.Response, UpdateDatafeedStatusAction.RequestBuilder> {
public static final UpdateSchedulerStatusAction INSTANCE = new UpdateSchedulerStatusAction(); public static final UpdateDatafeedStatusAction INSTANCE = new UpdateDatafeedStatusAction();
public static final String NAME = "cluster:admin/ml/scheduler/status/update"; public static final String NAME = "cluster:admin/ml/datafeed/status/update";
private UpdateSchedulerStatusAction() { private UpdateDatafeedStatusAction() {
super(NAME); super(NAME);
} }
@ -55,30 +55,30 @@ public class UpdateSchedulerStatusAction extends Action<UpdateSchedulerStatusAct
public static class Request extends AcknowledgedRequest<Request> { public static class Request extends AcknowledgedRequest<Request> {
private String schedulerId; private String datafeedId;
private SchedulerStatus schedulerStatus; private DatafeedStatus datafeedStatus;
public Request(String schedulerId, SchedulerStatus schedulerStatus) { public Request(String datafeedId, DatafeedStatus datafeedStatus) {
this.schedulerId = ExceptionsHelper.requireNonNull(schedulerId, SchedulerConfig.ID.getPreferredName()); this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName());
this.schedulerStatus = ExceptionsHelper.requireNonNull(schedulerStatus, "status"); this.datafeedStatus = ExceptionsHelper.requireNonNull(datafeedStatus, "status");
} }
Request() {} Request() {}
public String getSchedulerId() { public String getDatafeedId() {
return schedulerId; return datafeedId;
} }
public void setSchedulerId(String schedulerId) { public void setDatafeedId(String datafeedId) {
this.schedulerId = schedulerId; this.datafeedId = datafeedId;
} }
public SchedulerStatus getSchedulerStatus() { public DatafeedStatus getDatafeedStatus() {
return schedulerStatus; return datafeedStatus;
} }
public void setSchedulerStatus(SchedulerStatus schedulerStatus) { public void setDatafeedStatus(DatafeedStatus datafeedStatus) {
this.schedulerStatus = schedulerStatus; this.datafeedStatus = datafeedStatus;
} }
@Override @Override
@ -89,20 +89,20 @@ public class UpdateSchedulerStatusAction extends Action<UpdateSchedulerStatusAct
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
super.readFrom(in); super.readFrom(in);
schedulerId = in.readString(); datafeedId = in.readString();
schedulerStatus = SchedulerStatus.fromStream(in); datafeedStatus = DatafeedStatus.fromStream(in);
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out); super.writeTo(out);
out.writeString(schedulerId); out.writeString(datafeedId);
schedulerStatus.writeTo(out); datafeedStatus.writeTo(out);
} }
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(schedulerId, schedulerStatus); return Objects.hash(datafeedId, datafeedStatus);
} }
@Override @Override
@ -113,22 +113,22 @@ public class UpdateSchedulerStatusAction extends Action<UpdateSchedulerStatusAct
if (obj == null || obj.getClass() != getClass()) { if (obj == null || obj.getClass() != getClass()) {
return false; return false;
} }
UpdateSchedulerStatusAction.Request other = (UpdateSchedulerStatusAction.Request) obj; UpdateDatafeedStatusAction.Request other = (UpdateDatafeedStatusAction.Request) obj;
return Objects.equals(schedulerId, other.schedulerId) && Objects.equals(schedulerStatus, other.schedulerStatus); return Objects.equals(datafeedId, other.datafeedId) && Objects.equals(datafeedStatus, other.datafeedStatus);
} }
@Override @Override
public String toString() { public String toString() {
return "Request{" + return "Request{" +
SchedulerConfig.ID.getPreferredName() + "='" + schedulerId + "', " + DatafeedConfig.ID.getPreferredName() + "='" + datafeedId + "', " +
"status=" + schedulerStatus + "status=" + datafeedStatus +
'}'; '}';
} }
} }
static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> { static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> {
public RequestBuilder(ElasticsearchClient client, UpdateSchedulerStatusAction action) { public RequestBuilder(ElasticsearchClient client, UpdateDatafeedStatusAction action) {
super(client, action, new Request()); super(client, action, new Request());
} }
} }
@ -162,7 +162,7 @@ public class UpdateSchedulerStatusAction extends Action<UpdateSchedulerStatusAct
public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
JobManager jobManager) { JobManager jobManager) {
super(settings, UpdateSchedulerStatusAction.NAME, transportService, clusterService, threadPool, actionFilters, super(settings, UpdateDatafeedStatusAction.NAME, transportService, clusterService, threadPool, actionFilters,
indexNameExpressionResolver, Request::new); indexNameExpressionResolver, Request::new);
this.jobManager = jobManager; this.jobManager = jobManager;
} }
@ -179,7 +179,7 @@ public class UpdateSchedulerStatusAction extends Action<UpdateSchedulerStatusAct
@Override @Override
protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception { protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
jobManager.updateSchedulerStatus(request, listener); jobManager.updateDatafeedStatus(request, listener);
} }
@Override @Override
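The wire format of this request is just the id string followed by the status enum, which is what the equals() and hashCode() implementations above exist to verify in round-trip tests. A test-style sketch, assuming same-package access to the package-private no-arg constructor and the usual BytesStreamOutput in-memory stream:

    UpdateDatafeedStatusAction.Request original =
            new UpdateDatafeedStatusAction.Request("datafeed-1", DatafeedStatus.STARTED);
    BytesStreamOutput out = new BytesStreamOutput();
    original.writeTo(out);

    UpdateDatafeedStatusAction.Request copy = new UpdateDatafeedStatusAction.Request();
    copy.readFrom(out.bytes().streamInput());
    assert original.equals(copy) && original.hashCode() == copy.hashCode();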
View File
@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler; package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
@ -17,34 +17,34 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException; import java.io.IOException;
import java.util.Objects; import java.util.Objects;
public class Scheduler extends AbstractDiffable<Scheduler> implements ToXContent { public class Datafeed extends AbstractDiffable<Datafeed> implements ToXContent {
private static final ParseField CONFIG_FIELD = new ParseField("config"); private static final ParseField CONFIG_FIELD = new ParseField("config");
private static final ParseField STATUS_FIELD = new ParseField("status"); private static final ParseField STATUS_FIELD = new ParseField("status");
// Used for QueryPage // Used for QueryPage
public static final ParseField RESULTS_FIELD = new ParseField("schedulers"); public static final ParseField RESULTS_FIELD = new ParseField("datafeeds");
public static final ConstructingObjectParser<Scheduler, Void> PARSER = new ConstructingObjectParser<>("scheduler", public static final ConstructingObjectParser<Datafeed, Void> PARSER = new ConstructingObjectParser<>("datafeed",
a -> new Scheduler(((SchedulerConfig.Builder) a[0]).build(), (SchedulerStatus) a[1])); a -> new Datafeed(((DatafeedConfig.Builder) a[0]).build(), (DatafeedStatus) a[1]));
static { static {
PARSER.declareObject(ConstructingObjectParser.constructorArg(), SchedulerConfig.PARSER, CONFIG_FIELD); PARSER.declareObject(ConstructingObjectParser.constructorArg(), DatafeedConfig.PARSER, CONFIG_FIELD);
PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> SchedulerStatus.fromString(p.text()), STATUS_FIELD, PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> DatafeedStatus.fromString(p.text()), STATUS_FIELD,
ObjectParser.ValueType.STRING); ObjectParser.ValueType.STRING);
} }
private final SchedulerConfig config; private final DatafeedConfig config;
private final SchedulerStatus status; private final DatafeedStatus status;
public Scheduler(SchedulerConfig config, SchedulerStatus status) { public Datafeed(DatafeedConfig config, DatafeedStatus status) {
this.config = config; this.config = config;
this.status = status; this.status = status;
} }
public Scheduler(StreamInput in) throws IOException { public Datafeed(StreamInput in) throws IOException {
this.config = new SchedulerConfig(in); this.config = new DatafeedConfig(in);
this.status = SchedulerStatus.fromStream(in); this.status = DatafeedStatus.fromStream(in);
} }
public String getId() { public String getId() {
@ -55,11 +55,11 @@ public class Scheduler extends AbstractDiffable<Scheduler> implements ToXContent
return config.getJobId(); return config.getJobId();
} }
public SchedulerConfig getConfig() { public DatafeedConfig getConfig() {
return config; return config;
} }
public SchedulerStatus getStatus() { public DatafeedStatus getStatus() {
return status; return status;
} }
@ -82,7 +82,7 @@ public class Scheduler extends AbstractDiffable<Scheduler> implements ToXContent
public boolean equals(Object o) { public boolean equals(Object o) {
if (this == o) return true; if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false; if (o == null || getClass() != o.getClass()) return false;
Scheduler that = (Scheduler) o; Datafeed that = (Datafeed) o;
return Objects.equals(config, that.config) && return Objects.equals(config, that.config) &&
Objects.equals(status, that.status); Objects.equals(status, that.status);
} }
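As the ConstructingObjectParser above shows, a Datafeed renders as a two-field object: the full configuration under "config" and the status string under "status". The JSON below is an assumed minimal example; the field names are this commit's ParseFields, but the exact status spelling depends on DatafeedStatus.fromString, which is not part of this diff:

    // {
    //   "config" : {
    //     "datafeed_id" : "datafeed-1",
    //     "job_id"      : "job-1",
    //     "indexes"     : [ "my-index" ],
    //     "types"       : [ "my-type" ]
    //   },
    //   "status" : "STOPPED"
    // }
    Datafeed datafeed = Datafeed.PARSER.apply(parser, null);  // parser positioned on the object above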
View File
@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler; package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.action.support.ToXContentToBytes;
@ -32,14 +32,14 @@ import java.util.List;
import java.util.Objects; import java.util.Objects;
/** /**
* Scheduler configuration options. Describes where to proactively pull input * Datafeed configuration options. Describes where to proactively pull input
* data from. * data from.
* <p> * <p>
* If a value has not been set it will be <code>null</code>. Object wrappers are * If a value has not been set it will be <code>null</code>. Object wrappers are
* used around integral types and booleans so they can take <code>null</code> * used around integral types and booleans so they can take <code>null</code>
* values. * values.
*/ */
public class SchedulerConfig extends ToXContentToBytes implements Writeable { public class DatafeedConfig extends ToXContentToBytes implements Writeable {
/** /**
* The field name used to specify aggregation fields in Elasticsearch * The field name used to specify aggregation fields in Elasticsearch
@ -52,7 +52,7 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
*/ */
public static final String DOC_COUNT = "doc_count"; public static final String DOC_COUNT = "doc_count";
public static final ParseField ID = new ParseField("scheduler_id"); public static final ParseField ID = new ParseField("datafeed_id");
public static final ParseField QUERY_DELAY = new ParseField("query_delay"); public static final ParseField QUERY_DELAY = new ParseField("query_delay");
public static final ParseField FREQUENCY = new ParseField("frequency"); public static final ParseField FREQUENCY = new ParseField("frequency");
public static final ParseField INDEXES = new ParseField("indexes"); public static final ParseField INDEXES = new ParseField("indexes");
@ -64,7 +64,7 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
public static final ParseField SCRIPT_FIELDS = new ParseField("script_fields"); public static final ParseField SCRIPT_FIELDS = new ParseField("script_fields");
public static final ParseField SOURCE = new ParseField("_source"); public static final ParseField SOURCE = new ParseField("_source");
public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("scheduler_config", Builder::new); public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("datafeed_config", Builder::new);
static { static {
PARSER.declareString(Builder::setId, ID); PARSER.declareString(Builder::setId, ID);
@ -111,7 +111,7 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
private final Integer scrollSize; private final Integer scrollSize;
private final boolean source; private final boolean source;
private SchedulerConfig(String id, String jobId, Long queryDelay, Long frequency, List<String> indexes, List<String> types, private DatafeedConfig(String id, String jobId, Long queryDelay, Long frequency, List<String> indexes, List<String> types,
QueryBuilder query, AggregatorFactories.Builder aggregations, QueryBuilder query, AggregatorFactories.Builder aggregations,
List<SearchSourceBuilder.ScriptField> scriptFields, Integer scrollSize, boolean source) { List<SearchSourceBuilder.ScriptField> scriptFields, Integer scrollSize, boolean source) {
this.id = id; this.id = id;
@ -127,7 +127,7 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
this.source = source; this.source = source;
} }
public SchedulerConfig(StreamInput in) throws IOException { public DatafeedConfig(StreamInput in) throws IOException {
this.id = in.readString(); this.id = in.readString();
this.jobId = in.readString(); this.jobId = in.readString();
this.queryDelay = in.readOptionalLong(); this.queryDelay = in.readOptionalLong();
@ -285,11 +285,11 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
return true; return true;
} }
if (other instanceof SchedulerConfig == false) { if (other instanceof DatafeedConfig == false) {
return false; return false;
} }
SchedulerConfig that = (SchedulerConfig) other; DatafeedConfig that = (DatafeedConfig) other;
return Objects.equals(this.id, that.id) return Objects.equals(this.id, that.id)
&& Objects.equals(this.jobId, that.jobId) && Objects.equals(this.jobId, that.jobId)
@ -335,7 +335,7 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
} }
public Builder(SchedulerConfig config) { public Builder(DatafeedConfig config) {
this.id = config.id; this.id = config.id;
this.jobId = config.jobId; this.jobId = config.jobId;
this.queryDelay = config.queryDelay; this.queryDelay = config.queryDelay;
@ -349,8 +349,8 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
this.source = config.source; this.source = config.source;
} }
public void setId(String schedulerId) { public void setId(String datafeedId) {
id = ExceptionsHelper.requireNonNull(schedulerId, ID.getPreferredName()); id = ExceptionsHelper.requireNonNull(datafeedId, ID.getPreferredName());
} }
public void setJobId(String jobId) { public void setJobId(String jobId) {
@ -367,8 +367,8 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
public void setQueryDelay(long queryDelay) { public void setQueryDelay(long queryDelay) {
if (queryDelay < 0) { if (queryDelay < 0) {
String msg = Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE,
SchedulerConfig.QUERY_DELAY.getPreferredName(), queryDelay); DatafeedConfig.QUERY_DELAY.getPreferredName(), queryDelay);
throw new IllegalArgumentException(msg); throw new IllegalArgumentException(msg);
} }
this.queryDelay = queryDelay; this.queryDelay = queryDelay;
@ -376,8 +376,8 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
public void setFrequency(long frequency) { public void setFrequency(long frequency) {
if (frequency <= 0) { if (frequency <= 0) {
String msg = Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE,
SchedulerConfig.FREQUENCY.getPreferredName(), frequency); DatafeedConfig.FREQUENCY.getPreferredName(), frequency);
throw new IllegalArgumentException(msg); throw new IllegalArgumentException(msg);
} }
this.frequency = frequency; this.frequency = frequency;
@ -399,8 +399,8 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
public void setScrollSize(int scrollSize) { public void setScrollSize(int scrollSize) {
if (scrollSize < 0) { if (scrollSize < 0) {
String msg = Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE,
SchedulerConfig.SCROLL_SIZE.getPreferredName(), scrollSize); DatafeedConfig.SCROLL_SIZE.getPreferredName(), scrollSize);
throw new IllegalArgumentException(msg); throw new IllegalArgumentException(msg);
} }
this.scrollSize = scrollSize; this.scrollSize = scrollSize;
@ -410,7 +410,7 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
this.source = enabled; this.source = enabled;
} }
public SchedulerConfig build() { public DatafeedConfig build() {
ExceptionsHelper.requireNonNull(id, ID.getPreferredName()); ExceptionsHelper.requireNonNull(id, ID.getPreferredName());
ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
if (!MlStrings.isValidId(id)) { if (!MlStrings.isValidId(id)) {
@ -422,12 +422,12 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
if (types == null || types.isEmpty() || types.contains(null) || types.contains("")) { if (types == null || types.isEmpty() || types.contains(null) || types.contains("")) {
throw invalidOptionValue(TYPES.getPreferredName(), types); throw invalidOptionValue(TYPES.getPreferredName(), types);
} }
return new SchedulerConfig(id, jobId, queryDelay, frequency, indexes, types, query, aggregations, scriptFields, scrollSize, return new DatafeedConfig(id, jobId, queryDelay, frequency, indexes, types, query, aggregations, scriptFields, scrollSize,
source); source);
} }
private static ElasticsearchException invalidOptionValue(String fieldName, Object value) { private static ElasticsearchException invalidOptionValue(String fieldName, Object value) {
String msg = Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, fieldName, value); String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, fieldName, value);
throw new IllegalArgumentException(msg); throw new IllegalArgumentException(msg);
} }
} }
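Putting the Builder above to work, a minimal construction sketch: setId, setJobId, setQueryDelay, setFrequency and setScrollSize all appear in this diff, while setIndexes and setTypes are assumed setters for the two lists that build() requires to be non-empty (the diff shows neither the setters nor the time units):

    DatafeedConfig.Builder builder = new DatafeedConfig.Builder();
    builder.setId("datafeed-1");      // must pass MlStrings.isValidId
    builder.setJobId("job-1");
    builder.setIndexes(Collections.singletonList("my-index"));  // assumed setter
    builder.setTypes(Collections.singletonList("my-type"));     // assumed setter
    builder.setQueryDelay(60L);       // rejected if < 0
    builder.setFrequency(150L);       // rejected if <= 0
    builder.setScrollSize(1000);      // rejected if < 0
    DatafeedConfig config = builder.build();  // also validates id, jobId, indexes and types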
View File
@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler; package org.elasticsearch.xpack.ml.datafeed;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.Client; import org.elasticsearch.client.Client;
@ -17,8 +17,8 @@ import org.elasticsearch.xpack.ml.job.DataCounts;
import org.elasticsearch.xpack.ml.job.DataDescription; import org.elasticsearch.xpack.ml.job.DataDescription;
import org.elasticsearch.xpack.ml.job.audit.Auditor; import org.elasticsearch.xpack.ml.job.audit.Auditor;
import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractorFactory; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.io.IOException; import java.io.IOException;
@ -29,9 +29,9 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Supplier; import java.util.function.Supplier;
class ScheduledJob { class DatafeedJob {
private static final Logger LOGGER = Loggers.getLogger(ScheduledJob.class); private static final Logger LOGGER = Loggers.getLogger(DatafeedJob.class);
private static final int NEXT_TASK_DELAY_MS = 100; private static final int NEXT_TASK_DELAY_MS = 100;
private final Auditor auditor; private final Auditor auditor;
@ -47,7 +47,7 @@ class ScheduledJob {
private volatile Long lastEndTimeMs; private volatile Long lastEndTimeMs;
private AtomicBoolean running = new AtomicBoolean(true); private AtomicBoolean running = new AtomicBoolean(true);
ScheduledJob(String jobId, DataDescription dataDescription, long frequencyMs, long queryDelayMs, DatafeedJob(String jobId, DataDescription dataDescription, long frequencyMs, long queryDelayMs,
DataExtractorFactory dataExtractorFactory, Client client, Auditor auditor, Supplier<Long> currentTimeSupplier, DataExtractorFactory dataExtractorFactory, Client client, Auditor auditor, Supplier<Long> currentTimeSupplier,
long latestFinalBucketEndTimeMs, long latestRecordTimeMs) { long latestFinalBucketEndTimeMs, long latestRecordTimeMs) {
this.jobId = jobId; this.jobId = jobId;
@ -74,12 +74,12 @@ class ScheduledJob {
if (isLookbackOnly) { if (isLookbackOnly) {
return null; return null;
} else { } else {
auditor.info(Messages.getMessage(Messages.JOB_AUDIT_SCHEDULER_STARTED_REALTIME)); auditor.info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_STARTED_REALTIME));
return nextRealtimeTimestamp(); return nextRealtimeTimestamp();
} }
} }
String msg = Messages.getMessage(Messages.JOB_AUDIT_SCHEDULER_STARTED_FROM_TO, String msg = Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_STARTED_FROM_TO,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.printer().print(lookbackStartTimeMs), DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.printer().print(lookbackStartTimeMs),
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.printer().print(lookbackEnd)); DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.printer().print(lookbackEnd));
auditor.info(msg); auditor.info(msg);
@ -87,12 +87,12 @@ class ScheduledJob {
FlushJobAction.Request request = new FlushJobAction.Request(jobId); FlushJobAction.Request request = new FlushJobAction.Request(jobId);
request.setCalcInterim(true); request.setCalcInterim(true);
run(lookbackStartTimeMs, lookbackEnd, request); run(lookbackStartTimeMs, lookbackEnd, request);
auditor.info(Messages.getMessage(Messages.JOB_AUDIT_SCHEDULER_LOOKBACK_COMPLETED)); auditor.info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_LOOKBACK_COMPLETED));
LOGGER.info("[{}] Lookback has finished", jobId); LOGGER.info("[{}] Lookback has finished", jobId);
if (isLookbackOnly) { if (isLookbackOnly) {
return null; return null;
} else { } else {
auditor.info(Messages.getMessage(Messages.JOB_AUDIT_SCHEDULER_CONTINUED_REALTIME)); auditor.info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_CONTINUED_REALTIME));
return nextRealtimeTimestamp(); return nextRealtimeTimestamp();
} }
} }
@ -109,14 +109,14 @@ class ScheduledJob {
} }
/** /**
* Stops the scheduled job * Stops the datafeed job
* *
* @return <code>true</code> when the scheduler was running and this method invocation stopped it, * @return <code>true</code> when the datafeed was running and this method invocation stopped it,
* otherwise <code>false</code> is returned * otherwise <code>false</code> is returned
*/ */
public boolean stop() { public boolean stop() {
if (running.compareAndSet(true, false)) { if (running.compareAndSet(true, false)) {
auditor.info(Messages.getMessage(Messages.JOB_AUDIT_SCHEDULER_STOPPED)); auditor.info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_STOPPED));
return true; return true;
} else { } else {
return false; return false;
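stop() above is deliberately idempotent: running.compareAndSet(true, false) succeeds for exactly one caller, so the audit message is written once no matter how many stop requests race. The same idiom in isolation:

    AtomicBoolean running = new AtomicBoolean(true);
    boolean firstStop  = running.compareAndSet(true, false);  // true: this call stopped it
    boolean secondStop = running.compareAndSet(true, false);  // false: already stopped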
View File
@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler; package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.ResourceNotFoundException;
@ -17,8 +17,8 @@ import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.ml.MlPlugin; import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.InternalStartSchedulerAction; import org.elasticsearch.xpack.ml.action.InternalStartDatafeedAction;
import org.elasticsearch.xpack.ml.action.UpdateSchedulerStatusAction; import org.elasticsearch.xpack.ml.action.UpdateDatafeedStatusAction;
import org.elasticsearch.xpack.ml.job.DataCounts; import org.elasticsearch.xpack.ml.job.DataCounts;
import org.elasticsearch.xpack.ml.job.DataDescription; import org.elasticsearch.xpack.ml.job.DataDescription;
import org.elasticsearch.xpack.ml.job.Job; import org.elasticsearch.xpack.ml.job.Job;
@ -31,8 +31,8 @@ import org.elasticsearch.xpack.ml.job.persistence.BucketsQueryBuilder;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.persistence.QueryPage; import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
import org.elasticsearch.xpack.ml.job.results.Bucket; import org.elasticsearch.xpack.ml.job.results.Bucket;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractorFactory; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
import org.elasticsearch.xpack.ml.scheduler.extractor.scroll.ScrollDataExtractorFactory; import org.elasticsearch.xpack.ml.datafeed.extractor.scroll.ScrollDataExtractorFactory;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import java.time.Duration; import java.time.Duration;
@ -43,7 +43,7 @@ import java.util.function.BiConsumer;
import java.util.function.Consumer; import java.util.function.Consumer;
import java.util.function.Supplier; import java.util.function.Supplier;
public class ScheduledJobRunner extends AbstractComponent { public class DatafeedJobRunner extends AbstractComponent {
private final Client client; private final Client client;
private final ClusterService clusterService; private final ClusterService clusterService;
@ -51,7 +51,7 @@ public class ScheduledJobRunner extends AbstractComponent {
private final ThreadPool threadPool; private final ThreadPool threadPool;
private final Supplier<Long> currentTimeSupplier; private final Supplier<Long> currentTimeSupplier;
public ScheduledJobRunner(ThreadPool threadPool, Client client, ClusterService clusterService, JobProvider jobProvider, public DatafeedJobRunner(ThreadPool threadPool, Client client, ClusterService clusterService, JobProvider jobProvider,
Supplier<Long> currentTimeSupplier) { Supplier<Long> currentTimeSupplier) {
super(Settings.EMPTY); super(Settings.EMPTY);
this.client = Objects.requireNonNull(client); this.client = Objects.requireNonNull(client);
@ -61,13 +61,13 @@ public class ScheduledJobRunner extends AbstractComponent {
this.currentTimeSupplier = Objects.requireNonNull(currentTimeSupplier); this.currentTimeSupplier = Objects.requireNonNull(currentTimeSupplier);
} }
public void run(String schedulerId, long startTime, Long endTime, InternalStartSchedulerAction.SchedulerTask task, public void run(String datafeedId, long startTime, Long endTime, InternalStartDatafeedAction.DatafeedTask task,
Consumer<Exception> handler) { Consumer<Exception> handler) {
MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE); MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
validate(schedulerId, mlMetadata); validate(datafeedId, mlMetadata);
Scheduler scheduler = mlMetadata.getScheduler(schedulerId); Datafeed datafeed = mlMetadata.getDatafeed(datafeedId);
Job job = mlMetadata.getJobs().get(scheduler.getJobId()); Job job = mlMetadata.getJobs().get(datafeed.getJobId());
gatherInformation(job.getId(), (buckets, dataCounts) -> { gatherInformation(job.getId(), (buckets, dataCounts) -> {
long latestFinalBucketEndMs = -1L; long latestFinalBucketEndMs = -1L;
Duration bucketSpan = Duration.ofSeconds(job.getAnalysisConfig().getBucketSpan()); Duration bucketSpan = Duration.ofSeconds(job.getAnalysisConfig().getBucketSpan());
@ -78,47 +78,47 @@ public class ScheduledJobRunner extends AbstractComponent {
if (dataCounts.getLatestRecordTimeStamp() != null) { if (dataCounts.getLatestRecordTimeStamp() != null) {
latestRecordTimeMs = dataCounts.getLatestRecordTimeStamp().getTime(); latestRecordTimeMs = dataCounts.getLatestRecordTimeStamp().getTime();
} }
Holder holder = createJobScheduler(scheduler, job, latestFinalBucketEndMs, latestRecordTimeMs, handler, task); Holder holder = createJobDatafeed(datafeed, job, latestFinalBucketEndMs, latestRecordTimeMs, handler, task);
innerRun(holder, startTime, endTime); innerRun(holder, startTime, endTime);
}, handler); }, handler);
} }
// Important: Holder must be created and assigned to SchedulerTask before setting status to started, // Important: Holder must be created and assigned to DatafeedTask before setting status to started,
// otherwise if a stop scheduler call is made immediately after the start scheduler call we could cancel // otherwise if a stop datafeed call is made immediately after the start datafeed call we could cancel
// the SchedulerTask without stopping scheduler, which causes the scheduler to keep on running. // the DatafeedTask without stopping datafeed, which causes the datafeed to keep on running.
private void innerRun(Holder holder, long startTime, Long endTime) { private void innerRun(Holder holder, long startTime, Long endTime) {
setJobSchedulerStatus(holder.scheduler.getId(), SchedulerStatus.STARTED, error -> { setJobDatafeedStatus(holder.datafeed.getId(), DatafeedStatus.STARTED, error -> {
if (error != null) { if (error != null) {
holder.stop(error); holder.stop(error);
return; return;
} }
logger.info("Starting scheduler [{}] for job [{}]", holder.scheduler.getId(), holder.scheduler.getJobId()); logger.info("Starting datafeed [{}] for job [{}]", holder.datafeed.getId(), holder.datafeed.getJobId());
holder.future = threadPool.executor(MlPlugin.SCHEDULED_RUNNER_THREAD_POOL_NAME).submit(() -> { holder.future = threadPool.executor(MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME).submit(() -> {
Long next = null; Long next = null;
try { try {
next = holder.scheduledJob.runLookBack(startTime, endTime); next = holder.datafeedJob.runLookBack(startTime, endTime);
} catch (ScheduledJob.ExtractionProblemException e) { } catch (DatafeedJob.ExtractionProblemException e) {
if (endTime == null) { if (endTime == null) {
next = e.nextDelayInMsSinceEpoch; next = e.nextDelayInMsSinceEpoch;
} }
holder.problemTracker.reportExtractionProblem(e.getCause().getMessage()); holder.problemTracker.reportExtractionProblem(e.getCause().getMessage());
} catch (ScheduledJob.AnalysisProblemException e) { } catch (DatafeedJob.AnalysisProblemException e) {
if (endTime == null) { if (endTime == null) {
next = e.nextDelayInMsSinceEpoch; next = e.nextDelayInMsSinceEpoch;
} }
holder.problemTracker.reportAnalysisProblem(e.getCause().getMessage()); holder.problemTracker.reportAnalysisProblem(e.getCause().getMessage());
} catch (ScheduledJob.EmptyDataCountException e) { } catch (DatafeedJob.EmptyDataCountException e) {
if (endTime == null && holder.problemTracker.updateEmptyDataCount(true) == false) { if (endTime == null && holder.problemTracker.updateEmptyDataCount(true) == false) {
next = e.nextDelayInMsSinceEpoch; next = e.nextDelayInMsSinceEpoch;
} }
} catch (Exception e) { } catch (Exception e) {
logger.error("Failed lookback import for job [" + holder.scheduler.getJobId() + "]", e); logger.error("Failed lookback import for job [" + holder.datafeed.getJobId() + "]", e);
holder.stop(e); holder.stop(e);
return; return;
} }
if (next != null) { if (next != null) {
doScheduleRealtime(next, holder.scheduler.getJobId(), holder); doDatafeedRealtime(next, holder.datafeed.getJobId(), holder);
} else { } else {
holder.stop(null); holder.stop(null);
holder.problemTracker.finishReport(); holder.problemTracker.finishReport();
@ -127,21 +127,21 @@ public class ScheduledJobRunner extends AbstractComponent {
}); });
} }
private void doScheduleRealtime(long delayInMsSinceEpoch, String jobId, Holder holder) { private void doDatafeedRealtime(long delayInMsSinceEpoch, String jobId, Holder holder) {
if (holder.isRunning()) { if (holder.isRunning()) {
TimeValue delay = computeNextDelay(delayInMsSinceEpoch); TimeValue delay = computeNextDelay(delayInMsSinceEpoch);
logger.debug("Waiting [{}] before executing next realtime import for job [{}]", delay, jobId); logger.debug("Waiting [{}] before executing next realtime import for job [{}]", delay, jobId);
holder.future = threadPool.schedule(delay, MlPlugin.SCHEDULED_RUNNER_THREAD_POOL_NAME, () -> { holder.future = threadPool.schedule(delay, MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME, () -> {
long nextDelayInMsSinceEpoch; long nextDelayInMsSinceEpoch;
try { try {
nextDelayInMsSinceEpoch = holder.scheduledJob.runRealtime(); nextDelayInMsSinceEpoch = holder.datafeedJob.runRealtime();
} catch (ScheduledJob.ExtractionProblemException e) { } catch (DatafeedJob.ExtractionProblemException e) {
nextDelayInMsSinceEpoch = e.nextDelayInMsSinceEpoch; nextDelayInMsSinceEpoch = e.nextDelayInMsSinceEpoch;
holder.problemTracker.reportExtractionProblem(e.getCause().getMessage()); holder.problemTracker.reportExtractionProblem(e.getCause().getMessage());
} catch (ScheduledJob.AnalysisProblemException e) { } catch (DatafeedJob.AnalysisProblemException e) {
nextDelayInMsSinceEpoch = e.nextDelayInMsSinceEpoch; nextDelayInMsSinceEpoch = e.nextDelayInMsSinceEpoch;
holder.problemTracker.reportAnalysisProblem(e.getCause().getMessage()); holder.problemTracker.reportAnalysisProblem(e.getCause().getMessage());
} catch (ScheduledJob.EmptyDataCountException e) { } catch (DatafeedJob.EmptyDataCountException e) {
nextDelayInMsSinceEpoch = e.nextDelayInMsSinceEpoch; nextDelayInMsSinceEpoch = e.nextDelayInMsSinceEpoch;
if (holder.problemTracker.updateEmptyDataCount(true)) { if (holder.problemTracker.updateEmptyDataCount(true)) {
holder.problemTracker.finishReport(); holder.problemTracker.finishReport();
@@ -149,56 +149,56 @@ public class ScheduledJobRunner extends AbstractComponent {
return; return;
} }
} catch (Exception e) { } catch (Exception e) {
logger.error("Unexpected scheduler failure for job [" + jobId + "] stopping...", e); logger.error("Unexpected datafeed failure for job [" + jobId + "] stopping...", e);
holder.stop(e); holder.stop(e);
return; return;
} }
holder.problemTracker.finishReport(); holder.problemTracker.finishReport();
doScheduleRealtime(nextDelayInMsSinceEpoch, jobId, holder); doDatafeedRealtime(nextDelayInMsSinceEpoch, jobId, holder);
}); });
} }
} }
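The realtime loop above is self-rescheduling: each run returns the epoch milliseconds at which the next search is due, and the runner waits at least 1 ms (see computeNextDelay further down) before submitting itself again. A minimal JDK-only sketch of that pattern; FeedTask and runOnce are illustrative stand-ins, not the actual classes:

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    // Sketch of a self-rescheduling realtime loop (illustrative only).
    class FeedTask {
        private final ScheduledExecutorService pool = Executors.newSingleThreadScheduledExecutor();
        private volatile boolean running = true;

        // Stands in for DatafeedJob.runRealtime(): performs one search/post
        // cycle and returns the epoch millis at which the next cycle is due.
        long runOnce() {
            return System.currentTimeMillis() + 1000;
        }

        void scheduleNext(long nextDueMs) {
            if (!running) {
                return;
            }
            // Never sleep less than 1 ms, mirroring computeNextDelay().
            long delay = Math.max(1, nextDueMs - System.currentTimeMillis());
            pool.schedule(() -> scheduleNext(runOnce()), delay, TimeUnit.MILLISECONDS);
        }

        void stop() {
            running = false;
            pool.shutdownNow();
        }
    }

Rescheduling through the executor rather than looping in one thread is what lets a failed or stopped holder simply decline to reschedule.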
public static void validate(String schedulerId, MlMetadata mlMetadata) { public static void validate(String datafeedId, MlMetadata mlMetadata) {
Scheduler scheduler = mlMetadata.getScheduler(schedulerId); Datafeed datafeed = mlMetadata.getDatafeed(datafeedId);
if (scheduler == null) { if (datafeed == null) {
throw ExceptionsHelper.missingSchedulerException(schedulerId); throw ExceptionsHelper.missingDatafeedException(datafeedId);
} }
Job job = mlMetadata.getJobs().get(scheduler.getJobId()); Job job = mlMetadata.getJobs().get(datafeed.getJobId());
if (job == null) { if (job == null) {
throw ExceptionsHelper.missingJobException(scheduler.getJobId()); throw ExceptionsHelper.missingJobException(datafeed.getJobId());
} }
Allocation allocation = mlMetadata.getAllocations().get(scheduler.getJobId()); Allocation allocation = mlMetadata.getAllocations().get(datafeed.getJobId());
if (allocation.getStatus() != JobStatus.OPENED) { if (allocation.getStatus() != JobStatus.OPENED) {
throw new ElasticsearchStatusException("cannot start scheduler, expected job status [{}], but got [{}]", throw new ElasticsearchStatusException("cannot start datafeed, expected job status [{}], but got [{}]",
RestStatus.CONFLICT, JobStatus.OPENED, allocation.getStatus()); RestStatus.CONFLICT, JobStatus.OPENED, allocation.getStatus());
} }
SchedulerStatus status = scheduler.getStatus(); DatafeedStatus status = datafeed.getStatus();
if (status != SchedulerStatus.STOPPED) { if (status != DatafeedStatus.STOPPED) {
throw new ElasticsearchStatusException("scheduler already started, expected scheduler status [{}], but got [{}]", throw new ElasticsearchStatusException("datafeed already started, expected datafeed status [{}], but got [{}]",
RestStatus.CONFLICT, SchedulerStatus.STOPPED, status); RestStatus.CONFLICT, DatafeedStatus.STOPPED, status);
} }
ScheduledJobValidator.validate(scheduler.getConfig(), job); DatafeedJobValidator.validate(datafeed.getConfig(), job);
} }
private Holder createJobScheduler(Scheduler scheduler, Job job, long finalBucketEndMs, long latestRecordTimeMs, private Holder createJobDatafeed(Datafeed datafeed, Job job, long finalBucketEndMs, long latestRecordTimeMs,
Consumer<Exception> handler, InternalStartSchedulerAction.SchedulerTask task) { Consumer<Exception> handler, InternalStartDatafeedAction.DatafeedTask task) {
Auditor auditor = jobProvider.audit(job.getId()); Auditor auditor = jobProvider.audit(job.getId());
Duration frequency = getFrequencyOrDefault(scheduler, job); Duration frequency = getFrequencyOrDefault(datafeed, job);
Duration queryDelay = Duration.ofSeconds(scheduler.getConfig().getQueryDelay()); Duration queryDelay = Duration.ofSeconds(datafeed.getConfig().getQueryDelay());
DataExtractorFactory dataExtractorFactory = createDataExtractorFactory(scheduler.getConfig(), job); DataExtractorFactory dataExtractorFactory = createDataExtractorFactory(datafeed.getConfig(), job);
ScheduledJob scheduledJob = new ScheduledJob(job.getId(), buildDataDescription(job), frequency.toMillis(), queryDelay.toMillis(), DatafeedJob datafeedJob = new DatafeedJob(job.getId(), buildDataDescription(job), frequency.toMillis(), queryDelay.toMillis(),
dataExtractorFactory, client, auditor, currentTimeSupplier, finalBucketEndMs, latestRecordTimeMs); dataExtractorFactory, client, auditor, currentTimeSupplier, finalBucketEndMs, latestRecordTimeMs);
Holder holder = new Holder(scheduler, scheduledJob, new ProblemTracker(() -> auditor), handler); Holder holder = new Holder(datafeed, datafeedJob, new ProblemTracker(() -> auditor), handler);
task.setHolder(holder); task.setHolder(holder);
return holder; return holder;
} }
DataExtractorFactory createDataExtractorFactory(SchedulerConfig schedulerConfig, Job job) { DataExtractorFactory createDataExtractorFactory(DatafeedConfig datafeedConfig, Job job) {
return new ScrollDataExtractorFactory(client, schedulerConfig, job); return new ScrollDataExtractorFactory(client, datafeedConfig, job);
} }
private static DataDescription buildDataDescription(Job job) { private static DataDescription buildDataDescription(Job job) {
@@ -229,8 +229,8 @@ public class ScheduledJobRunner extends AbstractComponent {
}); });
} }
private static Duration getFrequencyOrDefault(Scheduler scheduler, Job job) { private static Duration getFrequencyOrDefault(Datafeed datafeed, Job job) {
Long frequency = scheduler.getConfig().getFrequency(); Long frequency = datafeed.getConfig().getFrequency();
Long bucketSpan = job.getAnalysisConfig().getBucketSpan(); Long bucketSpan = job.getAnalysisConfig().getBucketSpan();
return frequency == null ? DefaultFrequency.ofBucketSpan(bucketSpan) : Duration.ofSeconds(frequency); return frequency == null ? DefaultFrequency.ofBucketSpan(bucketSpan) : Duration.ofSeconds(frequency);
} }
@@ -239,22 +239,22 @@ public class ScheduledJobRunner extends AbstractComponent {
return new TimeValue(Math.max(1, next - currentTimeSupplier.get())); return new TimeValue(Math.max(1, next - currentTimeSupplier.get()));
} }
private void setJobSchedulerStatus(String schedulerId, SchedulerStatus status, Consumer<Exception> handler) { private void setJobDatafeedStatus(String datafeedId, DatafeedStatus status, Consumer<Exception> handler) {
UpdateSchedulerStatusAction.Request request = new UpdateSchedulerStatusAction.Request(schedulerId, status); UpdateDatafeedStatusAction.Request request = new UpdateDatafeedStatusAction.Request(datafeedId, status);
client.execute(UpdateSchedulerStatusAction.INSTANCE, request, new ActionListener<UpdateSchedulerStatusAction.Response>() { client.execute(UpdateDatafeedStatusAction.INSTANCE, request, new ActionListener<UpdateDatafeedStatusAction.Response>() {
@Override @Override
public void onResponse(UpdateSchedulerStatusAction.Response response) { public void onResponse(UpdateDatafeedStatusAction.Response response) {
if (response.isAcknowledged()) { if (response.isAcknowledged()) {
logger.debug("successfully set scheduler [{}] status to [{}]", schedulerId, status); logger.debug("successfully set datafeed [{}] status to [{}]", datafeedId, status);
} else { } else {
logger.info("set scheduler [{}] status to [{}], but was not acknowledged", schedulerId, status); logger.info("set datafeed [{}] status to [{}], but was not acknowledged", datafeedId, status);
} }
handler.accept(null); handler.accept(null);
} }
@Override @Override
public void onFailure(Exception e) { public void onFailure(Exception e) {
logger.error("could not set scheduler [" + schedulerId + "] status to [" + status + "]", e); logger.error("could not set datafeed [" + datafeedId + "] status to [" + status + "]", e);
handler.accept(e); handler.accept(e);
} }
}); });
@@ -262,31 +262,31 @@ public class ScheduledJobRunner extends AbstractComponent {
public class Holder { public class Holder {
private final Scheduler scheduler; private final Datafeed datafeed;
private final ScheduledJob scheduledJob; private final DatafeedJob datafeedJob;
private final ProblemTracker problemTracker; private final ProblemTracker problemTracker;
private final Consumer<Exception> handler; private final Consumer<Exception> handler;
volatile Future<?> future; volatile Future<?> future;
private Holder(Scheduler scheduler, ScheduledJob scheduledJob, ProblemTracker problemTracker, Consumer<Exception> handler) { private Holder(Datafeed datafeed, DatafeedJob datafeedJob, ProblemTracker problemTracker, Consumer<Exception> handler) {
this.scheduler = scheduler; this.datafeed = datafeed;
this.scheduledJob = scheduledJob; this.datafeedJob = datafeedJob;
this.problemTracker = problemTracker; this.problemTracker = problemTracker;
this.handler = handler; this.handler = handler;
} }
boolean isRunning() { boolean isRunning() {
return scheduledJob.isRunning(); return datafeedJob.isRunning();
} }
public void stop(Exception e) { public void stop(Exception e) {
logger.info("attempt to stop scheduler [{}] for job [{}]", scheduler.getId(), scheduler.getJobId()); logger.info("attempt to stop datafeed [{}] for job [{}]", datafeed.getId(), datafeed.getJobId());
if (scheduledJob.stop()) { if (datafeedJob.stop()) {
FutureUtils.cancel(future); FutureUtils.cancel(future);
setJobSchedulerStatus(scheduler.getId(), SchedulerStatus.STOPPED, error -> handler.accept(e)); setJobDatafeedStatus(datafeed.getId(), DatafeedStatus.STOPPED, error -> handler.accept(e));
logger.info("scheduler [{}] for job [{}] has been stopped", scheduler.getId(), scheduler.getJobId()); logger.info("datafeed [{}] for job [{}] has been stopped", datafeed.getId(), datafeed.getJobId());
} else { } else {
logger.info("scheduler [{}] for job [{}] was already stopped", scheduler.getId(), scheduler.getJobId()); logger.info("datafeed [{}] for job [{}] was already stopped", datafeed.getId(), datafeed.getJobId());
} }
} }
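Holder.stop() above is idempotent: datafeedJob.stop() returns true only for the caller that actually performs the stop, and only that caller cancels the in-flight future and updates the persisted status. A small first-caller-wins sketch, assuming an AtomicBoolean in place of DatafeedJob's internal state:

    import java.util.concurrent.Future;
    import java.util.concurrent.atomic.AtomicBoolean;

    // Sketch of the idempotent stop pattern (illustrative names).
    class StoppableRun {
        private final AtomicBoolean stopped = new AtomicBoolean(false);
        volatile Future<?> future;

        boolean stop() {
            if (stopped.compareAndSet(false, true)) {
                if (future != null) {
                    future.cancel(true); // like FutureUtils.cancel(future)
                }
                return true;  // the winner updates status and notifies the handler
            }
            return false;     // somebody else already stopped this run
        }
    }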
@@ -3,29 +3,29 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler; package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.xpack.ml.job.AnalysisConfig; import org.elasticsearch.xpack.ml.job.AnalysisConfig;
import org.elasticsearch.xpack.ml.job.Job; import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.job.messages.Messages;
public final class ScheduledJobValidator { public final class DatafeedJobValidator {
private ScheduledJobValidator() {} private DatafeedJobValidator() {}
/** /**
* Validates a schedulerConfig in relation to the job it refers to * Validates a datafeedConfig in relation to the job it refers to
* @param schedulerConfig the scheduler config * @param datafeedConfig the datafeed config
* @param job the job * @param job the job
*/ */
public static void validate(SchedulerConfig schedulerConfig, Job job) { public static void validate(DatafeedConfig datafeedConfig, Job job) {
AnalysisConfig analysisConfig = job.getAnalysisConfig(); AnalysisConfig analysisConfig = job.getAnalysisConfig();
if (analysisConfig.getLatency() != null && analysisConfig.getLatency() > 0) { if (analysisConfig.getLatency() != null && analysisConfig.getLatency() > 0) {
throw new IllegalArgumentException(Messages.getMessage(Messages.SCHEDULER_DOES_NOT_SUPPORT_JOB_WITH_LATENCY)); throw new IllegalArgumentException(Messages.getMessage(Messages.DATAFEED_DOES_NOT_SUPPORT_JOB_WITH_LATENCY));
} }
if (schedulerConfig.getAggregations() != null && !SchedulerConfig.DOC_COUNT.equals(analysisConfig.getSummaryCountFieldName())) { if (datafeedConfig.getAggregations() != null && !DatafeedConfig.DOC_COUNT.equals(analysisConfig.getSummaryCountFieldName())) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
Messages.getMessage(Messages.SCHEDULER_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD, SchedulerConfig.DOC_COUNT)); Messages.getMessage(Messages.DATAFEED_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD, DatafeedConfig.DOC_COUNT));
} }
} }
} }
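The two checks above are simple enough to restate without any Elasticsearch types. A sketch, assuming DatafeedConfig.DOC_COUNT is the literal string "doc_count" and using plain parameters in place of the config objects:

    // Restatement of the validation rules (assumed parameter shapes).
    final class FeedValidation {

        static void validate(Long latencySeconds, boolean hasAggregations, String summaryCountFieldName) {
            // Rule 1: a datafeed delivers data in time order, so job latency is unsupported.
            if (latencySeconds != null && latencySeconds > 0) {
                throw new IllegalArgumentException("a datafeed cannot be used with a job that has latency");
            }
            // Rule 2: aggregated searches pre-summarise documents, so the job
            // must count them via the special doc_count field.
            if (hasAggregations && !"doc_count".equals(summaryCountFieldName)) {
                throw new IllegalArgumentException("aggregations require summary_count_field_name to be doc_count");
            }
        }

        private FeedValidation() {}
    }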
@@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler; package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
@@ -12,18 +12,18 @@ import org.elasticsearch.common.io.stream.Writeable;
import java.io.IOException; import java.io.IOException;
import java.util.Locale; import java.util.Locale;
public enum SchedulerStatus implements Writeable { public enum DatafeedStatus implements Writeable {
STARTED, STOPPED; STARTED, STOPPED;
public static SchedulerStatus fromString(String name) { public static DatafeedStatus fromString(String name) {
return valueOf(name.trim().toUpperCase(Locale.ROOT)); return valueOf(name.trim().toUpperCase(Locale.ROOT));
} }
public static SchedulerStatus fromStream(StreamInput in) throws IOException { public static DatafeedStatus fromStream(StreamInput in) throws IOException {
int ordinal = in.readVInt(); int ordinal = in.readVInt();
if (ordinal < 0 || ordinal >= values().length) { if (ordinal < 0 || ordinal >= values().length) {
throw new IOException("Unknown public enum SchedulerStatus {\n ordinal [" + ordinal + "]"); throw new IOException("Unknown public enum DatafeedStatus {\n ordinal [" + ordinal + "]");
} }
return values()[ordinal]; return values()[ordinal];
} }
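fromStream() above guards against corrupt or version-skewed input by bounds-checking the ordinal before indexing into values(). A JDK-only sketch of the same wire pattern, substituting DataOutput/DataInput and a fixed-width int for Elasticsearch's variable-length writeVInt/readVInt:

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    // Ordinal-based wire format with a defensive bounds check on read.
    enum Status {
        STARTED, STOPPED;

        void writeTo(DataOutput out) throws IOException {
            out.writeInt(ordinal());
        }

        static Status readFrom(DataInput in) throws IOException {
            int ordinal = in.readInt();
            if (ordinal < 0 || ordinal >= values().length) {
                throw new IOException("Unknown Status ordinal [" + ordinal + "]");
            }
            return values()[ordinal];
        }
    }

The bounds check matters because ordinals are positional: reordering or removing constants changes the wire meaning, and an out-of-range value is the only corruption this format can detect.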
@@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler; package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.xpack.ml.job.audit.Auditor; import org.elasticsearch.xpack.ml.job.audit.Auditor;
import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.job.messages.Messages;
@@ -13,7 +13,7 @@ import java.util.function.Supplier;
/** /**
* <p> * <p>
* Keeps track of problems the scheduler encounters and audits * Keeps track of problems the datafeed encounters and audits
* messages appropriately. * messages appropriately.
* </p> * </p>
* <p> * <p>
@@ -45,7 +45,7 @@ class ProblemTracker {
* @param problemMessage the problem message * @param problemMessage the problem message
*/ */
public void reportAnalysisProblem(String problemMessage) { public void reportAnalysisProblem(String problemMessage) {
reportProblem(Messages.JOB_AUDIT_SCHEDULER_DATA_ANALYSIS_ERROR, problemMessage); reportProblem(Messages.JOB_AUDIT_DATAFEED_DATA_ANALYSIS_ERROR, problemMessage);
} }
/** /**
@@ -54,7 +54,7 @@ class ProblemTracker {
* @param problemMessage the problem message * @param problemMessage the problem message
*/ */
public void reportExtractionProblem(String problemMessage) { public void reportExtractionProblem(String problemMessage) {
reportProblem(Messages.JOB_AUDIT_SCHEDULER_DATA_EXTRACTION_ERROR, problemMessage); reportProblem(Messages.JOB_AUDIT_DATAFEED_DATA_EXTRACTION_ERROR, problemMessage);
} }
/** /**
@@ -82,12 +82,12 @@ class ProblemTracker {
if (empty && emptyDataCount < EMPTY_DATA_WARN_COUNT) { if (empty && emptyDataCount < EMPTY_DATA_WARN_COUNT) {
emptyDataCount++; emptyDataCount++;
if (emptyDataCount == EMPTY_DATA_WARN_COUNT) { if (emptyDataCount == EMPTY_DATA_WARN_COUNT) {
auditor.get().warning(Messages.getMessage(Messages.JOB_AUDIT_SCHEDULER_NO_DATA)); auditor.get().warning(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_NO_DATA));
return true; return true;
} }
} else if (!empty) { } else if (!empty) {
if (emptyDataCount >= EMPTY_DATA_WARN_COUNT) { if (emptyDataCount >= EMPTY_DATA_WARN_COUNT) {
auditor.get().info(Messages.getMessage(Messages.JOB_AUDIR_SCHEDULER_DATA_SEEN_AGAIN)); auditor.get().info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_DATA_SEEN_AGAIN));
} }
emptyDataCount = 0; emptyDataCount = 0;
} }
@@ -103,7 +103,7 @@ class ProblemTracker {
*/ */
public void finishReport() { public void finishReport() {
if (!hasProblems && hadProblems) { if (!hasProblems && hadProblems) {
auditor.get().info(Messages.getMessage(Messages.JOB_AUDIT_SCHEDULER_RECOVERED)); auditor.get().info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_RECOVERED));
} }
hadProblems = hasProblems; hadProblems = hasProblems;
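updateEmptyDataCount() above counts consecutive empty lookups, warns exactly once when a threshold is reached, and audits a recovery message when data reappears. A sketch of that bookkeeping; the threshold value and the println auditing are placeholders, not the real constants:

    // Empty-data bookkeeping: warn once after N consecutive empty lookups,
    // announce recovery when data is seen again.
    class EmptyDataCounter {
        static final int EMPTY_DATA_WARN_COUNT = 10; // assumed threshold
        private int emptyDataCount;

        /** @return true if the warning threshold was reached on this update */
        boolean update(boolean empty) {
            if (empty && emptyDataCount < EMPTY_DATA_WARN_COUNT) {
                emptyDataCount++;
                if (emptyDataCount == EMPTY_DATA_WARN_COUNT) {
                    System.out.println("WARN: datafeed has seen no data");
                    return true;
                }
            } else if (!empty) {
                if (emptyDataCount >= EMPTY_DATA_WARN_COUNT) {
                    System.out.println("INFO: datafeed has seen data again");
                }
                emptyDataCount = 0;
            }
            return false;
        }
    }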
@@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler.extractor; package org.elasticsearch.xpack.ml.datafeed.extractor;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
@@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler.extractor; package org.elasticsearch.xpack.ml.datafeed.extractor;
public interface DataExtractorFactory { public interface DataExtractorFactory {
DataExtractor newExtractor(long start, long end); DataExtractor newExtractor(long start, long end);
@@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll; package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.SearchHitField;
@@ -3,11 +3,11 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll; package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.ml.job.Job; import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
@@ -68,8 +68,8 @@ class ExtractedFields {
throw new RuntimeException("Time field [" + timeField.getName() + "] expected a long value; actual was: " + value[0]); throw new RuntimeException("Time field [" + timeField.getName() + "] expected a long value; actual was: " + value[0]);
} }
public static ExtractedFields build(Job job, SchedulerConfig schedulerConfig) { public static ExtractedFields build(Job job, DatafeedConfig datafeedConfig) {
Set<String> scriptFields = schedulerConfig.getScriptFields().stream().map(sf -> sf.fieldName()).collect(Collectors.toSet()); Set<String> scriptFields = datafeedConfig.getScriptFields().stream().map(sf -> sf.fieldName()).collect(Collectors.toSet());
String timeField = job.getDataDescription().getTimeField(); String timeField = job.getDataDescription().getTimeField();
ExtractedField timeExtractedField = ExtractedField.newField(timeField, scriptFields.contains(timeField) ? ExtractedField timeExtractedField = ExtractedField.newField(timeField, scriptFields.contains(timeField) ?
ExtractedField.ExtractionMethod.SCRIPT_FIELD : ExtractedField.ExtractionMethod.DOC_VALUE); ExtractedField.ExtractionMethod.SCRIPT_FIELD : ExtractedField.ExtractionMethod.DOC_VALUE);
@@ -78,7 +78,7 @@ class ExtractedFields {
allExtractedFields.add(timeExtractedField); allExtractedFields.add(timeExtractedField);
for (String field : remainingFields) { for (String field : remainingFields) {
ExtractedField.ExtractionMethod method = scriptFields.contains(field) ? ExtractedField.ExtractionMethod.SCRIPT_FIELD : ExtractedField.ExtractionMethod method = scriptFields.contains(field) ? ExtractedField.ExtractionMethod.SCRIPT_FIELD :
schedulerConfig.isSource() ? ExtractedField.ExtractionMethod.SOURCE : ExtractedField.ExtractionMethod.DOC_VALUE; datafeedConfig.isSource() ? ExtractedField.ExtractionMethod.SOURCE : ExtractedField.ExtractionMethod.DOC_VALUE;
allExtractedFields.add(ExtractedField.newField(field, method)); allExtractedFields.add(ExtractedField.newField(field, method));
} }
return new ExtractedFields(timeExtractedField, allExtractedFields); return new ExtractedFields(timeExtractedField, allExtractedFields);
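ExtractedFields.build() above decides, per field, where each value is read from: script fields come back in the search response's script fields section, and everything else comes from _source or doc_values depending on how the datafeed is configured. The same decision table with plain collections:

    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    // Per-field extraction planning (illustrative, mirrors the ternary above).
    final class ExtractionPlan {

        enum Method { DOC_VALUE, SCRIPT_FIELD, SOURCE }

        static Map<String, Method> plan(List<String> fields, Set<String> scriptFields, boolean fromSource) {
            Map<String, Method> plan = new LinkedHashMap<>();
            for (String field : fields) {
                Method method = scriptFields.contains(field) ? Method.SCRIPT_FIELD
                        : fromSource ? Method.SOURCE : Method.DOC_VALUE;
                plan.put(field, method);
            }
            return plan;
        }

        private ExtractionPlan() {}
    }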
@@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll; package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.ClearScrollAction;
@@ -21,7 +21,7 @@ import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
import java.io.ByteArrayInputStream; import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
@@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll; package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder;
@@ -3,28 +3,28 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll; package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.client.Client; import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.ml.job.Job; import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractorFactory; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
import java.util.Objects; import java.util.Objects;
public class ScrollDataExtractorFactory implements DataExtractorFactory { public class ScrollDataExtractorFactory implements DataExtractorFactory {
private final Client client; private final Client client;
private final SchedulerConfig schedulerConfig; private final DatafeedConfig datafeedConfig;
private final Job job; private final Job job;
private final ExtractedFields extractedFields; private final ExtractedFields extractedFields;
public ScrollDataExtractorFactory(Client client, SchedulerConfig schedulerConfig, Job job) { public ScrollDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job) {
this.client = Objects.requireNonNull(client); this.client = Objects.requireNonNull(client);
this.schedulerConfig = Objects.requireNonNull(schedulerConfig); this.datafeedConfig = Objects.requireNonNull(datafeedConfig);
this.job = Objects.requireNonNull(job); this.job = Objects.requireNonNull(job);
this.extractedFields = ExtractedFields.build(job, schedulerConfig); this.extractedFields = ExtractedFields.build(job, datafeedConfig);
} }
@Override @Override
@@ -32,11 +32,11 @@ public class ScrollDataExtractorFactory implements DataExtractorFactory {
ScrollDataExtractorContext dataExtractorContext = new ScrollDataExtractorContext( ScrollDataExtractorContext dataExtractorContext = new ScrollDataExtractorContext(
job.getId(), job.getId(),
extractedFields, extractedFields,
schedulerConfig.getIndexes(), datafeedConfig.getIndexes(),
schedulerConfig.getTypes(), datafeedConfig.getTypes(),
schedulerConfig.getQuery(), datafeedConfig.getQuery(),
schedulerConfig.getScriptFields(), datafeedConfig.getScriptFields(),
schedulerConfig.getScrollSize(), datafeedConfig.getScrollSize(),
start, start,
end); end);
return new ScrollDataExtractor(client, dataExtractorContext); return new ScrollDataExtractor(client, dataExtractorContext);
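The factory above captures the configuration that never changes and stamps out one immutable context per [start, end) range; the extractor then pages through results scroll-style until a page comes back empty. A toy sketch of that paging loop, with canned pages standing in for search and scroll responses:

    import java.util.Iterator;
    import java.util.List;

    // Scroll-style paging: keep asking for the next page until it is empty.
    final class ScrollSketch {
        private final Iterator<List<String>> pages; // stands in for search/scroll responses

        ScrollSketch(List<List<String>> cannedPages) {
            this.pages = cannedPages.iterator();
        }

        List<String> nextPage() {
            return pages.hasNext() ? pages.next() : List.of();
        }

        public static void main(String[] args) {
            ScrollSketch extractor = new ScrollSketch(List.of(List.of("hit1", "hit2"), List.of("hit3")));
            for (List<String> page = extractor.nextPage(); !page.isEmpty(); page = extractor.nextPage()) {
                page.forEach(System.out::println); // each page would be streamed into the job
            }
        }
    }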
@@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll; package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ml.job.config;
import java.time.Duration; import java.time.Duration;
/** /**
* Factory methods for a sensible default for the scheduler frequency * Factory methods for a sensible default for the datafeed frequency
*/ */
public final class DefaultFrequency { public final class DefaultFrequency {
private static final int SECONDS_IN_MINUTE = 60; private static final int SECONDS_IN_MINUTE = 60;
@@ -25,7 +25,7 @@ import org.elasticsearch.xpack.ml.action.OpenJobAction;
import org.elasticsearch.xpack.ml.action.PutJobAction; import org.elasticsearch.xpack.ml.action.PutJobAction;
import org.elasticsearch.xpack.ml.action.RevertModelSnapshotAction; import org.elasticsearch.xpack.ml.action.RevertModelSnapshotAction;
import org.elasticsearch.xpack.ml.action.UpdateJobStatusAction; import org.elasticsearch.xpack.ml.action.UpdateJobStatusAction;
import org.elasticsearch.xpack.ml.action.UpdateSchedulerStatusAction; import org.elasticsearch.xpack.ml.action.UpdateDatafeedStatusAction;
import org.elasticsearch.xpack.ml.job.IgnoreDowntime; import org.elasticsearch.xpack.ml.job.IgnoreDowntime;
import org.elasticsearch.xpack.ml.job.Job; import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.job.JobStatus; import org.elasticsearch.xpack.ml.job.JobStatus;
@@ -39,7 +39,7 @@ import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister;
import org.elasticsearch.xpack.ml.job.persistence.QueryPage; import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
import org.elasticsearch.xpack.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.ml.job.results.AnomalyRecord;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus; import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import java.util.Collections; import java.util.Collections;
@@ -56,7 +56,7 @@ import java.util.stream.Collectors;
* <li>creation</li> * <li>creation</li>
* <li>deletion</li> * <li>deletion</li>
* <li>updating</li> * <li>updating</li>
* <li>starting/stopping of scheduled jobs</li> * <li>starting/stopping of datafeed jobs</li>
* </ul> * </ul>
*/ */
public class JobManager extends AbstractComponent { public class JobManager extends AbstractComponent {
@@ -305,23 +305,23 @@ public class JobManager extends AbstractComponent {
return buildNewClusterState(currentState, builder); return buildNewClusterState(currentState, builder);
} }
public void updateSchedulerStatus(UpdateSchedulerStatusAction.Request request, public void updateDatafeedStatus(UpdateDatafeedStatusAction.Request request,
ActionListener<UpdateSchedulerStatusAction.Response> actionListener) { ActionListener<UpdateDatafeedStatusAction.Response> actionListener) {
String schedulerId = request.getSchedulerId(); String datafeedId = request.getDatafeedId();
SchedulerStatus newStatus = request.getSchedulerStatus(); DatafeedStatus newStatus = request.getDatafeedStatus();
clusterService.submitStateUpdateTask("update-scheduler-status-" + schedulerId, clusterService.submitStateUpdateTask("update-datafeed-status-" + datafeedId,
new AckedClusterStateUpdateTask<UpdateSchedulerStatusAction.Response>(request, actionListener) { new AckedClusterStateUpdateTask<UpdateDatafeedStatusAction.Response>(request, actionListener) {
@Override @Override
public ClusterState execute(ClusterState currentState) throws Exception { public ClusterState execute(ClusterState currentState) throws Exception {
MlMetadata.Builder builder = createMlMetadataBuilder(currentState); MlMetadata.Builder builder = createMlMetadataBuilder(currentState);
builder.updateSchedulerStatus(schedulerId, newStatus); builder.updateDatafeedStatus(datafeedId, newStatus);
return buildNewClusterState(currentState, builder); return buildNewClusterState(currentState, builder);
} }
@Override @Override
protected UpdateSchedulerStatusAction.Response newResponse(boolean acknowledged) { protected UpdateDatafeedStatusAction.Response newResponse(boolean acknowledged) {
return new UpdateSchedulerStatusAction.Response(acknowledged); return new UpdateDatafeedStatusAction.Response(acknowledged);
} }
}); });
} }
@@ -44,16 +44,16 @@ public final class Messages {
public static final String JOB_AUDIT_REVERTED = "job.audit.reverted"; public static final String JOB_AUDIT_REVERTED = "job.audit.reverted";
public static final String JOB_AUDIT_OLD_RESULTS_DELETED = "job.audit.old.results.deleted"; public static final String JOB_AUDIT_OLD_RESULTS_DELETED = "job.audit.old.results.deleted";
public static final String JOB_AUDIT_SNAPSHOT_DELETED = "job.audit.snapshot.deleted"; public static final String JOB_AUDIT_SNAPSHOT_DELETED = "job.audit.snapshot.deleted";
public static final String JOB_AUDIT_SCHEDULER_STARTED_FROM_TO = "job.audit.scheduler.started.from.to"; public static final String JOB_AUDIT_DATAFEED_STARTED_FROM_TO = "job.audit.datafeed.started.from.to";
public static final String JOB_AUDIT_SCHEDULER_CONTINUED_REALTIME = "job.audit.scheduler.continued.realtime"; public static final String JOB_AUDIT_DATAFEED_CONTINUED_REALTIME = "job.audit.datafeed.continued.realtime";
public static final String JOB_AUDIT_SCHEDULER_STARTED_REALTIME = "job.audit.scheduler.started.realtime"; public static final String JOB_AUDIT_DATAFEED_STARTED_REALTIME = "job.audit.datafeed.started.realtime";
public static final String JOB_AUDIT_SCHEDULER_LOOKBACK_COMPLETED = "job.audit.scheduler.lookback.completed"; public static final String JOB_AUDIT_DATAFEED_LOOKBACK_COMPLETED = "job.audit.datafeed.lookback.completed";
public static final String JOB_AUDIT_SCHEDULER_STOPPED = "job.audit.scheduler.stopped"; public static final String JOB_AUDIT_DATAFEED_STOPPED = "job.audit.datafeed.stopped";
public static final String JOB_AUDIT_SCHEDULER_NO_DATA = "job.audit.scheduler.no.data"; public static final String JOB_AUDIT_DATAFEED_NO_DATA = "job.audit.datafeed.no.data";
public static final String JOB_AUDIR_SCHEDULER_DATA_SEEN_AGAIN = "job.audit.scheduler.data.seen.again"; public static final String JOB_AUDIT_DATAFEED_DATA_SEEN_AGAIN = "job.audit.datafeed.data.seen.again";
public static final String JOB_AUDIT_SCHEDULER_DATA_ANALYSIS_ERROR = "job.audit.scheduler.data.analysis.error"; public static final String JOB_AUDIT_DATAFEED_DATA_ANALYSIS_ERROR = "job.audit.datafeed.data.analysis.error";
public static final String JOB_AUDIT_SCHEDULER_DATA_EXTRACTION_ERROR = "job.audit.scheduler.data.extraction.error"; public static final String JOB_AUDIT_DATAFEED_DATA_EXTRACTION_ERROR = "job.audit.datafeed.data.extraction.error";
public static final String JOB_AUDIT_SCHEDULER_RECOVERED = "job.audit.scheduler.recovered"; public static final String JOB_AUDIT_DATAFEED_RECOVERED = "job.audit.datafeed.recovered";
public static final String SYSTEM_AUDIT_STARTED = "system.audit.started"; public static final String SYSTEM_AUDIT_STARTED = "system.audit.started";
public static final String SYSTEM_AUDIT_SHUTDOWN = "system.audit.shutdown"; public static final String SYSTEM_AUDIT_SHUTDOWN = "system.audit.shutdown";
@@ -155,8 +155,8 @@ public final class Messages {
public static final String JOB_CONFIG_UPDATE_MODEL_SNAPSHOT_RETENTION_DAYS_INVALID = "job.config.update.model.snapshot.retention.days." public static final String JOB_CONFIG_UPDATE_MODEL_SNAPSHOT_RETENTION_DAYS_INVALID = "job.config.update.model.snapshot.retention.days."
+ "invalid"; + "invalid";
public static final String JOB_CONFIG_UPDATE_RESULTS_RETENTION_DAYS_INVALID = "job.config.update.results.retention.days.invalid"; public static final String JOB_CONFIG_UPDATE_RESULTS_RETENTION_DAYS_INVALID = "job.config.update.results.retention.days.invalid";
public static final String JOB_CONFIG_UPDATE_SCHEDULE_CONFIG_PARSE_ERROR = "job.config.update.scheduler.config.parse.error"; public static final String JOB_CONFIG_UPDATE_DATAFEED_CONFIG_PARSE_ERROR = "job.config.update.datafeed.config.parse.error";
public static final String JOB_CONFIG_UPDATE_SCHEDULE_CONFIG_CANNOT_BE_NULL = "job.config.update.scheduler.config.cannot.be.null"; public static final String JOB_CONFIG_UPDATE_DATAFEED_CONFIG_CANNOT_BE_NULL = "job.config.update.datafeed.config.cannot.be.null";
public static final String JOB_CONFIG_TRANSFORM_CIRCULAR_DEPENDENCY = "job.config.transform.circular.dependency"; public static final String JOB_CONFIG_TRANSFORM_CIRCULAR_DEPENDENCY = "job.config.transform.circular.dependency";
public static final String JOB_CONFIG_TRANSFORM_CONDITION_REQUIRED = "job.config.transform.condition.required"; public static final String JOB_CONFIG_TRANSFORM_CONDITION_REQUIRED = "job.config.transform.condition.required";
@@ -185,18 +185,18 @@ public final class Messages {
public static final String JOB_DATA_CONCURRENT_USE_UPDATE = "job.data.concurrent.use.update"; public static final String JOB_DATA_CONCURRENT_USE_UPDATE = "job.data.concurrent.use.update";
public static final String JOB_DATA_CONCURRENT_USE_UPLOAD = "job.data.concurrent.use.upload"; public static final String JOB_DATA_CONCURRENT_USE_UPLOAD = "job.data.concurrent.use.upload";
public static final String SCHEDULER_CONFIG_INVALID_OPTION_VALUE = "scheduler.config.invalid.option.value"; public static final String DATAFEED_CONFIG_INVALID_OPTION_VALUE = "datafeed.config.invalid.option.value";
public static final String SCHEDULER_DOES_NOT_SUPPORT_JOB_WITH_LATENCY = "scheduler.does.not.support.job.with.latency"; public static final String DATAFEED_DOES_NOT_SUPPORT_JOB_WITH_LATENCY = "datafeed.does.not.support.job.with.latency";
public static final String SCHEDULER_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD = public static final String DATAFEED_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD =
"scheduler.aggregations.requires.job.with.summary.count.field"; "datafeed.aggregations.requires.job.with.summary.count.field";
public static final String SCHEDULER_CANNOT_START = "scheduler.cannot.start"; public static final String DATAFEED_CANNOT_START = "datafeed.cannot.start";
public static final String SCHEDULER_CANNOT_STOP_IN_CURRENT_STATE = "scheduler.cannot.stop.in.current.state"; public static final String DATAFEED_CANNOT_STOP_IN_CURRENT_STATE = "datafeed.cannot.stop.in.current.state";
public static final String SCHEDULER_CANNOT_UPDATE_IN_CURRENT_STATE = "scheduler.cannot.update.in.current.state"; public static final String DATAFEED_CANNOT_UPDATE_IN_CURRENT_STATE = "datafeed.cannot.update.in.current.state";
public static final String SCHEDULER_CANNOT_DELETE_IN_CURRENT_STATE = "scheduler.cannot.delete.in.current.state"; public static final String DATAFEED_CANNOT_DELETE_IN_CURRENT_STATE = "datafeed.cannot.delete.in.current.state";
public static final String SCHEDULER_FAILED_TO_STOP = "scheduler.failed.to.stop"; public static final String DATAFEED_FAILED_TO_STOP = "datafeed.failed.to.stop";
public static final String SCHEDULER_NOT_FOUND = "scheduler.not.found"; public static final String DATAFEED_NOT_FOUND = "datafeed.not.found";
public static final String JOB_MISSING_QUANTILES = "job.missing.quantiles"; public static final String JOB_MISSING_QUANTILES = "job.missing.quantiles";
public static final String JOB_UNKNOWN_ID = "job.unknown.id"; public static final String JOB_UNKNOWN_ID = "job.unknown.id";
@@ -213,7 +213,7 @@ public final class Messages {
public static final String JSON_TRANSFORM_CONFIG_MAPPING = "json.transform.config.mapping.error"; public static final String JSON_TRANSFORM_CONFIG_MAPPING = "json.transform.config.mapping.error";
public static final String JSON_TRANSFORM_CONFIG_PARSE = "json.transform.config.parse.error"; public static final String JSON_TRANSFORM_CONFIG_PARSE = "json.transform.config.parse.error";
public static final String REST_ACTION_NOT_ALLOWED_FOR_SCHEDULED_JOB = "rest.action.not.allowed.for.scheduled.job"; public static final String REST_ACTION_NOT_ALLOWED_FOR_DATAFEED_JOB = "rest.action.not.allowed.for.datafeed.job";
public static final String REST_INVALID_DATETIME_PARAMS = "rest.invalid.datetime.params"; public static final String REST_INVALID_DATETIME_PARAMS = "rest.invalid.datetime.params";
public static final String REST_INVALID_FLUSH_PARAMS_MISSING = "rest.invalid.flush.params.missing.argument"; public static final String REST_INVALID_FLUSH_PARAMS_MISSING = "rest.invalid.flush.params.missing.argument";
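These constants are property keys rather than user-facing text; getMessage() presumably resolves them through a resource bundle and formats in the caller's arguments. A sketch of that lookup under those assumptions; the bundle name ml_messages and its layout are invented here, and a matching .properties file would be needed at runtime:

    import java.text.MessageFormat;
    import java.util.ResourceBundle;

    // Key-to-text resolution for a message catalog (assumed bundle name).
    final class MessageCatalog {
        private static final ResourceBundle BUNDLE = ResourceBundle.getBundle("ml_messages");

        static String getMessage(String key, Object... args) {
            return MessageFormat.format(BUNDLE.getString(key), args);
        }

        private MessageCatalog() {}
    }

Keeping every key in one class is also what makes a sweep like this commit tractable: the scheduler-to-datafeed rename only has to touch the constant names and property keys, never scattered string literals.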
@@ -23,10 +23,10 @@ import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.ml.job.Job; import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.job.JobStatus; import org.elasticsearch.xpack.ml.job.JobStatus;
import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.scheduler.ScheduledJobValidator; import org.elasticsearch.xpack.ml.datafeed.DatafeedJobValidator;
import org.elasticsearch.xpack.ml.scheduler.Scheduler; import org.elasticsearch.xpack.ml.datafeed.Datafeed;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus; import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import java.io.IOException; import java.io.IOException;
@@ -44,7 +44,7 @@ public class MlMetadata implements MetaData.Custom {
private static final ParseField JOBS_FIELD = new ParseField("jobs"); private static final ParseField JOBS_FIELD = new ParseField("jobs");
private static final ParseField ALLOCATIONS_FIELD = new ParseField("allocations"); private static final ParseField ALLOCATIONS_FIELD = new ParseField("allocations");
private static final ParseField SCHEDULERS_FIELD = new ParseField("schedulers"); private static final ParseField DATAFEEDS_FIELD = new ParseField("datafeeds");
public static final String TYPE = "ml"; public static final String TYPE = "ml";
public static final MlMetadata EMPTY_METADATA = new MlMetadata(Collections.emptySortedMap(), public static final MlMetadata EMPTY_METADATA = new MlMetadata(Collections.emptySortedMap(),
@@ -56,18 +56,18 @@ public class MlMetadata implements MetaData.Custom {
static { static {
ML_METADATA_PARSER.declareObjectArray(Builder::putJobs, (p, c) -> Job.PARSER.apply(p, c).build(), JOBS_FIELD); ML_METADATA_PARSER.declareObjectArray(Builder::putJobs, (p, c) -> Job.PARSER.apply(p, c).build(), JOBS_FIELD);
ML_METADATA_PARSER.declareObjectArray(Builder::putAllocations, Allocation.PARSER, ALLOCATIONS_FIELD); ML_METADATA_PARSER.declareObjectArray(Builder::putAllocations, Allocation.PARSER, ALLOCATIONS_FIELD);
ML_METADATA_PARSER.declareObjectArray(Builder::putSchedulers, Scheduler.PARSER, SCHEDULERS_FIELD); ML_METADATA_PARSER.declareObjectArray(Builder::putDatafeeds, Datafeed.PARSER, DATAFEEDS_FIELD);
} }
private final SortedMap<String, Job> jobs; private final SortedMap<String, Job> jobs;
private final SortedMap<String, Allocation> allocations; private final SortedMap<String, Allocation> allocations;
private final SortedMap<String, Scheduler> schedulers; private final SortedMap<String, Datafeed> datafeeds;
private MlMetadata(SortedMap<String, Job> jobs, SortedMap<String, Allocation> allocations, private MlMetadata(SortedMap<String, Job> jobs, SortedMap<String, Allocation> allocations,
SortedMap<String, Scheduler> schedulers) { SortedMap<String, Datafeed> datafeeds) {
this.jobs = Collections.unmodifiableSortedMap(jobs); this.jobs = Collections.unmodifiableSortedMap(jobs);
this.allocations = Collections.unmodifiableSortedMap(allocations); this.allocations = Collections.unmodifiableSortedMap(allocations);
this.schedulers = Collections.unmodifiableSortedMap(schedulers); this.datafeeds = Collections.unmodifiableSortedMap(datafeeds);
} }
public Map<String, Job> getJobs() { public Map<String, Job> getJobs() {
@@ -78,12 +78,12 @@ public class MlMetadata implements MetaData.Custom {
return allocations; return allocations;
} }
public SortedMap<String, Scheduler> getSchedulers() { public SortedMap<String, Datafeed> getDatafeeds() {
return schedulers; return datafeeds;
} }
public Scheduler getScheduler(String schedulerId) { public Datafeed getDatafeed(String datafeedId) {
return schedulers.get(schedulerId); return datafeeds.get(datafeedId);
} }
@Override @Override
@@ -117,18 +117,18 @@
} }
this.allocations = allocations; this.allocations = allocations;
size = in.readVInt(); size = in.readVInt();
TreeMap<String, Scheduler> schedulers = new TreeMap<>(); TreeMap<String, Datafeed> datafeeds = new TreeMap<>();
for (int i = 0; i < size; i++) { for (int i = 0; i < size; i++) {
schedulers.put(in.readString(), new Scheduler(in)); datafeeds.put(in.readString(), new Datafeed(in));
} }
this.schedulers = schedulers; this.datafeeds = datafeeds;
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
writeMap(jobs, out); writeMap(jobs, out);
writeMap(allocations, out); writeMap(allocations, out);
writeMap(schedulers, out); writeMap(datafeeds, out);
} }
private static <T extends Writeable> void writeMap(Map<String, T> map, StreamOutput out) throws IOException { private static <T extends Writeable> void writeMap(Map<String, T> map, StreamOutput out) throws IOException {
@@ -143,7 +143,7 @@
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
mapValuesToXContent(JOBS_FIELD, jobs, builder, params); mapValuesToXContent(JOBS_FIELD, jobs, builder, params);
mapValuesToXContent(ALLOCATIONS_FIELD, allocations, builder, params); mapValuesToXContent(ALLOCATIONS_FIELD, allocations, builder, params);
mapValuesToXContent(SCHEDULERS_FIELD, schedulers, builder, params); mapValuesToXContent(DATAFEEDS_FIELD, datafeeds, builder, params);
return builder; return builder;
} }
@@ -160,27 +160,27 @@
final Diff<Map<String, Job>> jobs; final Diff<Map<String, Job>> jobs;
final Diff<Map<String, Allocation>> allocations; final Diff<Map<String, Allocation>> allocations;
final Diff<Map<String, Scheduler>> schedulers; final Diff<Map<String, Datafeed>> datafeeds;
MlMetadataDiff(MlMetadata before, MlMetadata after) { MlMetadataDiff(MlMetadata before, MlMetadata after) {
this.jobs = DiffableUtils.diff(before.jobs, after.jobs, DiffableUtils.getStringKeySerializer()); this.jobs = DiffableUtils.diff(before.jobs, after.jobs, DiffableUtils.getStringKeySerializer());
this.allocations = DiffableUtils.diff(before.allocations, after.allocations, DiffableUtils.getStringKeySerializer()); this.allocations = DiffableUtils.diff(before.allocations, after.allocations, DiffableUtils.getStringKeySerializer());
this.schedulers = DiffableUtils.diff(before.schedulers, after.schedulers, DiffableUtils.getStringKeySerializer()); this.datafeeds = DiffableUtils.diff(before.datafeeds, after.datafeeds, DiffableUtils.getStringKeySerializer());
} }
@Override @Override
public MetaData.Custom apply(MetaData.Custom part) { public MetaData.Custom apply(MetaData.Custom part) {
TreeMap<String, Job> newJobs = new TreeMap<>(jobs.apply(((MlMetadata) part).jobs)); TreeMap<String, Job> newJobs = new TreeMap<>(jobs.apply(((MlMetadata) part).jobs));
TreeMap<String, Allocation> newAllocations = new TreeMap<>(allocations.apply(((MlMetadata) part).allocations)); TreeMap<String, Allocation> newAllocations = new TreeMap<>(allocations.apply(((MlMetadata) part).allocations));
TreeMap<String, Scheduler> newSchedulers = new TreeMap<>(schedulers.apply(((MlMetadata) part).schedulers)); TreeMap<String, Datafeed> newDatafeeds = new TreeMap<>(datafeeds.apply(((MlMetadata) part).datafeeds));
return new MlMetadata(newJobs, newAllocations, newSchedulers); return new MlMetadata(newJobs, newAllocations, newDatafeeds);
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
jobs.writeTo(out); jobs.writeTo(out);
allocations.writeTo(out); allocations.writeTo(out);
schedulers.writeTo(out); datafeeds.writeTo(out);
} }
} }
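MlMetadataDiff above ships only what changed between two cluster states instead of re-sending the whole maps. A JDK-only sketch of that compute-and-apply round trip for string-keyed maps; this is a simplification of the shape of the work DiffableUtils does, not its actual API:

    import java.util.Map;
    import java.util.Set;
    import java.util.TreeMap;
    import java.util.TreeSet;

    // Compute which entries were added, changed, or removed, then replay
    // that onto another copy of the map.
    final class MapDiff<V> {
        private final Map<String, V> upserts = new TreeMap<>();
        private final Set<String> deletes = new TreeSet<>();

        static <V> MapDiff<V> diff(Map<String, V> before, Map<String, V> after) {
            MapDiff<V> d = new MapDiff<>();
            after.forEach((k, v) -> {
                if (!v.equals(before.get(k))) {
                    d.upserts.put(k, v); // new or changed entry
                }
            });
            before.keySet().forEach(k -> {
                if (!after.containsKey(k)) {
                    d.deletes.add(k);    // entry was removed
                }
            });
            return d;
        }

        Map<String, V> apply(Map<String, V> part) {
            Map<String, V> result = new TreeMap<>(part);
            result.keySet().removeAll(deletes);
            result.putAll(upserts);
            return result;
        }
    }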
@@ -193,30 +193,30 @@
MlMetadata that = (MlMetadata) o; MlMetadata that = (MlMetadata) o;
return Objects.equals(jobs, that.jobs) && return Objects.equals(jobs, that.jobs) &&
Objects.equals(allocations, that.allocations) && Objects.equals(allocations, that.allocations) &&
Objects.equals(schedulers, that.schedulers); Objects.equals(datafeeds, that.datafeeds);
} }
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(jobs, allocations, schedulers); return Objects.hash(jobs, allocations, datafeeds);
} }
public static class Builder { public static class Builder {
private TreeMap<String, Job> jobs; private TreeMap<String, Job> jobs;
private TreeMap<String, Allocation> allocations; private TreeMap<String, Allocation> allocations;
private TreeMap<String, Scheduler> schedulers; private TreeMap<String, Datafeed> datafeeds;
public Builder() { public Builder() {
this.jobs = new TreeMap<>(); this.jobs = new TreeMap<>();
this.allocations = new TreeMap<>(); this.allocations = new TreeMap<>();
this.schedulers = new TreeMap<>(); this.datafeeds = new TreeMap<>();
} }
public Builder(MlMetadata previous) { public Builder(MlMetadata previous) {
jobs = new TreeMap<>(previous.jobs); jobs = new TreeMap<>(previous.jobs);
allocations = new TreeMap<>(previous.allocations); allocations = new TreeMap<>(previous.allocations);
schedulers = new TreeMap<>(previous.schedulers); datafeeds = new TreeMap<>(previous.datafeeds);
} }
public Builder putJob(Job job, boolean overwrite) { public Builder putJob(Job job, boolean overwrite) {
@@ -241,10 +241,10 @@ public class MlMetadata implements MetaData.Custom {
throw new ResourceNotFoundException("job [" + jobId + "] does not exist"); throw new ResourceNotFoundException("job [" + jobId + "] does not exist");
} }
Optional<Scheduler> scheduler = getSchedulerByJobId(jobId); Optional<Datafeed> datafeed = getDatafeedByJobId(jobId);
if (scheduler.isPresent()) { if (datafeed.isPresent()) {
throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] while scheduler [" throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] while datafeed ["
+ scheduler.get().getId() + "] refers to it"); + datafeed.get().getId() + "] refers to it");
} }
Allocation previousAllocation = this.allocations.remove(jobId); Allocation previousAllocation = this.allocations.remove(jobId);
@@ -260,45 +260,45 @@
return this; return this;
} }
public Builder putScheduler(SchedulerConfig schedulerConfig) { public Builder putDatafeed(DatafeedConfig datafeedConfig) {
if (schedulers.containsKey(schedulerConfig.getId())) { if (datafeeds.containsKey(datafeedConfig.getId())) {
throw new ResourceAlreadyExistsException("A scheduler with id [" + schedulerConfig.getId() + "] already exists"); throw new ResourceAlreadyExistsException("A datafeed with id [" + datafeedConfig.getId() + "] already exists");
} }
String jobId = schedulerConfig.getJobId(); String jobId = datafeedConfig.getJobId();
Job job = jobs.get(jobId); Job job = jobs.get(jobId);
if (job == null) { if (job == null) {
throw ExceptionsHelper.missingJobException(jobId); throw ExceptionsHelper.missingJobException(jobId);
} }
Optional<Scheduler> existingScheduler = getSchedulerByJobId(jobId); Optional<Datafeed> existingDatafeed = getDatafeedByJobId(jobId);
if (existingScheduler.isPresent()) { if (existingDatafeed.isPresent()) {
throw ExceptionsHelper.conflictStatusException("A scheduler [" + existingScheduler.get().getId() throw ExceptionsHelper.conflictStatusException("A datafeed [" + existingDatafeed.get().getId()
+ "] already exists for job [" + jobId + "]"); + "] already exists for job [" + jobId + "]");
} }
ScheduledJobValidator.validate(schedulerConfig, job); DatafeedJobValidator.validate(datafeedConfig, job);
return putScheduler(new Scheduler(schedulerConfig, SchedulerStatus.STOPPED)); return putDatafeed(new Datafeed(datafeedConfig, DatafeedStatus.STOPPED));
} }
private Builder putScheduler(Scheduler scheduler) { private Builder putDatafeed(Datafeed datafeed) {
schedulers.put(scheduler.getId(), scheduler); datafeeds.put(datafeed.getId(), datafeed);
return this; return this;
} }
public Builder removeScheduler(String schedulerId) { public Builder removeDatafeed(String datafeedId) {
Scheduler scheduler = schedulers.get(schedulerId); Datafeed datafeed = datafeeds.get(datafeedId);
if (scheduler == null) { if (datafeed == null) {
throw ExceptionsHelper.missingSchedulerException(schedulerId); throw ExceptionsHelper.missingDatafeedException(datafeedId);
} }
if (scheduler.getStatus() != SchedulerStatus.STOPPED) { if (datafeed.getStatus() != DatafeedStatus.STOPPED) {
String msg = Messages.getMessage(Messages.SCHEDULER_CANNOT_DELETE_IN_CURRENT_STATE, schedulerId, scheduler.getStatus()); String msg = Messages.getMessage(Messages.DATAFEED_CANNOT_DELETE_IN_CURRENT_STATE, datafeedId, datafeed.getStatus());
throw ExceptionsHelper.conflictStatusException(msg); throw ExceptionsHelper.conflictStatusException(msg);
} }
schedulers.remove(schedulerId); datafeeds.remove(datafeedId);
return this; return this;
} }
private Optional<Scheduler> getSchedulerByJobId(String jobId) { private Optional<Datafeed> getDatafeedByJobId(String jobId) {
return schedulers.values().stream().filter(s -> s.getJobId().equals(jobId)).findFirst(); return datafeeds.values().stream().filter(s -> s.getJobId().equals(jobId)).findFirst();
} }
// only for parsing // only for parsing
@@ -317,15 +317,15 @@ public class MlMetadata implements MetaData.Custom {
return this; return this;
} }
private Builder putSchedulers(Collection<Scheduler> schedulers) { private Builder putDatafeeds(Collection<Datafeed> datafeeds) {
for (Scheduler scheduler : schedulers) { for (Datafeed datafeed : datafeeds) {
putScheduler(scheduler); putDatafeed(datafeed);
} }
return this; return this;
} }
public MlMetadata build() { public MlMetadata build() {
return new MlMetadata(jobs, allocations, schedulers); return new MlMetadata(jobs, allocations, datafeeds);
} }
public Builder assignToNode(String jobId, String nodeId) { public Builder assignToNode(String jobId, String nodeId) {
@@ -349,12 +349,12 @@
throw new IllegalStateException("[" + jobId + "] no allocation exist to update the status to [" + jobStatus + "]"); throw new IllegalStateException("[" + jobId + "] no allocation exist to update the status to [" + jobStatus + "]");
} }
// Cannot update the status to DELETING if there are schedulers attached // Cannot update the status to DELETING if there are datafeeds attached
if (jobStatus.equals(JobStatus.DELETING)) { if (jobStatus.equals(JobStatus.DELETING)) {
Optional<Scheduler> scheduler = getSchedulerByJobId(jobId); Optional<Datafeed> datafeed = getDatafeedByJobId(jobId);
if (scheduler.isPresent()) { if (datafeed.isPresent()) {
throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] while scheduler [" throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] while datafeed ["
+ scheduler.get().getId() + "] refers to it"); + datafeed.get().getId() + "] refers to it");
} }
} }
@@ -397,32 +397,32 @@
return this; return this;
} }
public Builder updateSchedulerStatus(String schedulerId, SchedulerStatus newStatus) { public Builder updateDatafeedStatus(String datafeedId, DatafeedStatus newStatus) {
Scheduler scheduler = schedulers.get(schedulerId); Datafeed datafeed = datafeeds.get(datafeedId);
if (scheduler == null) { if (datafeed == null) {
throw ExceptionsHelper.missingSchedulerException(schedulerId); throw ExceptionsHelper.missingDatafeedException(datafeedId);
} }
SchedulerStatus currentStatus = scheduler.getStatus(); DatafeedStatus currentStatus = datafeed.getStatus();
switch (newStatus) { switch (newStatus) {
case STARTED: case STARTED:
if (currentStatus != SchedulerStatus.STOPPED) { if (currentStatus != DatafeedStatus.STOPPED) {
String msg = Messages.getMessage(Messages.SCHEDULER_CANNOT_START, schedulerId, newStatus); String msg = Messages.getMessage(Messages.DATAFEED_CANNOT_START, datafeedId, newStatus);
throw ExceptionsHelper.conflictStatusException(msg); throw ExceptionsHelper.conflictStatusException(msg);
} }
break; break;
case STOPPED: case STOPPED:
if (currentStatus != SchedulerStatus.STARTED) { if (currentStatus != DatafeedStatus.STARTED) {
String msg = Messages.getMessage(Messages.SCHEDULER_CANNOT_STOP_IN_CURRENT_STATE, schedulerId, newStatus); String msg = Messages.getMessage(Messages.DATAFEED_CANNOT_STOP_IN_CURRENT_STATE, datafeedId, newStatus);
throw ExceptionsHelper.conflictStatusException(msg); throw ExceptionsHelper.conflictStatusException(msg);
} }
break; break;
default: default:
throw new IllegalArgumentException("[" + schedulerId + "] requested invalid scheduler status [" + newStatus + "]"); throw new IllegalArgumentException("[" + datafeedId + "] requested invalid datafeed status [" + newStatus + "]");
} }
schedulers.put(schedulerId, new Scheduler(scheduler.getConfig(), newStatus)); datafeeds.put(datafeedId, new Datafeed(datafeed.getConfig(), newStatus));
return this; return this;
} }
} }
} }
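Taken together, the updateDatafeedStatus cases above implement a two-state machine: a datafeed may only move STOPPED -> STARTED or STARTED -> STOPPED, and any other request is rejected. A minimal sketch of that rule, assuming STARTED and STOPPED are the only statuses involved (the helper name is hypothetical, not part of this commit):

    static boolean isLegalTransition(DatafeedStatus current, DatafeedStatus requested) {
        // Mirrors the switch above: STARTED is only reachable from STOPPED,
        // STOPPED only from STARTED; anything else is a conflict.
        return (requested == DatafeedStatus.STARTED && current == DatafeedStatus.STOPPED)
                || (requested == DatafeedStatus.STOPPED && current == DatafeedStatus.STARTED);
    }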


@ -38,9 +38,9 @@ import java.util.function.Function;
* changes when each of the reporting stages are passed. If the * changes when each of the reporting stages are passed. If the
* function returns {@code true} the usage is logged. * function returns {@code true} the usage is logged.
* *
* DataCounts are persisted periodically in a scheduled task via * DataCounts are persisted periodically in a background task via
* {@linkplain JobDataCountsPersister}, {@link #close()} must be called to * {@linkplain JobDataCountsPersister}, {@link #close()} must be called to
* cancel the scheduled task. * cancel the background task.
*/ */
public class StatusReporter extends AbstractComponent implements Closeable { public class StatusReporter extends AbstractComponent implements Closeable {
/** /**
@ -78,7 +78,7 @@ public class StatusReporter extends AbstractComponent implements Closeable {
private Function<Long, Boolean> reportingBoundaryFunction; private Function<Long, Boolean> reportingBoundaryFunction;
private volatile boolean persistDataCountsOnNextRecord; private volatile boolean persistDataCountsOnNextRecord;
private final ThreadPool.Cancellable persistDataCountsScheduledAction; private final ThreadPool.Cancellable persistDataCountsDatafeedAction;
public StatusReporter(ThreadPool threadPool, Settings settings, String jobId, DataCounts counts, UsageReporter usageReporter, public StatusReporter(ThreadPool threadPool, Settings settings, String jobId, DataCounts counts, UsageReporter usageReporter,
JobDataCountsPersister dataCountsPersister) { JobDataCountsPersister dataCountsPersister) {
@ -97,7 +97,7 @@ public class StatusReporter extends AbstractComponent implements Closeable {
reportingBoundaryFunction = this::reportEvery100Records; reportingBoundaryFunction = this::reportEvery100Records;
persistDataCountsScheduledAction = threadPool.scheduleWithFixedDelay(() -> persistDataCountsOnNextRecord = true, persistDataCountsDatafeedAction = threadPool.scheduleWithFixedDelay(() -> persistDataCountsOnNextRecord = true,
PERSIST_INTERVAL, ThreadPool.Names.GENERIC); PERSIST_INTERVAL, ThreadPool.Names.GENERIC);
} }
@ -353,7 +353,7 @@ public class StatusReporter extends AbstractComponent implements Closeable {
@Override @Override
public void close() { public void close() {
persistDataCountsScheduledAction.cancel(); persistDataCountsDatafeedAction.cancel();
} }
/** /**
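The reporter persists counts via a flag flipped by a recurring thread-pool task rather than writing on every record. A condensed sketch of that lifecycle, restricted to the calls visible in this diff:

    // Constructor: arm the flag on a fixed delay; the actual write happens on the next record.
    persistDataCountsDatafeedAction = threadPool.scheduleWithFixedDelay(
            () -> persistDataCountsOnNextRecord = true, PERSIST_INTERVAL, ThreadPool.Names.GENERIC);

    // close(): cancel the recurring task so it does not outlive the reporter.
    persistDataCountsDatafeedAction.cancel();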


@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.rest.schedulers; package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
@ -13,25 +13,25 @@ import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.AcknowledgedRestListener; import org.elasticsearch.rest.action.AcknowledgedRestListener;
import org.elasticsearch.xpack.ml.MlPlugin; import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.DeleteSchedulerAction; import org.elasticsearch.xpack.ml.action.DeleteDatafeedAction;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.io.IOException; import java.io.IOException;
public class RestDeleteSchedulerAction extends BaseRestHandler { public class RestDeleteDatafeedAction extends BaseRestHandler {
@Inject @Inject
public RestDeleteSchedulerAction(Settings settings, RestController controller) { public RestDeleteDatafeedAction(Settings settings, RestController controller) {
super(settings); super(settings);
controller.registerHandler(RestRequest.Method.DELETE, MlPlugin.BASE_PATH + "schedulers/{" controller.registerHandler(RestRequest.Method.DELETE, MlPlugin.BASE_PATH + "datafeeds/{"
+ SchedulerConfig.ID.getPreferredName() + "}", this); + DatafeedConfig.ID.getPreferredName() + "}", this);
} }
@Override @Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
String schedulerId = restRequest.param(SchedulerConfig.ID.getPreferredName()); String datafeedId = restRequest.param(DatafeedConfig.ID.getPreferredName());
DeleteSchedulerAction.Request deleteSchedulerRequest = new DeleteSchedulerAction.Request(schedulerId); DeleteDatafeedAction.Request deleteDatafeedRequest = new DeleteDatafeedAction.Request(datafeedId);
return channel -> client.execute(DeleteSchedulerAction.INSTANCE, deleteSchedulerRequest, new AcknowledgedRestListener<>(channel)); return channel -> client.execute(DeleteDatafeedAction.INSTANCE, deleteDatafeedRequest, new AcknowledgedRestListener<>(channel));
} }
} }


@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.rest.schedulers; package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
@ -13,23 +13,23 @@ import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.ml.MlPlugin; import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.GetSchedulersAction; import org.elasticsearch.xpack.ml.action.GetDatafeedsAction;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.io.IOException; import java.io.IOException;
public class RestGetSchedulersAction extends BaseRestHandler { public class RestGetDatafeedsAction extends BaseRestHandler {
@Inject @Inject
public RestGetSchedulersAction(Settings settings, RestController controller) { public RestGetDatafeedsAction(Settings settings, RestController controller) {
super(settings); super(settings);
controller.registerHandler(RestRequest.Method.GET, MlPlugin.BASE_PATH controller.registerHandler(RestRequest.Method.GET, MlPlugin.BASE_PATH
+ "schedulers/{" + SchedulerConfig.ID.getPreferredName() + "}", this); + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", this);
} }
@Override @Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
GetSchedulersAction.Request request = new GetSchedulersAction.Request(restRequest.param(SchedulerConfig.ID.getPreferredName())); GetDatafeedsAction.Request request = new GetDatafeedsAction.Request(restRequest.param(DatafeedConfig.ID.getPreferredName()));
return channel -> client.execute(GetSchedulersAction.INSTANCE, request, new RestToXContentListener<>(channel)); return channel -> client.execute(GetDatafeedsAction.INSTANCE, request, new RestToXContentListener<>(channel));
} }
} }


@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.rest.schedulers; package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
@ -13,24 +13,24 @@ import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.ml.MlPlugin; import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.GetSchedulersStatsAction; import org.elasticsearch.xpack.ml.action.GetDatafeedsStatsAction;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.io.IOException; import java.io.IOException;
public class RestGetSchedulersStatsAction extends BaseRestHandler { public class RestGetDatafeedsStatsAction extends BaseRestHandler {
@Inject @Inject
public RestGetSchedulersStatsAction(Settings settings, RestController controller) { public RestGetDatafeedsStatsAction(Settings settings, RestController controller) {
super(settings); super(settings);
controller.registerHandler(RestRequest.Method.GET, MlPlugin.BASE_PATH controller.registerHandler(RestRequest.Method.GET, MlPlugin.BASE_PATH
+ "schedulers/{" + SchedulerConfig.ID.getPreferredName() + "}/_stats", this); + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_stats", this);
} }
@Override @Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
GetSchedulersStatsAction.Request request = new GetSchedulersStatsAction.Request( GetDatafeedsStatsAction.Request request = new GetDatafeedsStatsAction.Request(
restRequest.param(SchedulerConfig.ID.getPreferredName())); restRequest.param(DatafeedConfig.ID.getPreferredName()));
return channel -> client.execute(GetSchedulersStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); return channel -> client.execute(GetDatafeedsStatsAction.INSTANCE, request, new RestToXContentListener<>(channel));
} }
} }


@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.rest.schedulers; package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
@ -14,26 +14,26 @@ import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.ml.MlPlugin; import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.PutSchedulerAction; import org.elasticsearch.xpack.ml.action.PutDatafeedAction;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.io.IOException; import java.io.IOException;
public class RestPutSchedulerAction extends BaseRestHandler { public class RestPutDatafeedAction extends BaseRestHandler {
@Inject @Inject
public RestPutSchedulerAction(Settings settings, RestController controller) { public RestPutDatafeedAction(Settings settings, RestController controller) {
super(settings); super(settings);
controller.registerHandler(RestRequest.Method.PUT, MlPlugin.BASE_PATH + "schedulers/{" controller.registerHandler(RestRequest.Method.PUT, MlPlugin.BASE_PATH + "datafeeds/{"
+ SchedulerConfig.ID.getPreferredName() + "}", this); + DatafeedConfig.ID.getPreferredName() + "}", this);
} }
@Override @Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
String schedulerId = restRequest.param(SchedulerConfig.ID.getPreferredName()); String datafeedId = restRequest.param(DatafeedConfig.ID.getPreferredName());
XContentParser parser = restRequest.contentParser(); XContentParser parser = restRequest.contentParser();
PutSchedulerAction.Request putSchedulerRequest = PutSchedulerAction.Request.parseRequest(schedulerId, parser); PutDatafeedAction.Request putDatafeedRequest = PutDatafeedAction.Request.parseRequest(datafeedId, parser);
return channel -> client.execute(PutSchedulerAction.INSTANCE, putSchedulerRequest, new RestToXContentListener<>(channel)); return channel -> client.execute(PutDatafeedAction.INSTANCE, putDatafeedRequest, new RestToXContentListener<>(channel));
} }
} }


@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.rest.schedulers; package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.node.NodeClient;
@ -17,46 +17,46 @@ import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.ml.MlPlugin; import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.StartSchedulerAction; import org.elasticsearch.xpack.ml.action.StartDatafeedAction;
import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.io.IOException; import java.io.IOException;
public class RestStartSchedulerAction extends BaseRestHandler { public class RestStartDatafeedAction extends BaseRestHandler {
private static final String DEFAULT_START = "0"; private static final String DEFAULT_START = "0";
@Inject @Inject
public RestStartSchedulerAction(Settings settings, RestController controller) { public RestStartDatafeedAction(Settings settings, RestController controller) {
super(settings); super(settings);
controller.registerHandler(RestRequest.Method.POST, controller.registerHandler(RestRequest.Method.POST,
MlPlugin.BASE_PATH + "schedulers/{" + SchedulerConfig.ID.getPreferredName() + "}/_start", this); MlPlugin.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_start", this);
} }
@Override @Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
String schedulerId = restRequest.param(SchedulerConfig.ID.getPreferredName()); String datafeedId = restRequest.param(DatafeedConfig.ID.getPreferredName());
StartSchedulerAction.Request jobSchedulerRequest; StartDatafeedAction.Request jobDatafeedRequest;
if (restRequest.hasContentOrSourceParam()) { if (restRequest.hasContentOrSourceParam()) {
XContentParser parser = restRequest.contentOrSourceParamParser(); XContentParser parser = restRequest.contentOrSourceParamParser();
jobSchedulerRequest = StartSchedulerAction.Request.parseRequest(schedulerId, parser); jobDatafeedRequest = StartDatafeedAction.Request.parseRequest(datafeedId, parser);
} else { } else {
long startTimeMillis = parseDateOrThrow(restRequest.param(StartSchedulerAction.START_TIME.getPreferredName(), long startTimeMillis = parseDateOrThrow(restRequest.param(StartDatafeedAction.START_TIME.getPreferredName(),
DEFAULT_START), StartSchedulerAction.START_TIME.getPreferredName()); DEFAULT_START), StartDatafeedAction.START_TIME.getPreferredName());
Long endTimeMillis = null; Long endTimeMillis = null;
if (restRequest.hasParam(StartSchedulerAction.END_TIME.getPreferredName())) { if (restRequest.hasParam(StartDatafeedAction.END_TIME.getPreferredName())) {
endTimeMillis = parseDateOrThrow(restRequest.param(StartSchedulerAction.END_TIME.getPreferredName()), endTimeMillis = parseDateOrThrow(restRequest.param(StartDatafeedAction.END_TIME.getPreferredName()),
StartSchedulerAction.END_TIME.getPreferredName()); StartDatafeedAction.END_TIME.getPreferredName());
} }
jobSchedulerRequest = new StartSchedulerAction.Request(schedulerId, startTimeMillis); jobDatafeedRequest = new StartDatafeedAction.Request(datafeedId, startTimeMillis);
jobSchedulerRequest.setEndTime(endTimeMillis); jobDatafeedRequest.setEndTime(endTimeMillis);
TimeValue startTimeout = restRequest.paramAsTime(StartSchedulerAction.START_TIMEOUT.getPreferredName(), TimeValue startTimeout = restRequest.paramAsTime(StartDatafeedAction.START_TIMEOUT.getPreferredName(),
TimeValue.timeValueSeconds(30)); TimeValue.timeValueSeconds(30));
jobSchedulerRequest.setStartTimeout(startTimeout); jobDatafeedRequest.setStartTimeout(startTimeout);
} }
return channel -> { return channel -> {
client.execute(StartSchedulerAction.INSTANCE, jobSchedulerRequest, new RestToXContentListener<>(channel)); client.execute(StartDatafeedAction.INSTANCE, jobDatafeedRequest, new RestToXContentListener<>(channel));
}; };
} }
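For callers that build the request programmatically rather than over REST, the same defaults apply. A minimal sketch using only the constructor and setters that appear in this diff (the datafeed id, end time, and listener are illustrative):

    StartDatafeedAction.Request request = new StartDatafeedAction.Request("my-datafeed", 0L);
    request.setEndTime(System.currentTimeMillis());           // omit for real-time operation
    request.setStartTimeout(TimeValue.timeValueSeconds(30));  // the REST layer's default
    client.execute(StartDatafeedAction.INSTANCE, request, listener);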


@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.rest.schedulers; package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
@ -14,27 +14,27 @@ import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.AcknowledgedRestListener; import org.elasticsearch.rest.action.AcknowledgedRestListener;
import org.elasticsearch.xpack.ml.MlPlugin; import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.StopSchedulerAction; import org.elasticsearch.xpack.ml.action.StopDatafeedAction;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.io.IOException; import java.io.IOException;
public class RestStopSchedulerAction extends BaseRestHandler { public class RestStopDatafeedAction extends BaseRestHandler {
@Inject @Inject
public RestStopSchedulerAction(Settings settings, RestController controller) { public RestStopDatafeedAction(Settings settings, RestController controller) {
super(settings); super(settings);
controller.registerHandler(RestRequest.Method.POST, MlPlugin.BASE_PATH + "schedulers/{" controller.registerHandler(RestRequest.Method.POST, MlPlugin.BASE_PATH + "datafeeds/{"
+ SchedulerConfig.ID.getPreferredName() + "}/_stop", this); + DatafeedConfig.ID.getPreferredName() + "}/_stop", this);
} }
@Override @Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
StopSchedulerAction.Request jobSchedulerRequest = new StopSchedulerAction.Request( StopDatafeedAction.Request jobDatafeedRequest = new StopDatafeedAction.Request(
restRequest.param(SchedulerConfig.ID.getPreferredName())); restRequest.param(DatafeedConfig.ID.getPreferredName()));
if (restRequest.hasParam("stop_timeout")) { if (restRequest.hasParam("stop_timeout")) {
jobSchedulerRequest.setStopTimeout(TimeValue.parseTimeValue(restRequest.param("stop_timeout"), "stop_timeout")); jobDatafeedRequest.setStopTimeout(TimeValue.parseTimeValue(restRequest.param("stop_timeout"), "stop_timeout"));
} }
return channel -> client.execute(StopSchedulerAction.INSTANCE, jobSchedulerRequest, new AcknowledgedRestListener<>(channel)); return channel -> client.execute(StopDatafeedAction.INSTANCE, jobDatafeedRequest, new AcknowledgedRestListener<>(channel));
} }
} }
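Across the six handlers above the route shapes are unchanged; only the resource segment moves from schedulers to datafeeds. Assuming MlPlugin.BASE_PATH resolves to _xpack/ml/ (the constant's value is not shown in this diff) and writing the path parameter as {datafeed_id} for readability, the registered routes become:

    PUT    _xpack/ml/datafeeds/{datafeed_id}
    GET    _xpack/ml/datafeeds/{datafeed_id}
    GET    _xpack/ml/datafeeds/{datafeed_id}/_stats
    POST   _xpack/ml/datafeeds/{datafeed_id}/_start
    POST   _xpack/ml/datafeeds/{datafeed_id}/_stop
    DELETE _xpack/ml/datafeeds/{datafeed_id}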


@ -13,25 +13,25 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata; import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.scheduler.Scheduler; import org.elasticsearch.xpack.ml.datafeed.Datafeed;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus; import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import java.util.function.Consumer; import java.util.function.Consumer;
import java.util.function.Predicate; import java.util.function.Predicate;
public class SchedulerStatusObserver { public class DatafeedStatusObserver {
private static final Logger LOGGER = Loggers.getLogger(SchedulerStatusObserver.class); private static final Logger LOGGER = Loggers.getLogger(DatafeedStatusObserver.class);
private final ThreadPool threadPool; private final ThreadPool threadPool;
private final ClusterService clusterService; private final ClusterService clusterService;
public SchedulerStatusObserver(ThreadPool threadPool, ClusterService clusterService) { public DatafeedStatusObserver(ThreadPool threadPool, ClusterService clusterService) {
this.threadPool = threadPool; this.threadPool = threadPool;
this.clusterService = clusterService; this.clusterService = clusterService;
} }
public void waitForStatus(String schedulerId, TimeValue waitTimeout, SchedulerStatus expectedStatus, Consumer<Exception> handler) { public void waitForStatus(String datafeedId, TimeValue waitTimeout, DatafeedStatus expectedStatus, Consumer<Exception> handler) {
ClusterStateObserver observer = ClusterStateObserver observer =
new ClusterStateObserver(clusterService, LOGGER, threadPool.getThreadContext()); new ClusterStateObserver(clusterService, LOGGER, threadPool.getThreadContext());
observer.waitForNextChange(new ClusterStateObserver.Listener() { observer.waitForNextChange(new ClusterStateObserver.Listener() {
@ -42,27 +42,27 @@ public class SchedulerStatusObserver {
@Override @Override
public void onClusterServiceClose() { public void onClusterServiceClose() {
Exception e = new IllegalArgumentException("Cluster service closed while waiting for scheduler status to change to [" Exception e = new IllegalArgumentException("Cluster service closed while waiting for datafeed status to change to ["
+ expectedStatus + "]"); + expectedStatus + "]");
handler.accept(new IllegalStateException(e)); handler.accept(new IllegalStateException(e));
} }
@Override @Override
public void onTimeout(TimeValue timeout) { public void onTimeout(TimeValue timeout) {
Exception e = new IllegalArgumentException("Timeout expired while waiting for scheduler status to change to [" Exception e = new IllegalArgumentException("Timeout expired while waiting for datafeed status to change to ["
+ expectedStatus + "]"); + expectedStatus + "]");
handler.accept(e); handler.accept(e);
} }
}, new SchedulerStoppedPredicate(schedulerId, expectedStatus), waitTimeout); }, new DatafeedStoppedPredicate(datafeedId, expectedStatus), waitTimeout);
} }
private static class SchedulerStoppedPredicate implements Predicate<ClusterState> { private static class DatafeedStoppedPredicate implements Predicate<ClusterState> {
private final String schedulerId; private final String datafeedId;
private final SchedulerStatus expectedStatus; private final DatafeedStatus expectedStatus;
SchedulerStoppedPredicate(String schedulerId, SchedulerStatus expectedStatus) { DatafeedStoppedPredicate(String datafeedId, DatafeedStatus expectedStatus) {
this.schedulerId = schedulerId; this.datafeedId = datafeedId;
this.expectedStatus = expectedStatus; this.expectedStatus = expectedStatus;
} }
@ -70,9 +70,9 @@ public class SchedulerStatusObserver {
public boolean test(ClusterState newState) { public boolean test(ClusterState newState) {
MlMetadata metadata = newState.getMetaData().custom(MlMetadata.TYPE); MlMetadata metadata = newState.getMetaData().custom(MlMetadata.TYPE);
if (metadata != null) { if (metadata != null) {
Scheduler scheduler = metadata.getScheduler(schedulerId); Datafeed datafeed = metadata.getDatafeed(datafeedId);
if (scheduler != null) { if (datafeed != null) {
return scheduler.getStatus() == expectedStatus; return datafeed.getStatus() == expectedStatus;
} }
} }
return false; return false;
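A typical call site for the observer, sketched from the signature above; the surrounding identifiers are hypothetical, and the handler is assumed to receive null on success (the success path is not shown in this hunk):

    DatafeedStatusObserver observer = new DatafeedStatusObserver(threadPool, clusterService);
    observer.waitForStatus("my-datafeed", TimeValue.timeValueSeconds(30), DatafeedStatus.STOPPED, e -> {
        if (e != null) {
            // Timeout or cluster-service close while waiting for the status change.
            listener.onFailure(e);
        }
    });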


@ -25,8 +25,8 @@ public class ExceptionsHelper {
throw new ResourceAlreadyExistsException(Messages.getMessage(Messages.JOB_CONFIG_ID_ALREADY_TAKEN, jobId)); throw new ResourceAlreadyExistsException(Messages.getMessage(Messages.JOB_CONFIG_ID_ALREADY_TAKEN, jobId));
} }
public static ResourceNotFoundException missingSchedulerException(String schedulerId) { public static ResourceNotFoundException missingDatafeedException(String datafeedId) {
throw new ResourceNotFoundException(Messages.getMessage(Messages.SCHEDULER_NOT_FOUND, schedulerId)); throw new ResourceNotFoundException(Messages.getMessage(Messages.DATAFEED_NOT_FOUND, datafeedId));
} }
public static ElasticsearchException serverError(String msg) { public static ElasticsearchException serverError(String msg) {


@ -27,16 +27,16 @@ job.audit.updated = Job updated: {0}
job.audit.reverted = Job model snapshot reverted to ''{0}'' job.audit.reverted = Job model snapshot reverted to ''{0}''
job.audit.old.results.deleted = Deleted results prior to {0} job.audit.old.results.deleted = Deleted results prior to {0}
job.audit.snapshot.deleted = Job model snapshot ''{0}'' deleted job.audit.snapshot.deleted = Job model snapshot ''{0}'' deleted
job.audit.scheduler.started.from.to = Scheduler started (from: {0} to: {1}) job.audit.datafeed.started.from.to = Datafeed started (from: {0} to: {1})
job.audit.scheduler.started.realtime = Scheduler started in real-time job.audit.datafeed.started.realtime = Datafeed started in real-time
job.audit.scheduler.continued.realtime = Scheduler continued in real-time job.audit.datafeed.continued.realtime = Datafeed continued in real-time
job.audit.scheduler.lookback.completed = Scheduler lookback completed job.audit.datafeed.lookback.completed = Datafeed lookback completed
job.audit.scheduler.stopped = Scheduler stopped job.audit.datafeed.stopped = Datafeed stopped
job.audit.scheduler.no.data = Scheduler has been retrieving no data for a while job.audit.datafeed.no.data = Datafeed has been retrieving no data for a while
job.audit.scheduler.data.seen.again = Scheduler has started retrieving data again job.audit.datafeed.data.seen.again = Datafeed has started retrieving data again
job.audit.scheduler.data.analysis.error = Scheduler is encountering errors submitting data for analysis: {0} job.audit.datafeed.data.analysis.error = Datafeed is encountering errors submitting data for analysis: {0}
job.audit.scheduler.data.extraction.error = Scheduler is encountering errors extracting data: {0} job.audit.datafeed.data.extraction.error = Datafeed is encountering errors extracting data: {0}
job.audit.scheduler.recovered = Scheduler has recovered data extraction and analysis job.audit.datafeed.recovered = Datafeed has recovered data extraction and analysis
system.audit.started = System started system.audit.started = System started
system.audit.shutdown = System shut down system.audit.shutdown = System shut down
@ -115,8 +115,8 @@ job.config.update.background.persist.interval.invalid = Invalid update value for
job.config.update.renormalization.window.days.invalid = Invalid update value for renormalization_window_days: value must be an exact number of days job.config.update.renormalization.window.days.invalid = Invalid update value for renormalization_window_days: value must be an exact number of days
job.config.update.model.snapshot.retention.days.invalid = Invalid update value for model_snapshot_retention_days: value must be an exact number of days job.config.update.model.snapshot.retention.days.invalid = Invalid update value for model_snapshot_retention_days: value must be an exact number of days
job.config.update.results.retention.days.invalid = Invalid update value for results_retention_days: value must be an exact number of days job.config.update.results.retention.days.invalid = Invalid update value for results_retention_days: value must be an exact number of days
job.config.update.scheduler.config.parse.error = JSON parse error reading the update value for scheduler_config job.config.update.datafeed.config.parse.error = JSON parse error reading the update value for datafeed_config
job.config.update.scheduler.config.cannot.be.null = Invalid update value for scheduler_config: null job.config.update.datafeed.config.cannot.be.null = Invalid update value for datafeed_config: null
job.config.transform.circular.dependency = Transform type {0} with inputs {1} has a circular dependency job.config.transform.circular.dependency = Transform type {0} with inputs {1} has a circular dependency
job.config.transform.condition.required = A condition must be defined for transform ''{0}'' job.config.transform.condition.required = A condition must be defined for transform ''{0}''
@ -135,10 +135,10 @@ job.config.unknown.function = Unknown function ''{0}''
job.index.already.exists = Cannot create index ''{0}'' as it already exists job.index.already.exists = Cannot create index ''{0}'' as it already exists
scheduler.config.invalid.option.value = Invalid {0} value ''{1}'' in scheduler configuration datafeed.config.invalid.option.value = Invalid {0} value ''{1}'' in datafeed configuration
scheduler.does.not.support.job.with.latency = A job configured with a scheduler cannot support latency datafeed.does.not.support.job.with.latency = A job configured with a datafeed cannot support latency
scheduler.aggregations.requires.job.with.summary.count.field = A job configured with a scheduler with aggregations must have summary_count_field_name ''{0}'' datafeed.aggregations.requires.job.with.summary.count.field = A job configured with a datafeed with aggregations must have summary_count_field_name ''{0}''
job.data.concurrent.use.close = Cannot close job {0} while another connection {2}is {1} the job job.data.concurrent.use.close = Cannot close job {0} while another connection {2}is {1} the job
job.data.concurrent.use.flush = Cannot flush job {0} while another connection {2}is {1} the job job.data.concurrent.use.flush = Cannot flush job {0} while another connection {2}is {1} the job
@ -151,12 +151,12 @@ job.data.concurrent.use.upload = Cannot write to job {0} while another connectio
job.missing.quantiles = Cannot read persisted quantiles for job ''{0}'' job.missing.quantiles = Cannot read persisted quantiles for job ''{0}''
job.unknown.id = No known job with id ''{0}'' job.unknown.id = No known job with id ''{0}''
scheduler.cannot.start = Cannot start scheduler [{0}] while its status is {1} datafeed.cannot.start = Cannot start datafeed [{0}] while its status is {1}
scheduler.cannot.stop.in.current.state = Cannot stop scheduler [{0}] while its status is {1} datafeed.cannot.stop.in.current.state = Cannot stop datafeed [{0}] while its status is {1}
scheduler.cannot.update.in.current.state = Cannot update scheduler [{0}] while its status is {1} datafeed.cannot.update.in.current.state = Cannot update datafeed [{0}] while its status is {1}
scheduler.cannot.delete.in.current.state = Cannot delete scheduler [{0}] while its status is {1} datafeed.cannot.delete.in.current.state = Cannot delete datafeed [{0}] while its status is {1}
scheduler.failed.to.stop = Failed to stop scheduler datafeed.failed.to.stop = Failed to stop datafeed
scheduler.not.found = No scheduler with id [{0}] exists datafeed.not.found = No datafeed with id [{0}] exists
json.job.config.mapping.error = JSON mapping error reading the job configuration json.job.config.mapping.error = JSON mapping error reading the job configuration
json.job.config.parse.error = JSON parse error reading the job configuration json.job.config.parse.error = JSON parse error reading the job configuration
@ -170,7 +170,7 @@ json.list.document.parse.error = JSON parse error reading the list
json.transform.config.mapping.error = JSON mapping error reading the transform configuration json.transform.config.mapping.error = JSON mapping error reading the transform configuration
json.transform.config.parse.error = JSON parse error reading the transform configuration json.transform.config.parse.error = JSON parse error reading the transform configuration
rest.action.not.allowed.for.scheduled.job = This action is not allowed for a scheduled job rest.action.not.allowed.for.datafeed.job = This action is not allowed for a datafeed job
rest.invalid.datetime.params = Query param ''{0}'' with value ''{1}'' cannot be parsed as a date or converted to a number (epoch). rest.invalid.datetime.params = Query param ''{0}'' with value ''{1}'' cannot be parsed as a date or converted to a number (epoch).
rest.invalid.flush.params.missing.argument = Invalid flush parameters: ''{0}'' has not been specified. rest.invalid.flush.params.missing.argument = Invalid flush parameters: ''{0}'' has not been specified.
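These values are java.text.MessageFormat patterns, which is why parameters appear as {0}, {1} and why literal quotes are doubled (''{0}'' renders as '{0}' with the argument substituted). A usage sketch matching the call sites earlier in this diff:

    String msg = Messages.getMessage(Messages.DATAFEED_CANNOT_START, "my-datafeed", DatafeedStatus.STARTED);
    // -> "Cannot start datafeed [my-datafeed] while its status is STARTED"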


@ -27,9 +27,9 @@ import org.elasticsearch.xpack.ml.job.Detector;
import org.elasticsearch.xpack.ml.job.Job; import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata; import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.ml.scheduler.Scheduler; import org.elasticsearch.xpack.ml.datafeed.Datafeed;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus; import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.junit.After; import org.junit.After;
import java.io.IOException; import java.io.IOException;
@ -45,7 +45,7 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
@ESIntegTestCase.ClusterScope(numDataNodes = 1) @ESIntegTestCase.ClusterScope(numDataNodes = 1)
public class ScheduledJobsIT extends ESIntegTestCase { public class DatafeedJobsIT extends ESIntegTestCase {
@Override @Override
protected Collection<Class<? extends Plugin>> nodePlugins() { protected Collection<Class<? extends Plugin>> nodePlugins() {
@ -85,16 +85,16 @@ public class ScheduledJobsIT extends ESIntegTestCase {
OpenJobAction.Response openJobResponse = client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(job.getId())).get(); OpenJobAction.Response openJobResponse = client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(job.getId())).get();
assertTrue(openJobResponse.isAcknowledged()); assertTrue(openJobResponse.isAcknowledged());
SchedulerConfig schedulerConfig = createScheduler(job.getId() + "-scheduler", job.getId(), Collections.singletonList("data-*")); DatafeedConfig datafeedConfig = createDatafeed(job.getId() + "-datafeed", job.getId(), Collections.singletonList("data-*"));
PutSchedulerAction.Request putSchedulerRequest = new PutSchedulerAction.Request(schedulerConfig); PutDatafeedAction.Request putDatafeedRequest = new PutDatafeedAction.Request(datafeedConfig);
PutSchedulerAction.Response putSchedulerResponse = client().execute(PutSchedulerAction.INSTANCE, putSchedulerRequest).get(); PutDatafeedAction.Response putDatafeedResponse = client().execute(PutDatafeedAction.INSTANCE, putDatafeedRequest).get();
assertTrue(putSchedulerResponse.isAcknowledged()); assertTrue(putDatafeedResponse.isAcknowledged());
StartSchedulerAction.Request startSchedulerRequest = new StartSchedulerAction.Request(schedulerConfig.getId(), 0L); StartDatafeedAction.Request startDatafeedRequest = new StartDatafeedAction.Request(datafeedConfig.getId(), 0L);
startSchedulerRequest.setEndTime(now); startDatafeedRequest.setEndTime(now);
StartSchedulerAction.Response startSchedulerResponse = StartDatafeedAction.Response startDatafeedResponse =
client().execute(StartSchedulerAction.INSTANCE, startSchedulerRequest).get(); client().execute(StartDatafeedAction.INSTANCE, startDatafeedRequest).get();
assertTrue(startSchedulerResponse.isStarted()); assertTrue(startDatafeedResponse.isStarted());
assertBusy(() -> { assertBusy(() -> {
DataCounts dataCounts = getDataCounts(job.getId()); DataCounts dataCounts = getDataCounts(job.getId());
assertThat(dataCounts.getProcessedRecordCount(), equalTo(numDocs + numDocs2)); assertThat(dataCounts.getProcessedRecordCount(), equalTo(numDocs + numDocs2));
@ -102,7 +102,7 @@ public class ScheduledJobsIT extends ESIntegTestCase {
MlMetadata mlMetadata = client().admin().cluster().prepareState().all().get() MlMetadata mlMetadata = client().admin().cluster().prepareState().all().get()
.getState().metaData().custom(MlMetadata.TYPE); .getState().metaData().custom(MlMetadata.TYPE);
assertThat(mlMetadata.getScheduler(schedulerConfig.getId()).get().getStatus(), equalTo(SchedulerStatus.STOPPED)); assertThat(mlMetadata.getDatafeed(datafeedConfig.getId()).get().getStatus(), equalTo(DatafeedStatus.STOPPED));
}); });
} }
@ -122,15 +122,15 @@ public class ScheduledJobsIT extends ESIntegTestCase {
OpenJobAction.Response openJobResponse = client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(job.getId())).get(); OpenJobAction.Response openJobResponse = client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(job.getId())).get();
assertTrue(openJobResponse.isAcknowledged()); assertTrue(openJobResponse.isAcknowledged());
SchedulerConfig schedulerConfig = createScheduler(job.getId() + "-scheduler", job.getId(), Collections.singletonList("data")); DatafeedConfig datafeedConfig = createDatafeed(job.getId() + "-datafeed", job.getId(), Collections.singletonList("data"));
PutSchedulerAction.Request putSchedulerRequest = new PutSchedulerAction.Request(schedulerConfig); PutDatafeedAction.Request putDatafeedRequest = new PutDatafeedAction.Request(datafeedConfig);
PutSchedulerAction.Response putSchedulerResponse = client().execute(PutSchedulerAction.INSTANCE, putSchedulerRequest).get(); PutDatafeedAction.Response putDatafeedResponse = client().execute(PutDatafeedAction.INSTANCE, putDatafeedRequest).get();
assertTrue(putSchedulerResponse.isAcknowledged()); assertTrue(putDatafeedResponse.isAcknowledged());
StartSchedulerAction.Request startSchedulerRequest = new StartSchedulerAction.Request(schedulerConfig.getId(), 0L); StartDatafeedAction.Request startDatafeedRequest = new StartDatafeedAction.Request(datafeedConfig.getId(), 0L);
StartSchedulerAction.Response startSchedulerResponse = StartDatafeedAction.Response startDatafeedResponse =
client().execute(StartSchedulerAction.INSTANCE, startSchedulerRequest).get(); client().execute(StartDatafeedAction.INSTANCE, startDatafeedRequest).get();
assertTrue(startSchedulerResponse.isStarted()); assertTrue(startDatafeedResponse.isStarted());
assertBusy(() -> { assertBusy(() -> {
DataCounts dataCounts = getDataCounts(job.getId()); DataCounts dataCounts = getDataCounts(job.getId());
assertThat(dataCounts.getProcessedRecordCount(), equalTo(numDocs1)); assertThat(dataCounts.getProcessedRecordCount(), equalTo(numDocs1));
@ -146,13 +146,13 @@ public class ScheduledJobsIT extends ESIntegTestCase {
assertThat(dataCounts.getOutOfOrderTimeStampCount(), equalTo(0L)); assertThat(dataCounts.getOutOfOrderTimeStampCount(), equalTo(0L));
}, 30, TimeUnit.SECONDS); }, 30, TimeUnit.SECONDS);
StopSchedulerAction.Request stopSchedulerRequest = new StopSchedulerAction.Request(schedulerConfig.getId()); StopDatafeedAction.Request stopDatafeedRequest = new StopDatafeedAction.Request(datafeedConfig.getId());
StopSchedulerAction.Response stopJobResponse = client().execute(StopSchedulerAction.INSTANCE, stopSchedulerRequest).get(); StopDatafeedAction.Response stopJobResponse = client().execute(StopDatafeedAction.INSTANCE, stopDatafeedRequest).get();
assertTrue(stopJobResponse.isAcknowledged()); assertTrue(stopJobResponse.isAcknowledged());
assertBusy(() -> { assertBusy(() -> {
MlMetadata mlMetadata = client().admin().cluster().prepareState().all().get() MlMetadata mlMetadata = client().admin().cluster().prepareState().all().get()
.getState().metaData().custom(MlMetadata.TYPE); .getState().metaData().custom(MlMetadata.TYPE);
assertThat(mlMetadata.getScheduler(schedulerConfig.getId()).get().getStatus(), equalTo(SchedulerStatus.STOPPED)); assertThat(mlMetadata.getDatafeed(datafeedConfig.getId()).get().getStatus(), equalTo(DatafeedStatus.STOPPED));
}); });
} }
@ -189,8 +189,8 @@ public class ScheduledJobsIT extends ESIntegTestCase {
return builder; return builder;
} }
private SchedulerConfig createScheduler(String schedulerId, String jobId, List<String> indexes) { private DatafeedConfig createDatafeed(String datafeedId, String jobId, List<String> indexes) {
SchedulerConfig.Builder builder = new SchedulerConfig.Builder(schedulerId, jobId); DatafeedConfig.Builder builder = new DatafeedConfig.Builder(datafeedId, jobId);
builder.setQueryDelay(1); builder.setQueryDelay(1);
builder.setFrequency(2); builder.setFrequency(2);
builder.setIndexes(indexes); builder.setIndexes(indexes);
@ -213,36 +213,36 @@ public class ScheduledJobsIT extends ESIntegTestCase {
} }
public static void clearMlMetadata(Client client) throws Exception { public static void clearMlMetadata(Client client) throws Exception {
deleteAllSchedulers(client); deleteAllDatafeeds(client);
deleteAllJobs(client); deleteAllJobs(client);
} }
private static void deleteAllSchedulers(Client client) throws Exception { private static void deleteAllDatafeeds(Client client) throws Exception {
MetaData metaData = client.admin().cluster().prepareState().get().getState().getMetaData(); MetaData metaData = client.admin().cluster().prepareState().get().getState().getMetaData();
MlMetadata mlMetadata = metaData.custom(MlMetadata.TYPE); MlMetadata mlMetadata = metaData.custom(MlMetadata.TYPE);
for (Scheduler scheduler : mlMetadata.getSchedulers().values()) { for (Datafeed datafeed : mlMetadata.getDatafeeds().values()) {
String schedulerId = scheduler.getId(); String datafeedId = datafeed.getId();
try { try {
StopSchedulerAction.Response stopResponse = StopDatafeedAction.Response stopResponse =
client.execute(StopSchedulerAction.INSTANCE, new StopSchedulerAction.Request(schedulerId)).get(); client.execute(StopDatafeedAction.INSTANCE, new StopDatafeedAction.Request(datafeedId)).get();
assertTrue(stopResponse.isAcknowledged()); assertTrue(stopResponse.isAcknowledged());
} catch (ExecutionException e) { } catch (ExecutionException e) {
// CONFLICT is ok, as it means the scheduler has already stopped, which isn't an issue at all. // CONFLICT is ok, as it means the datafeed has already stopped, which isn't an issue at all.
if (RestStatus.CONFLICT != ExceptionsHelper.status(e.getCause())) { if (RestStatus.CONFLICT != ExceptionsHelper.status(e.getCause())) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
} }
assertBusy(() -> { assertBusy(() -> {
try { try {
GetSchedulersStatsAction.Request request = new GetSchedulersStatsAction.Request(schedulerId); GetDatafeedsStatsAction.Request request = new GetDatafeedsStatsAction.Request(datafeedId);
GetSchedulersStatsAction.Response r = client.execute(GetSchedulersStatsAction.INSTANCE, request).get(); GetDatafeedsStatsAction.Response r = client.execute(GetDatafeedsStatsAction.INSTANCE, request).get();
assertThat(r.getResponse().results().get(0).getSchedulerStatus(), equalTo(SchedulerStatus.STOPPED)); assertThat(r.getResponse().results().get(0).getDatafeedStatus(), equalTo(DatafeedStatus.STOPPED));
} catch (InterruptedException | ExecutionException e) { } catch (InterruptedException | ExecutionException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
}); });
DeleteSchedulerAction.Response deleteResponse = DeleteDatafeedAction.Response deleteResponse =
client.execute(DeleteSchedulerAction.INSTANCE, new DeleteSchedulerAction.Request(schedulerId)).get(); client.execute(DeleteDatafeedAction.INSTANCE, new DeleteDatafeedAction.Request(datafeedId)).get();
assertTrue(deleteResponse.isAcknowledged()); assertTrue(deleteResponse.isAcknowledged());
} }
} }
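The createDatafeed helper above reduces to a short builder flow. Every call below appears in this diff; the values are illustrative, and the units for query delay and frequency are defined by DatafeedConfig (not shown here):

    DatafeedConfig.Builder builder = new DatafeedConfig.Builder("my-job-datafeed", "my-job");
    builder.setIndexes(Collections.singletonList("data-*"));
    builder.setQueryDelay(1);
    builder.setFrequency(2);
    DatafeedConfig config = builder.build();
    PutDatafeedAction.Request put = new PutDatafeedAction.Request(config);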


@ -5,10 +5,10 @@
*/ */
package org.elasticsearch.xpack.ml.action; package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.xpack.ml.action.DeleteSchedulerAction.Request; import org.elasticsearch.xpack.ml.action.DeleteDatafeedAction.Request;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase; import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
public class DeleteSchedulerRequestTests extends AbstractStreamableTestCase<Request> { public class DeleteDatafeedRequestTests extends AbstractStreamableTestCase<Request> {
@Override @Override
protected Request createTestInstance() { protected Request createTestInstance() {
@ -19,4 +19,4 @@ public class DeleteSchedulerRequestTests extends AbstractStreamableTestCase<Requ
protected Request createBlankInstance() { protected Request createBlankInstance() {
return new Request(); return new Request();
} }
} }


@ -5,11 +5,11 @@
*/ */
package org.elasticsearch.xpack.ml.action; package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.xpack.ml.action.GetSchedulersAction.Request; import org.elasticsearch.xpack.ml.action.GetDatafeedsAction.Request;
import org.elasticsearch.xpack.ml.job.Job; import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase; import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
public class GetSchedulersActionRequestTests extends AbstractStreamableTestCase<Request> { public class GetDatafeedsActionRequestTests extends AbstractStreamableTestCase<Request> {
@Override @Override
protected Request createTestInstance() { protected Request createTestInstance() {


@ -10,35 +10,35 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.xpack.ml.action.GetSchedulersAction.Response; import org.elasticsearch.xpack.ml.action.GetDatafeedsAction.Response;
import org.elasticsearch.xpack.ml.job.persistence.QueryPage; import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
import org.elasticsearch.xpack.ml.scheduler.Scheduler; import org.elasticsearch.xpack.ml.datafeed.Datafeed;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfigTests; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase; import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
public class GetSchedulersActionResponseTests extends AbstractStreamableTestCase<Response> { public class GetDatafeedsActionResponseTests extends AbstractStreamableTestCase<Response> {
@Override @Override
protected Response createTestInstance() { protected Response createTestInstance() {
final Response result; final Response result;
int listSize = randomInt(10); int listSize = randomInt(10);
List<SchedulerConfig> schedulerList = new ArrayList<>(listSize); List<DatafeedConfig> datafeedList = new ArrayList<>(listSize);
for (int j = 0; j < listSize; j++) { for (int j = 0; j < listSize; j++) {
String schedulerId = SchedulerConfigTests.randomValidSchedulerId(); String datafeedId = DatafeedConfigTests.randomValidDatafeedId();
String jobId = randomAsciiOfLength(10); String jobId = randomAsciiOfLength(10);
SchedulerConfig.Builder schedulerConfig = new SchedulerConfig.Builder(schedulerId, jobId); DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, jobId);
schedulerConfig.setIndexes(randomSubsetOf(2, Arrays.asList("index-1", "index-2", "index-3"))); datafeedConfig.setIndexes(randomSubsetOf(2, Arrays.asList("index-1", "index-2", "index-3")));
schedulerConfig.setTypes(randomSubsetOf(2, Arrays.asList("type-1", "type-2", "type-3"))); datafeedConfig.setTypes(randomSubsetOf(2, Arrays.asList("type-1", "type-2", "type-3")));
schedulerConfig.setFrequency(randomNonNegativeLong()); datafeedConfig.setFrequency(randomNonNegativeLong());
schedulerConfig.setQueryDelay(randomNonNegativeLong()); datafeedConfig.setQueryDelay(randomNonNegativeLong());
if (randomBoolean()) { if (randomBoolean()) {
schedulerConfig.setQuery(QueryBuilders.termQuery(randomAsciiOfLength(10), randomAsciiOfLength(10))); datafeedConfig.setQuery(QueryBuilders.termQuery(randomAsciiOfLength(10), randomAsciiOfLength(10)));
} }
if (randomBoolean()) { if (randomBoolean()) {
int scriptsSize = randomInt(3); int scriptsSize = randomInt(3);
@ -47,21 +47,21 @@ public class GetSchedulersActionResponseTests extends AbstractStreamableTestCase
scriptFields.add(new SearchSourceBuilder.ScriptField(randomAsciiOfLength(10), new Script(randomAsciiOfLength(10)), scriptFields.add(new SearchSourceBuilder.ScriptField(randomAsciiOfLength(10), new Script(randomAsciiOfLength(10)),
randomBoolean())); randomBoolean()));
} }
schedulerConfig.setScriptFields(scriptFields); datafeedConfig.setScriptFields(scriptFields);
} }
if (randomBoolean()) { if (randomBoolean()) {
schedulerConfig.setScrollSize(randomIntBetween(0, Integer.MAX_VALUE)); datafeedConfig.setScrollSize(randomIntBetween(0, Integer.MAX_VALUE));
} }
if (randomBoolean()) { if (randomBoolean()) {
AggregatorFactories.Builder aggsBuilder = new AggregatorFactories.Builder(); AggregatorFactories.Builder aggsBuilder = new AggregatorFactories.Builder();
aggsBuilder.addAggregator(AggregationBuilders.avg(randomAsciiOfLength(10))); aggsBuilder.addAggregator(AggregationBuilders.avg(randomAsciiOfLength(10)));
schedulerConfig.setAggregations(aggsBuilder); datafeedConfig.setAggregations(aggsBuilder);
} }
schedulerList.add(schedulerConfig.build()); datafeedList.add(datafeedConfig.build());
} }
result = new Response(new QueryPage<>(schedulerList, schedulerList.size(), Scheduler.RESULTS_FIELD)); result = new Response(new QueryPage<>(datafeedList, datafeedList.size(), Datafeed.RESULTS_FIELD));
return result; return result;
} }
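All of these AbstractStreamableTestCase subclasses exercise the same wire round-trip with the two factory methods. A sketch of what the base class effectively does, assuming the Streamable conventions of this codebase (readFrom/writeTo are not shown in the diff):

    Response original = createTestInstance();
    BytesStreamOutput out = new BytesStreamOutput();
    original.writeTo(out);                      // serialize the randomized instance
    Response copy = createBlankInstance();
    copy.readFrom(out.bytes().streamInput());   // deserialize into a blank instance
    assertEquals(original, copy);               // the round-trip must be lossless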


@ -5,11 +5,11 @@
*/ */
package org.elasticsearch.xpack.ml.action; package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.xpack.ml.action.GetSchedulersStatsAction.Request; import org.elasticsearch.xpack.ml.action.GetDatafeedsStatsAction.Request;
import org.elasticsearch.xpack.ml.job.Job; import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase; import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
public class GetSchedulersStatsActionRequestTests extends AbstractStreamableTestCase<Request> { public class GetDatafeedsStatsActionRequestTests extends AbstractStreamableTestCase<Request> {
@Override @Override
protected Request createTestInstance() { protected Request createTestInstance() {


@ -0,0 +1,43 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.xpack.ml.action.GetDatafeedsStatsAction.Response;
import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
import org.elasticsearch.xpack.ml.datafeed.Datafeed;
import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
import java.util.ArrayList;
import java.util.List;
public class GetDatafeedsStatsActionResponseTests extends AbstractStreamableTestCase<Response> {
@Override
protected Response createTestInstance() {
final Response result;
int listSize = randomInt(10);
List<Response.DatafeedStats> datafeedStatsList = new ArrayList<>(listSize);
for (int j = 0; j < listSize; j++) {
String datafeedId = randomAsciiOfLength(10);
DatafeedStatus datafeedStatus = randomFrom(DatafeedStatus.values());
Response.DatafeedStats datafeedStats = new Response.DatafeedStats(datafeedId, datafeedStatus);
datafeedStatsList.add(datafeedStats);
}
result = new Response(new QueryPage<>(datafeedStatsList, datafeedStatsList.size(), Datafeed.RESULTS_FIELD));
return result;
}
@Override
protected Response createBlankInstance() {
return new Response();
}
}


@ -11,7 +11,7 @@ import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.job.JobStatus; import org.elasticsearch.xpack.ml.job.JobStatus;
import org.elasticsearch.xpack.ml.job.ModelSizeStats; import org.elasticsearch.xpack.ml.job.ModelSizeStats;
import org.elasticsearch.xpack.ml.job.persistence.QueryPage; import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus; import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase; import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
import org.joda.time.DateTime; import org.joda.time.DateTime;


@ -1,43 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.xpack.ml.action.GetSchedulersStatsAction.Response;
import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
import org.elasticsearch.xpack.ml.scheduler.Scheduler;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
import java.util.ArrayList;
import java.util.List;
public class GetSchedulersStatsActionResponseTests extends AbstractStreamableTestCase<Response> {
@Override
protected Response createTestInstance() {
final Response result;
int listSize = randomInt(10);
List<Response.SchedulerStats> schedulerStatsList = new ArrayList<>(listSize);
for (int j = 0; j < listSize; j++) {
String schedulerId = randomAsciiOfLength(10);
SchedulerStatus schedulerStatus = randomFrom(SchedulerStatus.values());
Response.SchedulerStats schedulerStats = new Response.SchedulerStats(schedulerId, schedulerStatus);
schedulerStatsList.add(schedulerStats);
}
result = new Response(new QueryPage<>(schedulerStatsList, schedulerStatsList.size(), Scheduler.RESULTS_FIELD));
return result;
}
@Override
protected Response createBlankInstance() {
return new Response();
}
}


@@ -0,0 +1,44 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.action.PutDatafeedAction.Request;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.xpack.ml.support.AbstractStreamableXContentTestCase;
import org.junit.Before;
import java.util.Arrays;
public class PutDatafeedActionRequestTests extends AbstractStreamableXContentTestCase<Request> {
private String datafeedId;
@Before
public void setUpDatafeedId() {
datafeedId = DatafeedConfigTests.randomValidDatafeedId();
}
@Override
protected Request createTestInstance() {
DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, randomAsciiOfLength(10));
datafeedConfig.setIndexes(Arrays.asList(randomAsciiOfLength(10)));
datafeedConfig.setTypes(Arrays.asList(randomAsciiOfLength(10)));
return new Request(datafeedConfig.build());
}
@Override
protected Request createBlankInstance() {
return new Request();
}
@Override
protected Request parseInstance(XContentParser parser) {
return Request.parseRequest(datafeedId, parser);
}
}


@@ -0,0 +1,31 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.xpack.ml.action.PutDatafeedAction.Response;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
import java.util.Arrays;
public class PutDatafeedActionResponseTests extends AbstractStreamableTestCase<Response> {
@Override
protected Response createTestInstance() {
DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(
DatafeedConfigTests.randomValidDatafeedId(), randomAsciiOfLength(10));
datafeedConfig.setIndexes(Arrays.asList(randomAsciiOfLength(10)));
datafeedConfig.setTypes(Arrays.asList(randomAsciiOfLength(10)));
return new Response(randomBoolean(), datafeedConfig.build());
}
@Override
protected Response createBlankInstance() {
return new Response();
}
}


@@ -1,44 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.action.PutSchedulerAction.Request;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfigTests;
import org.elasticsearch.xpack.ml.support.AbstractStreamableXContentTestCase;
import org.junit.Before;
import java.util.Arrays;
public class PutSchedulerActionRequestTests extends AbstractStreamableXContentTestCase<Request> {
private String schedulerId;
@Before
public void setUpSchedulerId() {
schedulerId = SchedulerConfigTests.randomValidSchedulerId();
}
@Override
protected Request createTestInstance() {
SchedulerConfig.Builder schedulerConfig = new SchedulerConfig.Builder(schedulerId, randomAsciiOfLength(10));
schedulerConfig.setIndexes(Arrays.asList(randomAsciiOfLength(10)));
schedulerConfig.setTypes(Arrays.asList(randomAsciiOfLength(10)));
return new Request(schedulerConfig.build());
}
@Override
protected Request createBlankInstance() {
return new Request();
}
@Override
protected Request parseInstance(XContentParser parser) {
return Request.parseRequest(schedulerId, parser);
}
}


@@ -1,31 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.xpack.ml.action.PutSchedulerAction.Response;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfigTests;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
import java.util.Arrays;
public class PutSchedulerActionResponseTests extends AbstractStreamableTestCase<Response> {
@Override
protected Response createTestInstance() {
SchedulerConfig.Builder schedulerConfig = new SchedulerConfig.Builder(
SchedulerConfigTests.randomValidSchedulerId(), randomAsciiOfLength(10));
schedulerConfig.setIndexes(Arrays.asList(randomAsciiOfLength(10)));
schedulerConfig.setTypes(Arrays.asList(randomAsciiOfLength(10)));
return new Response(randomBoolean(), schedulerConfig.build());
}
@Override
protected Response createBlankInstance() {
return new Response();
}
}


@@ -6,10 +6,10 @@
 package org.elasticsearch.xpack.ml.action;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.xpack.ml.action.StartSchedulerAction.Request;
+import org.elasticsearch.xpack.ml.action.StartDatafeedAction.Request;
 import org.elasticsearch.xpack.ml.support.AbstractStreamableXContentTestCase;
-public class StartSchedulerActionRequestTests extends AbstractStreamableXContentTestCase<StartSchedulerAction.Request> {
+public class StartDatafeedActionRequestTests extends AbstractStreamableXContentTestCase<StartDatafeedAction.Request> {
     @Override
     protected Request createTestInstance() {


@@ -8,18 +8,18 @@ package org.elasticsearch.xpack.ml.action;
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.xpack.ml.action.StopSchedulerAction.Request;
+import org.elasticsearch.xpack.ml.action.StopDatafeedAction.Request;
 import org.elasticsearch.xpack.ml.job.Job;
 import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
-import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
-import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
+import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
+import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
 import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
-import static org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunnerTests.createScheduledJob;
-import static org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunnerTests.createSchedulerConfig;
+import static org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunnerTests.createDatafeedJob;
+import static org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunnerTests.createDatafeedConfig;
 import static org.hamcrest.Matchers.equalTo;
-public class StopSchedulerActionRequestTests extends AbstractStreamableTestCase<StopSchedulerAction.Request> {
+public class StopDatafeedActionRequestTests extends AbstractStreamableTestCase<StopDatafeedAction.Request> {
     @Override
     protected Request createTestInstance() {
@@ -34,23 +34,23 @@ public class StopSchedulerActionRequestTests extends AbstractStreamableTestCase<
     }
     public void testValidate() {
-        Job job = createScheduledJob().build();
+        Job job = createDatafeedJob().build();
         MlMetadata mlMetadata1 = new MlMetadata.Builder().putJob(job, false).build();
-        Exception e = expectThrows(ResourceNotFoundException.class, () -> StopSchedulerAction.validate("foo", mlMetadata1));
-        assertThat(e.getMessage(), equalTo("No scheduler with id [foo] exists"));
-        SchedulerConfig schedulerConfig = createSchedulerConfig("foo", "foo").build();
+        Exception e = expectThrows(ResourceNotFoundException.class, () -> StopDatafeedAction.validate("foo", mlMetadata1));
+        assertThat(e.getMessage(), equalTo("No datafeed with id [foo] exists"));
+        DatafeedConfig datafeedConfig = createDatafeedConfig("foo", "foo").build();
         MlMetadata mlMetadata2 = new MlMetadata.Builder().putJob(job, false)
-                .putScheduler(schedulerConfig)
+                .putDatafeed(datafeedConfig)
                 .build();
-        e = expectThrows(ElasticsearchStatusException.class, () -> StopSchedulerAction.validate("foo", mlMetadata2));
-        assertThat(e.getMessage(), equalTo("scheduler already stopped, expected scheduler status [STARTED], but got [STOPPED]"));
+        e = expectThrows(ElasticsearchStatusException.class, () -> StopDatafeedAction.validate("foo", mlMetadata2));
+        assertThat(e.getMessage(), equalTo("datafeed already stopped, expected datafeed status [STARTED], but got [STOPPED]"));
         MlMetadata mlMetadata3 = new MlMetadata.Builder().putJob(job, false)
-                .putScheduler(schedulerConfig)
-                .updateSchedulerStatus("foo", SchedulerStatus.STARTED)
+                .putDatafeed(datafeedConfig)
+                .updateDatafeedStatus("foo", DatafeedStatus.STARTED)
                 .build();
-        StopSchedulerAction.validate("foo", mlMetadata3);
+        StopDatafeedAction.validate("foo", mlMetadata3);
     }
 }
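For orientation, the validation these assertions pin down is roughly the following sketch. The production StopDatafeedAction.validate is not shown in this commit; the body below is inferred from the expected exceptions and messages in testValidate, and the getDatafeed/getDatafeedStatus accessors and the REST status code are assumptions, not taken from the source.

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;

final class StopDatafeedValidateSketch {
    static void validate(String datafeedId, MlMetadata mlMetadata) {
        DatafeedConfig datafeed = mlMetadata.getDatafeed(datafeedId); // assumed accessor
        if (datafeed == null) {
            // Message text matches the first assertion in testValidate above.
            throw new ResourceNotFoundException("No datafeed with id [" + datafeedId + "] exists");
        }
        DatafeedStatus status = mlMetadata.getDatafeedStatus(datafeedId); // assumed accessor
        if (status != DatafeedStatus.STARTED) {
            // Message text matches the second assertion; the status code is assumed.
            throw new ElasticsearchStatusException(
                    "datafeed already stopped, expected datafeed status [{}], but got [{}]",
                    RestStatus.CONFLICT, DatafeedStatus.STARTED, status);
        }
    }
}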


@@ -5,19 +5,19 @@
  */
 package org.elasticsearch.xpack.ml.action;
-import org.elasticsearch.xpack.ml.action.UpdateSchedulerStatusAction.Request;
-import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
+import org.elasticsearch.xpack.ml.action.UpdateDatafeedStatusAction.Request;
+import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
 import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
-public class UpdateSchedulerStatusRequestTests extends AbstractStreamableTestCase<Request> {
+public class UpdateDatafeedStatusRequestTests extends AbstractStreamableTestCase<Request> {
     @Override
     protected Request createTestInstance() {
-        return new Request(randomAsciiOfLengthBetween(1, 20), randomFrom(SchedulerStatus.values()));
+        return new Request(randomAsciiOfLengthBetween(1, 20), randomFrom(DatafeedStatus.values()));
     }
     @Override
     protected Request createBlankInstance() {
         return new Request();
     }
 }


@@ -3,7 +3,7 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.ml.scheduler;
+package org.elasticsearch.xpack.ml.datafeed;
 import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -23,15 +23,15 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
-public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerConfig> {
+public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedConfig> {
     @Override
-    protected SchedulerConfig createTestInstance() {
-        return createRandomizedSchedulerConfig(randomAsciiOfLength(10));
+    protected DatafeedConfig createTestInstance() {
+        return createRandomizedDatafeedConfig(randomAsciiOfLength(10));
     }
-    public static SchedulerConfig createRandomizedSchedulerConfig(String jobId) {
-        SchedulerConfig.Builder builder = new SchedulerConfig.Builder(randomValidSchedulerId(), jobId);
+    public static DatafeedConfig createRandomizedDatafeedConfig(String jobId) {
+        DatafeedConfig.Builder builder = new DatafeedConfig.Builder(randomValidDatafeedId(), jobId);
         builder.setIndexes(randomStringList(1, 10));
         builder.setTypes(randomStringList(1, 10));
         if (randomBoolean()) {
@@ -77,64 +77,64 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
     }
     @Override
-    protected Writeable.Reader<SchedulerConfig> instanceReader() {
-        return SchedulerConfig::new;
+    protected Writeable.Reader<DatafeedConfig> instanceReader() {
+        return DatafeedConfig::new;
     }
     @Override
-    protected SchedulerConfig parseInstance(XContentParser parser) {
-        return SchedulerConfig.PARSER.apply(parser, null).build();
+    protected DatafeedConfig parseInstance(XContentParser parser) {
+        return DatafeedConfig.PARSER.apply(parser, null).build();
     }
     public void testFillDefaults() {
-        SchedulerConfig.Builder expectedSchedulerConfig = new SchedulerConfig.Builder("scheduler1", "job1");
-        expectedSchedulerConfig.setIndexes(Arrays.asList("index"));
-        expectedSchedulerConfig.setTypes(Arrays.asList("type"));
-        expectedSchedulerConfig.setQueryDelay(60L);
-        expectedSchedulerConfig.setScrollSize(1000);
-        SchedulerConfig.Builder defaultedSchedulerConfig = new SchedulerConfig.Builder("scheduler1", "job1");
-        defaultedSchedulerConfig.setIndexes(Arrays.asList("index"));
-        defaultedSchedulerConfig.setTypes(Arrays.asList("type"));
-        assertEquals(expectedSchedulerConfig.build(), defaultedSchedulerConfig.build());
+        DatafeedConfig.Builder expectedDatafeedConfig = new DatafeedConfig.Builder("datafeed1", "job1");
+        expectedDatafeedConfig.setIndexes(Arrays.asList("index"));
+        expectedDatafeedConfig.setTypes(Arrays.asList("type"));
+        expectedDatafeedConfig.setQueryDelay(60L);
+        expectedDatafeedConfig.setScrollSize(1000);
+        DatafeedConfig.Builder defaultedDatafeedConfig = new DatafeedConfig.Builder("datafeed1", "job1");
+        defaultedDatafeedConfig.setIndexes(Arrays.asList("index"));
+        defaultedDatafeedConfig.setTypes(Arrays.asList("type"));
+        assertEquals(expectedDatafeedConfig.build(), defaultedDatafeedConfig.build());
     }
     public void testEquals_GivenDifferentQueryDelay() {
-        SchedulerConfig.Builder b1 = createFullyPopulated();
-        SchedulerConfig.Builder b2 = createFullyPopulated();
+        DatafeedConfig.Builder b1 = createFullyPopulated();
+        DatafeedConfig.Builder b2 = createFullyPopulated();
         b2.setQueryDelay(120L);
-        SchedulerConfig sc1 = b1.build();
-        SchedulerConfig sc2 = b2.build();
+        DatafeedConfig sc1 = b1.build();
+        DatafeedConfig sc2 = b2.build();
         assertFalse(sc1.equals(sc2));
         assertFalse(sc2.equals(sc1));
     }
     public void testEquals_GivenDifferentScrollSize() {
-        SchedulerConfig.Builder b1 = createFullyPopulated();
-        SchedulerConfig.Builder b2 = createFullyPopulated();
+        DatafeedConfig.Builder b1 = createFullyPopulated();
+        DatafeedConfig.Builder b2 = createFullyPopulated();
         b2.setScrollSize(1);
-        SchedulerConfig sc1 = b1.build();
-        SchedulerConfig sc2 = b2.build();
+        DatafeedConfig sc1 = b1.build();
+        DatafeedConfig sc2 = b2.build();
         assertFalse(sc1.equals(sc2));
         assertFalse(sc2.equals(sc1));
     }
     public void testEquals_GivenDifferentFrequency() {
-        SchedulerConfig.Builder b1 = createFullyPopulated();
-        SchedulerConfig.Builder b2 = createFullyPopulated();
+        DatafeedConfig.Builder b1 = createFullyPopulated();
+        DatafeedConfig.Builder b2 = createFullyPopulated();
         b2.setFrequency(120L);
-        SchedulerConfig sc1 = b1.build();
-        SchedulerConfig sc2 = b2.build();
+        DatafeedConfig sc1 = b1.build();
+        DatafeedConfig sc2 = b2.build();
         assertFalse(sc1.equals(sc2));
         assertFalse(sc2.equals(sc1));
     }
     public void testEquals_GivenDifferentIndexes() {
-        SchedulerConfig.Builder sc1 = createFullyPopulated();
-        SchedulerConfig.Builder sc2 = createFullyPopulated();
+        DatafeedConfig.Builder sc1 = createFullyPopulated();
+        DatafeedConfig.Builder sc2 = createFullyPopulated();
         sc2.setIndexes(Arrays.asList("blah", "di", "blah"));
         assertFalse(sc1.build().equals(sc2.build()));
@@ -142,8 +142,8 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
     }
     public void testEquals_GivenDifferentTypes() {
-        SchedulerConfig.Builder sc1 = createFullyPopulated();
-        SchedulerConfig.Builder sc2 = createFullyPopulated();
+        DatafeedConfig.Builder sc1 = createFullyPopulated();
+        DatafeedConfig.Builder sc2 = createFullyPopulated();
         sc2.setTypes(Arrays.asList("blah", "di", "blah"));
         assertFalse(sc1.build().equals(sc2.build()));
@@ -151,27 +151,27 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
     }
     public void testEquals_GivenDifferentQuery() {
-        SchedulerConfig.Builder b1 = createFullyPopulated();
-        SchedulerConfig.Builder b2 = createFullyPopulated();
+        DatafeedConfig.Builder b1 = createFullyPopulated();
+        DatafeedConfig.Builder b2 = createFullyPopulated();
         b2.setQuery(QueryBuilders.termQuery("foo", "bar"));
-        SchedulerConfig sc1 = b1.build();
-        SchedulerConfig sc2 = b2.build();
+        DatafeedConfig sc1 = b1.build();
+        DatafeedConfig sc2 = b2.build();
         assertFalse(sc1.equals(sc2));
         assertFalse(sc2.equals(sc1));
     }
     public void testEquals_GivenDifferentAggregations() {
-        SchedulerConfig.Builder sc1 = createFullyPopulated();
-        SchedulerConfig.Builder sc2 = createFullyPopulated();
+        DatafeedConfig.Builder sc1 = createFullyPopulated();
+        DatafeedConfig.Builder sc2 = createFullyPopulated();
         sc2.setAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.count("foo")));
         assertFalse(sc1.build().equals(sc2.build()));
         assertFalse(sc2.build().equals(sc1.build()));
     }
-    private static SchedulerConfig.Builder createFullyPopulated() {
-        SchedulerConfig.Builder sc = new SchedulerConfig.Builder("scheduler1", "job1");
+    private static DatafeedConfig.Builder createFullyPopulated() {
+        DatafeedConfig.Builder sc = new DatafeedConfig.Builder("datafeed1", "job1");
         sc.setIndexes(Arrays.asList("myIndex"));
         sc.setTypes(Arrays.asList("myType1", "myType2"));
         sc.setFrequency(60L);
@@ -183,62 +183,62 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
     }
     public void testCheckValid_GivenNullIndexes() throws IOException {
-        SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
+        DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
         expectThrows(IllegalArgumentException.class, () -> conf.setIndexes(null));
     }
     public void testCheckValid_GivenEmptyIndexes() throws IOException {
-        SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
+        DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
         conf.setIndexes(Collections.emptyList());
         IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
-        assertEquals(Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, "indexes", "[]"), e.getMessage());
+        assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "indexes", "[]"), e.getMessage());
     }
     public void testCheckValid_GivenIndexesContainsOnlyNulls() throws IOException {
         List<String> indexes = new ArrayList<>();
         indexes.add(null);
         indexes.add(null);
-        SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
+        DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
         conf.setIndexes(indexes);
         IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
-        assertEquals(Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, "indexes", "[null, null]"), e.getMessage());
+        assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "indexes", "[null, null]"), e.getMessage());
     }
     public void testCheckValid_GivenIndexesContainsOnlyEmptyStrings() throws IOException {
         List<String> indexes = new ArrayList<>();
         indexes.add("");
         indexes.add("");
-        SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
+        DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
         conf.setIndexes(indexes);
         IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
-        assertEquals(Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, "indexes", "[, ]"), e.getMessage());
+        assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "indexes", "[, ]"), e.getMessage());
     }
     public void testCheckValid_GivenNegativeQueryDelay() throws IOException {
-        SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
+        DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
         IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> conf.setQueryDelay(-10L));
-        assertEquals(Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, "query_delay", -10L), e.getMessage());
+        assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "query_delay", -10L), e.getMessage());
     }
     public void testCheckValid_GivenZeroFrequency() throws IOException {
-        SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
+        DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
         IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> conf.setFrequency(0L));
-        assertEquals(Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, "frequency", 0L), e.getMessage());
+        assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "frequency", 0L), e.getMessage());
     }
     public void testCheckValid_GivenNegativeFrequency() throws IOException {
-        SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
+        DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
         IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> conf.setFrequency(-600L));
-        assertEquals(Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, "frequency", -600L), e.getMessage());
+        assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "frequency", -600L), e.getMessage());
     }
     public void testCheckValid_GivenNegativeScrollSize() throws IOException {
-        SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
+        DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
         IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> conf.setScrollSize(-1000));
-        assertEquals(Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, "scroll_size", -1000L), e.getMessage());
+        assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "scroll_size", -1000L), e.getMessage());
     }
-    public static String randomValidSchedulerId() {
+    public static String randomValidDatafeedId() {
         CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray());
         return generator.ofCodePointsLength(random(), 10, 10);
     }
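Taken together, testFillDefaults above pins down the builder defaults after the rename: a DatafeedConfig.Builder given only indexes and types fills in query_delay = 60 and scroll_size = 1000. A minimal usage sketch follows; the setter names and values come from the test, while the wrapper class and main method are illustrative only.

import java.util.Arrays;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;

public class DatafeedDefaultsExample {
    public static void main(String[] args) {
        // Explicitly set the two values the test names as defaults.
        DatafeedConfig.Builder explicit = new DatafeedConfig.Builder("datafeed1", "job1");
        explicit.setIndexes(Arrays.asList("index"));
        explicit.setTypes(Arrays.asList("type"));
        explicit.setQueryDelay(60L);
        explicit.setScrollSize(1000);
        // Leave the same two values unset and let the builder default them.
        DatafeedConfig.Builder defaulted = new DatafeedConfig.Builder("datafeed1", "job1");
        defaulted.setIndexes(Arrays.asList("index"));
        defaulted.setTypes(Arrays.asList("type"));
        // Per testFillDefaults, the two built configs compare equal.
        System.out.println(explicit.build().equals(defaulted.build())); // expected: true
    }
}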


@@ -3,7 +3,7 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.ml.scheduler;
+package org.elasticsearch.xpack.ml.datafeed;
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.ResourceNotFoundException;
@@ -20,9 +20,9 @@ import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.ml.MlPlugin;
 import org.elasticsearch.xpack.ml.action.FlushJobAction;
-import org.elasticsearch.xpack.ml.action.InternalStartSchedulerAction;
+import org.elasticsearch.xpack.ml.action.InternalStartDatafeedAction;
 import org.elasticsearch.xpack.ml.action.PostDataAction;
-import org.elasticsearch.xpack.ml.action.UpdateSchedulerStatusAction;
+import org.elasticsearch.xpack.ml.action.UpdateDatafeedStatusAction;
 import org.elasticsearch.xpack.ml.job.AnalysisConfig;
 import org.elasticsearch.xpack.ml.job.DataCounts;
 import org.elasticsearch.xpack.ml.job.DataDescription;
@@ -32,8 +32,8 @@ import org.elasticsearch.xpack.ml.job.JobStatus;
 import org.elasticsearch.xpack.ml.job.audit.Auditor;
 import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
 import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
-import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractor;
-import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractorFactory;
+import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
+import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
 import org.junit.Before;
 import java.io.ByteArrayInputStream;
@@ -45,8 +45,8 @@ import java.util.Optional;
 import java.util.concurrent.ExecutorService;
 import java.util.function.Consumer;
-import static org.elasticsearch.xpack.ml.action.UpdateSchedulerStatusAction.INSTANCE;
-import static org.elasticsearch.xpack.ml.action.UpdateSchedulerStatusAction.Request;
+import static org.elasticsearch.xpack.ml.action.UpdateDatafeedStatusAction.INSTANCE;
+import static org.elasticsearch.xpack.ml.action.UpdateDatafeedStatusAction.Request;
 import static org.hamcrest.Matchers.equalTo;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyString;
@@ -59,7 +59,7 @@ import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
-public class ScheduledJobRunnerTests extends ESTestCase {
+public class DatafeedJobRunnerTests extends ESTestCase {
     private Client client;
     private ActionFuture<PostDataAction.Response> jobDataFuture;
@@ -67,7 +67,7 @@ public class ScheduledJobRunnerTests extends ESTestCase {
     private ClusterService clusterService;
     private ThreadPool threadPool;
    private DataExtractorFactory dataExtractorFactory;
-    private ScheduledJobRunner scheduledJobRunner;
+    private DatafeedJobRunner datafeedJobRunner;
     private long currentTime = 120000;
     @Before
@@ -80,9 +80,9 @@ public class ScheduledJobRunnerTests extends ESTestCase {
         doAnswer(invocation -> {
             @SuppressWarnings("rawtypes")
             ActionListener<Object> actionListener = (ActionListener) invocation.getArguments()[2];
-            actionListener.onResponse(new UpdateSchedulerStatusAction.Response());
+            actionListener.onResponse(new UpdateDatafeedStatusAction.Response());
             return null;
-        }).when(client).execute(same(UpdateSchedulerStatusAction.INSTANCE), any(), any());
+        }).when(client).execute(same(UpdateDatafeedStatusAction.INSTANCE), any(), any());
         JobProvider jobProvider = mock(JobProvider.class);
         Mockito.doAnswer(invocationOnMock -> {
@@ -100,13 +100,13 @@ public class ScheduledJobRunnerTests extends ESTestCase {
             ((Runnable) invocation.getArguments()[0]).run();
             return null;
         }).when(executorService).submit(any(Runnable.class));
-        when(threadPool.executor(MlPlugin.SCHEDULED_RUNNER_THREAD_POOL_NAME)).thenReturn(executorService);
+        when(threadPool.executor(MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME)).thenReturn(executorService);
         when(client.execute(same(PostDataAction.INSTANCE), any())).thenReturn(jobDataFuture);
         when(client.execute(same(FlushJobAction.INSTANCE), any())).thenReturn(flushJobFuture);
-        scheduledJobRunner = new ScheduledJobRunner(threadPool, client, clusterService, jobProvider, () -> currentTime) {
+        datafeedJobRunner = new DatafeedJobRunner(threadPool, client, clusterService, jobProvider, () -> currentTime) {
             @Override
-            DataExtractorFactory createDataExtractorFactory(SchedulerConfig schedulerConfig, Job job) {
+            DataExtractorFactory createDataExtractorFactory(DatafeedConfig datafeedConfig, Job job) {
                 return dataExtractorFactory;
             }
         };
@@ -121,13 +121,13 @@
     }
     public void testStart_GivenNewlyCreatedJobLoopBack() throws Exception {
-        Job.Builder jobBuilder = createScheduledJob();
-        SchedulerConfig schedulerConfig = createSchedulerConfig("scheduler1", "foo").build();
+        Job.Builder jobBuilder = createDatafeedJob();
+        DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed1", "foo").build();
         DataCounts dataCounts = new DataCounts("foo", 1, 0, 0, 0, 0, 0, 0, new Date(0), new Date(0));
         Job job = jobBuilder.build();
         MlMetadata mlMetadata = new MlMetadata.Builder()
                 .putJob(job, false)
-                .putScheduler(schedulerConfig)
+                .putDatafeed(datafeedConfig)
                 .updateStatus("foo", JobStatus.OPENED, null)
                 .build();
         when(clusterService.state()).thenReturn(ClusterState.builder(new ClusterName("_name"))
@@ -141,15 +141,15 @@
         when(dataExtractor.next()).thenReturn(Optional.of(in));
         when(jobDataFuture.get()).thenReturn(new PostDataAction.Response(dataCounts));
         Consumer<Exception> handler = mockConsumer();
-        InternalStartSchedulerAction.SchedulerTask task = mock(InternalStartSchedulerAction.SchedulerTask.class);
-        scheduledJobRunner.run("scheduler1", 0L, 60000L, task, handler);
-        verify(threadPool, times(1)).executor(MlPlugin.SCHEDULED_RUNNER_THREAD_POOL_NAME);
+        InternalStartDatafeedAction.DatafeedTask task = mock(InternalStartDatafeedAction.DatafeedTask.class);
+        datafeedJobRunner.run("datafeed1", 0L, 60000L, task, handler);
+        verify(threadPool, times(1)).executor(MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME);
         verify(threadPool, never()).schedule(any(), any(), any());
         verify(client).execute(same(PostDataAction.INSTANCE), eq(createExpectedPostDataRequest(job)));
         verify(client).execute(same(FlushJobAction.INSTANCE), any());
-        verify(client).execute(same(INSTANCE), eq(new Request("scheduler1", SchedulerStatus.STARTED)), any());
-        verify(client).execute(same(INSTANCE), eq(new Request("scheduler1", SchedulerStatus.STOPPED)), any());
+        verify(client).execute(same(INSTANCE), eq(new Request("datafeed1", DatafeedStatus.STARTED)), any());
+        verify(client).execute(same(INSTANCE), eq(new Request("datafeed1", DatafeedStatus.STOPPED)), any());
     }
     private static PostDataAction.Request createExpectedPostDataRequest(Job job) {
@@ -162,13 +162,13 @@
     }
     public void testStart_extractionProblem() throws Exception {
-        Job.Builder jobBuilder = createScheduledJob();
-        SchedulerConfig schedulerConfig = createSchedulerConfig("scheduler1", "foo").build();
+        Job.Builder jobBuilder = createDatafeedJob();
+        DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed1", "foo").build();
         DataCounts dataCounts = new DataCounts("foo", 1, 0, 0, 0, 0, 0, 0, new Date(0), new Date(0));
         Job job = jobBuilder.build();
         MlMetadata mlMetadata = new MlMetadata.Builder()
                 .putJob(job, false)
-                .putScheduler(schedulerConfig)
+                .putDatafeed(datafeedConfig)
                 .updateStatus("foo", JobStatus.OPENED, null)
                 .build();
         when(clusterService.state()).thenReturn(ClusterState.builder(new ClusterName("_name"))
@@ -181,25 +181,25 @@
         when(dataExtractor.next()).thenThrow(new RuntimeException("dummy"));
         when(jobDataFuture.get()).thenReturn(new PostDataAction.Response(dataCounts));
         Consumer<Exception> handler = mockConsumer();
-        InternalStartSchedulerAction.SchedulerTask task = mock(InternalStartSchedulerAction.SchedulerTask.class);
-        scheduledJobRunner.run("scheduler1", 0L, 60000L, task, handler);
-        verify(threadPool, times(1)).executor(MlPlugin.SCHEDULED_RUNNER_THREAD_POOL_NAME);
+        InternalStartDatafeedAction.DatafeedTask task = mock(InternalStartDatafeedAction.DatafeedTask.class);
+        datafeedJobRunner.run("datafeed1", 0L, 60000L, task, handler);
+        verify(threadPool, times(1)).executor(MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME);
         verify(threadPool, never()).schedule(any(), any(), any());
         verify(client, never()).execute(same(PostDataAction.INSTANCE), eq(new PostDataAction.Request("foo")));
         verify(client, never()).execute(same(FlushJobAction.INSTANCE), any());
-        verify(client).execute(same(INSTANCE), eq(new Request("scheduler1", SchedulerStatus.STARTED)), any());
-        verify(client).execute(same(INSTANCE), eq(new Request("scheduler1", SchedulerStatus.STOPPED)), any());
+        verify(client).execute(same(INSTANCE), eq(new Request("datafeed1", DatafeedStatus.STARTED)), any());
+        verify(client).execute(same(INSTANCE), eq(new Request("datafeed1", DatafeedStatus.STOPPED)), any());
     }
     public void testStart_GivenNewlyCreatedJobLoopBackAndRealtime() throws Exception {
-        Job.Builder jobBuilder = createScheduledJob();
-        SchedulerConfig schedulerConfig = createSchedulerConfig("scheduler1", "foo").build();
+        Job.Builder jobBuilder = createDatafeedJob();
+        DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed1", "foo").build();
         DataCounts dataCounts = new DataCounts("foo", 1, 0, 0, 0, 0, 0, 0, new Date(0), new Date(0));
         Job job = jobBuilder.build();
         MlMetadata mlMetadata = new MlMetadata.Builder()
                 .putJob(job, false)
-                .putScheduler(schedulerConfig)
+                .putDatafeed(datafeedConfig)
                 .updateStatus("foo", JobStatus.OPENED, null)
                 .build();
         when(clusterService.state()).thenReturn(ClusterState.builder(new ClusterName("_name"))
@@ -214,29 +214,29 @@
         when(jobDataFuture.get()).thenReturn(new PostDataAction.Response(dataCounts));
         Consumer<Exception> handler = mockConsumer();
         boolean cancelled = randomBoolean();
-        InternalStartSchedulerAction.SchedulerTask task =
-                new InternalStartSchedulerAction.SchedulerTask(1, "type", "action", null, "scheduler1");
-        scheduledJobRunner.run("scheduler1", 0L, null, task, handler);
-        verify(threadPool, times(1)).executor(MlPlugin.SCHEDULED_RUNNER_THREAD_POOL_NAME);
+        InternalStartDatafeedAction.DatafeedTask task =
+                new InternalStartDatafeedAction.DatafeedTask(1, "type", "action", null, "datafeed1");
+        datafeedJobRunner.run("datafeed1", 0L, null, task, handler);
+        verify(threadPool, times(1)).executor(MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME);
         if (cancelled) {
             task.stop();
-            verify(client).execute(same(INSTANCE), eq(new Request("scheduler1", SchedulerStatus.STOPPED)), any());
+            verify(client).execute(same(INSTANCE), eq(new Request("datafeed1", DatafeedStatus.STOPPED)), any());
         } else {
             verify(client).execute(same(PostDataAction.INSTANCE), eq(createExpectedPostDataRequest(job)));
             verify(client).execute(same(FlushJobAction.INSTANCE), any());
-            verify(threadPool, times(1)).schedule(eq(new TimeValue(480100)), eq(MlPlugin.SCHEDULED_RUNNER_THREAD_POOL_NAME), any());
+            verify(threadPool, times(1)).schedule(eq(new TimeValue(480100)), eq(MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME), any());
         }
     }
-    public static SchedulerConfig.Builder createSchedulerConfig(String schedulerId, String jobId) {
-        SchedulerConfig.Builder schedulerConfig = new SchedulerConfig.Builder(schedulerId, jobId);
-        schedulerConfig.setIndexes(Arrays.asList("myIndex"));
-        schedulerConfig.setTypes(Arrays.asList("myType"));
-        return schedulerConfig;
+    public static DatafeedConfig.Builder createDatafeedConfig(String datafeedId, String jobId) {
+        DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, jobId);
+        datafeedConfig.setIndexes(Arrays.asList("myIndex"));
+        datafeedConfig.setTypes(Arrays.asList("myType"));
+        return datafeedConfig;
     }
-    public static Job.Builder createScheduledJob() {
+    public static Job.Builder createDatafeedJob() {
         AnalysisConfig.Builder acBuilder = new AnalysisConfig.Builder(Arrays.asList(new Detector.Builder("metric", "field").build()));
         acBuilder.setBucketSpan(3600L);
         acBuilder.setDetectors(Arrays.asList(new Detector.Builder("metric", "field").build()));
@@ -247,33 +247,33 @@
     }
     public void testValidate() {
-        Job job1 = createScheduledJob().build();
+        Job job1 = createDatafeedJob().build();
         MlMetadata mlMetadata1 = new MlMetadata.Builder()
                 .putJob(job1, false)
                 .build();
         Exception e = expectThrows(ResourceNotFoundException.class,
-                () -> ScheduledJobRunner.validate("some-scheduler", mlMetadata1));
-        assertThat(e.getMessage(), equalTo("No scheduler with id [some-scheduler] exists"));
-        SchedulerConfig schedulerConfig1 = createSchedulerConfig("foo-scheduler", "foo").build();
+                () -> DatafeedJobRunner.validate("some-datafeed", mlMetadata1));
+        assertThat(e.getMessage(), equalTo("No datafeed with id [some-datafeed] exists"));
+        DatafeedConfig datafeedConfig1 = createDatafeedConfig("foo-datafeed", "foo").build();
         MlMetadata mlMetadata2 = new MlMetadata.Builder(mlMetadata1)
-                .putScheduler(schedulerConfig1)
+                .putDatafeed(datafeedConfig1)
                 .build();
         e = expectThrows(ElasticsearchStatusException.class,
-                () -> ScheduledJobRunner.validate("foo-scheduler", mlMetadata2));
-        assertThat(e.getMessage(), equalTo("cannot start scheduler, expected job status [OPENED], but got [CLOSED]"));
+                () -> DatafeedJobRunner.validate("foo-datafeed", mlMetadata2));
+        assertThat(e.getMessage(), equalTo("cannot start datafeed, expected job status [OPENED], but got [CLOSED]"));
         MlMetadata mlMetadata3 = new MlMetadata.Builder(mlMetadata2)
                 .updateStatus("foo", JobStatus.OPENED, null)
-                .updateSchedulerStatus("foo-scheduler", SchedulerStatus.STARTED)
+                .updateDatafeedStatus("foo-datafeed", DatafeedStatus.STARTED)
                 .build();
         e = expectThrows(ElasticsearchStatusException.class,
-                () -> ScheduledJobRunner.validate("foo-scheduler", mlMetadata3));
-        assertThat(e.getMessage(), equalTo("scheduler already started, expected scheduler status [STOPPED], but got [STARTED]"));
+                () -> DatafeedJobRunner.validate("foo-datafeed", mlMetadata3));
+        assertThat(e.getMessage(), equalTo("datafeed already started, expected datafeed status [STOPPED], but got [STARTED]"));
     }
     @SuppressWarnings("unchecked")
     private Consumer<Exception> mockConsumer() {
         return mock(Consumer.class);
     }
 }


@@ -3,7 +3,7 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.ml.scheduler;
+package org.elasticsearch.xpack.ml.datafeed;
 import org.elasticsearch.action.ActionFuture;
 import org.elasticsearch.client.Client;
@@ -13,8 +13,8 @@ import org.elasticsearch.xpack.ml.action.PostDataAction;
 import org.elasticsearch.xpack.ml.job.DataCounts;
 import org.elasticsearch.xpack.ml.job.DataDescription;
 import org.elasticsearch.xpack.ml.job.audit.Auditor;
-import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractor;
-import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractorFactory;
+import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
+import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
 import org.junit.Before;
 import org.mockito.ArgumentCaptor;
@@ -35,7 +35,7 @@ import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
-public class ScheduledJobTests extends ESTestCase {
+public class DatafeedJobTests extends ESTestCase {
     private Auditor auditor;
     private DataExtractorFactory dataExtractorFactory;
@@ -73,8 +73,8 @@
     }
     public void testLookBackRunWithEndTime() throws Exception {
-        ScheduledJob scheduledJob = createScheduledJob(1000, 500, -1, -1);
-        assertNull(scheduledJob.runLookBack(0L, 1000L));
+        DatafeedJob datafeedJob = createDatafeedJob(1000, 500, -1, -1);
+        assertNull(datafeedJob.runLookBack(0L, 1000L));
         verify(dataExtractorFactory).newExtractor(0L, 1000L);
         FlushJobAction.Request flushRequest = new FlushJobAction.Request("_job_id");
@@ -86,8 +86,8 @@
         currentTime = 2000L;
         long frequencyMs = 1000;
         long queryDelayMs = 500;
-        ScheduledJob scheduledJob = createScheduledJob(frequencyMs, queryDelayMs, -1, -1);
-        long next = scheduledJob.runLookBack(0L, null);
+        DatafeedJob datafeedJob = createDatafeedJob(frequencyMs, queryDelayMs, -1, -1);
+        long next = datafeedJob.runLookBack(0L, null);
         assertEquals(2000 + frequencyMs + 100, next);
         verify(dataExtractorFactory).newExtractor(0L, 1500L);
@@ -108,8 +108,8 @@
         long frequencyMs = 1000;
         long queryDelayMs = 500;
-        ScheduledJob scheduledJob = createScheduledJob(frequencyMs, queryDelayMs, latestFinalBucketEndTimeMs, latestRecordTimeMs);
-        long next = scheduledJob.runLookBack(0L, null);
+        DatafeedJob datafeedJob = createDatafeedJob(frequencyMs, queryDelayMs, latestFinalBucketEndTimeMs, latestRecordTimeMs);
+        long next = datafeedJob.runLookBack(0L, null);
         assertEquals(10000 + frequencyMs + 100, next);
         verify(dataExtractorFactory).newExtractor(5000 + 1L, currentTime - queryDelayMs);
@@ -122,8 +122,8 @@
         currentTime = 60000L;
         long frequencyMs = 100;
         long queryDelayMs = 1000;
-        ScheduledJob scheduledJob = createScheduledJob(frequencyMs, queryDelayMs, 1000, -1);
-        long next = scheduledJob.runRealtime();
+        DatafeedJob datafeedJob = createDatafeedJob(frequencyMs, queryDelayMs, 1000, -1);
+        long next = datafeedJob.runRealtime();
         assertEquals(currentTime + frequencyMs + 100, next);
         verify(dataExtractorFactory).newExtractor(1000L + 1L, currentTime - queryDelayMs);
@@ -136,19 +136,19 @@
     public void testEmptyDataCount() throws Exception {
         when(dataExtractor.hasNext()).thenReturn(false);
-        ScheduledJob scheduledJob = createScheduledJob(1000, 500, -1, -1);
-        expectThrows(ScheduledJob.EmptyDataCountException.class, () -> scheduledJob.runLookBack(0L, 1000L));
+        DatafeedJob datafeedJob = createDatafeedJob(1000, 500, -1, -1);
+        expectThrows(DatafeedJob.EmptyDataCountException.class, () -> datafeedJob.runLookBack(0L, 1000L));
     }
     public void testExtractionProblem() throws Exception {
         when(dataExtractor.hasNext()).thenReturn(true);
         when(dataExtractor.next()).thenThrow(new IOException());
-        ScheduledJob scheduledJob = createScheduledJob(1000, 500, -1, -1);
-        expectThrows(ScheduledJob.ExtractionProblemException.class, () -> scheduledJob.runLookBack(0L, 1000L));
+        DatafeedJob datafeedJob = createDatafeedJob(1000, 500, -1, -1);
+        expectThrows(DatafeedJob.ExtractionProblemException.class, () -> datafeedJob.runLookBack(0L, 1000L));
         currentTime = 3001;
-        expectThrows(ScheduledJob.ExtractionProblemException.class, scheduledJob::runRealtime);
+        expectThrows(DatafeedJob.ExtractionProblemException.class, datafeedJob::runRealtime);
         ArgumentCaptor<Long> startTimeCaptor = ArgumentCaptor.forClass(Long.class);
         ArgumentCaptor<Long> endTimeCaptor = ArgumentCaptor.forClass(Long.class);
@@ -164,11 +164,11 @@
         when(client.execute(same(FlushJobAction.INSTANCE), any())).thenReturn(flushJobFuture);
         when(client.execute(same(PostDataAction.INSTANCE), eq(new PostDataAction.Request("_job_id")))).thenThrow(new RuntimeException());
-        ScheduledJob scheduledJob = createScheduledJob(1000, 500, -1, -1);
-        expectThrows(ScheduledJob.AnalysisProblemException.class, () -> scheduledJob.runLookBack(0L, 1000L));
+        DatafeedJob datafeedJob = createDatafeedJob(1000, 500, -1, -1);
+        expectThrows(DatafeedJob.AnalysisProblemException.class, () -> datafeedJob.runLookBack(0L, 1000L));
         currentTime = 3001;
-        expectThrows(ScheduledJob.EmptyDataCountException.class, scheduledJob::runRealtime);
+        expectThrows(DatafeedJob.EmptyDataCountException.class, datafeedJob::runRealtime);
         ArgumentCaptor<Long> startTimeCaptor = ArgumentCaptor.forClass(Long.class);
         ArgumentCaptor<Long> endTimeCaptor = ArgumentCaptor.forClass(Long.class);
@@ -180,10 +180,10 @@
         verify(client, times(0)).execute(same(FlushJobAction.INSTANCE), any());
     }
-    private ScheduledJob createScheduledJob(long frequencyMs, long queryDelayMs, long latestFinalBucketEndTimeMs,
+    private DatafeedJob createDatafeedJob(long frequencyMs, long queryDelayMs, long latestFinalBucketEndTimeMs,
             long latestRecordTimeMs) {
         Supplier<Long> currentTimeSupplier = () -> currentTime;
-        return new ScheduledJob("_job_id", dataDescription.build(), frequencyMs, queryDelayMs, dataExtractorFactory, client, auditor,
+        return new DatafeedJob("_job_id", dataDescription.build(), frequencyMs, queryDelayMs, dataExtractorFactory, client, auditor,
                 currentTimeSupplier, latestFinalBucketEndTimeMs, latestRecordTimeMs);
     }
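The scheduling arithmetic these tests assert is worth spelling out: the next run fires at currentTime + frequency + 100 ms, and each realtime extraction covers the window from latestFinalBucketEndTime + 1 to currentTime - queryDelay. A standalone sketch of that arithmetic follows; the helper class is illustrative only, while the formulas and sample values come directly from the assertions above.

public final class DatafeedTimingSketch {
    static long nextRunTimeMs(long currentTimeMs, long frequencyMs) {
        return currentTimeMs + frequencyMs + 100; // 100 ms tail, as asserted in the tests
    }
    static long[] extractionWindowMs(long latestFinalBucketEndMs, long currentTimeMs, long queryDelayMs) {
        long start = latestFinalBucketEndMs + 1;  // resume just past the last final bucket
        long end = currentTimeMs - queryDelayMs;  // trail "now" by the query delay
        return new long[] {start, end};
    }
    public static void main(String[] args) {
        // Values from the realtime test: currentTime=60000, frequency=100, queryDelay=1000, bucket end=1000.
        System.out.println(nextRunTimeMs(60000L, 100L));  // 60200
        long[] w = extractionWindowMs(1000L, 60000L, 1000L);
        System.out.println(w[0] + ".." + w[1]);           // 1001..59000
    }
}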


@@ -3,7 +3,7 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.ml.scheduler;
+package org.elasticsearch.xpack.ml.datafeed;
 import org.elasticsearch.search.aggregations.AggregationBuilders;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
@@ -18,20 +18,20 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.Date;
-public class ScheduledJobValidatorTests extends ESTestCase {
+public class DatafeedJobValidatorTests extends ESTestCase {
     public void testValidate_GivenNonZeroLatency() {
-        String errorMessage = Messages.getMessage(Messages.SCHEDULER_DOES_NOT_SUPPORT_JOB_WITH_LATENCY);
+        String errorMessage = Messages.getMessage(Messages.DATAFEED_DOES_NOT_SUPPORT_JOB_WITH_LATENCY);
         Job.Builder builder = buildJobBuilder("foo");
         AnalysisConfig.Builder ac = createAnalysisConfig();
         ac.setBucketSpan(1800L);
         ac.setLatency(3600L);
         builder.setAnalysisConfig(ac);
         Job job = builder.build();
-        SchedulerConfig schedulerConfig = createValidSchedulerConfig().build();
+        DatafeedConfig datafeedConfig = createValidDatafeedConfig().build();
         IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class,
-                () -> ScheduledJobValidator.validate(schedulerConfig, job));
+                () -> DatafeedJobValidator.validate(datafeedConfig, job));
         assertEquals(errorMessage, e.getMessage());
     }
@@ -43,9 +43,9 @@
         ac.setLatency(0L);
         builder.setAnalysisConfig(ac);
         Job job = builder.build();
-        SchedulerConfig schedulerConfig = createValidSchedulerConfig().build();
-        ScheduledJobValidator.validate(schedulerConfig, job);
+        DatafeedConfig datafeedConfig = createValidDatafeedConfig().build();
+        DatafeedJobValidator.validate(datafeedConfig, job);
     }
     public void testVerify_GivenNoLatency() {
@@ -55,9 +55,9 @@
         ac.setBucketSpan(100L);
         builder.setAnalysisConfig(ac);
         Job job = builder.build();
-        SchedulerConfig schedulerConfig = createValidSchedulerConfig().build();
-        ScheduledJobValidator.validate(schedulerConfig, job);
+        DatafeedConfig datafeedConfig = createValidDatafeedConfig().build();
+        DatafeedJobValidator.validate(datafeedConfig, job);
     }
     public void testVerify_GivenAggsAndCorrectSummaryCountField() throws IOException {
@@ -67,40 +67,40 @@
         ac.setSummaryCountFieldName("doc_count");
         builder.setAnalysisConfig(ac);
         Job job = builder.build();
-        SchedulerConfig schedulerConfig = createValidSchedulerConfigWithAggs().build();
-        ScheduledJobValidator.validate(schedulerConfig, job);
+        DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs().build();
+        DatafeedJobValidator.validate(datafeedConfig, job);
     }
     public void testVerify_GivenAggsAndNoSummaryCountField() throws IOException {
-        String errorMessage = Messages.getMessage(Messages.SCHEDULER_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD,
-                SchedulerConfig.DOC_COUNT);
+        String errorMessage = Messages.getMessage(Messages.DATAFEED_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD,
+                DatafeedConfig.DOC_COUNT);
         Job.Builder builder = buildJobBuilder("foo");
         AnalysisConfig.Builder ac = createAnalysisConfig();
         ac.setBucketSpan(1800L);
         builder.setAnalysisConfig(ac);
         Job job = builder.build();
-        SchedulerConfig schedulerConfig = createValidSchedulerConfigWithAggs().build();
+        DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs().build();
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class,
() -> ScheduledJobValidator.validate(schedulerConfig, job)); () -> DatafeedJobValidator.validate(datafeedConfig, job));
assertEquals(errorMessage, e.getMessage()); assertEquals(errorMessage, e.getMessage());
} }
public void testVerify_GivenAggsAndWrongSummaryCountField() throws IOException { public void testVerify_GivenAggsAndWrongSummaryCountField() throws IOException {
String errorMessage = Messages.getMessage( String errorMessage = Messages.getMessage(
Messages.SCHEDULER_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD, SchedulerConfig.DOC_COUNT); Messages.DATAFEED_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD, DatafeedConfig.DOC_COUNT);
Job.Builder builder = buildJobBuilder("foo"); Job.Builder builder = buildJobBuilder("foo");
AnalysisConfig.Builder ac = createAnalysisConfig(); AnalysisConfig.Builder ac = createAnalysisConfig();
ac.setBucketSpan(1800L); ac.setBucketSpan(1800L);
ac.setSummaryCountFieldName("wrong"); ac.setSummaryCountFieldName("wrong");
builder.setAnalysisConfig(ac); builder.setAnalysisConfig(ac);
Job job = builder.build(); Job job = builder.build();
SchedulerConfig schedulerConfig = createValidSchedulerConfigWithAggs().build(); DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs().build();
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class,
() -> ScheduledJobValidator.validate(schedulerConfig, job)); () -> DatafeedJobValidator.validate(datafeedConfig, job));
assertEquals(errorMessage, e.getMessage()); assertEquals(errorMessage, e.getMessage());
} }
@ -121,16 +121,16 @@ public class ScheduledJobValidatorTests extends ESTestCase {
return ac; return ac;
} }
private static SchedulerConfig.Builder createValidSchedulerConfigWithAggs() throws IOException { private static DatafeedConfig.Builder createValidDatafeedConfigWithAggs() throws IOException {
SchedulerConfig.Builder schedulerConfig = createValidSchedulerConfig(); DatafeedConfig.Builder datafeedConfig = createValidDatafeedConfig();
schedulerConfig.setAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.avg("foo"))); datafeedConfig.setAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.avg("foo")));
return schedulerConfig; return datafeedConfig;
} }
private static SchedulerConfig.Builder createValidSchedulerConfig() { private static DatafeedConfig.Builder createValidDatafeedConfig() {
SchedulerConfig.Builder builder = new SchedulerConfig.Builder("my-scheduler", "my-job"); DatafeedConfig.Builder builder = new DatafeedConfig.Builder("my-datafeed", "my-job");
builder.setIndexes(Collections.singletonList("myIndex")); builder.setIndexes(Collections.singletonList("myIndex"));
builder.setTypes(Collections.singletonList("myType")); builder.setTypes(Collections.singletonList("myType"));
return builder; return builder;
} }
} }
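Taken together, these tests pin down two rules: a datafeed cannot drive a job configured with non-zero latency, and an aggregating datafeed requires the job to count documents via the doc_count field. A minimal sketch of the validator they imply, assuming getters that mirror the builder setters used above (getLatency, getSummaryCountFieldName, getAggregations); the production class is org.elasticsearch.xpack.ml.datafeed.DatafeedJobValidator and may differ in detail:

    import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
    import org.elasticsearch.xpack.ml.job.AnalysisConfig;
    import org.elasticsearch.xpack.ml.job.Job;
    import org.elasticsearch.xpack.ml.job.messages.Messages; // import path assumed

    public final class DatafeedJobValidatorSketch {

        private DatafeedJobValidatorSketch() {}

        public static void validate(DatafeedConfig datafeedConfig, Job job) {
            AnalysisConfig analysisConfig = job.getAnalysisConfig();
            Long latency = analysisConfig.getLatency();
            if (latency != null && latency > 0) {
                // a datafeed delivers data in time order, so job latency makes no sense
                throw new IllegalArgumentException(
                        Messages.getMessage(Messages.DATAFEED_DOES_NOT_SUPPORT_JOB_WITH_LATENCY));
            }
            if (datafeedConfig.getAggregations() != null
                    && DatafeedConfig.DOC_COUNT.equals(analysisConfig.getSummaryCountFieldName()) == false) {
                // aggregated feeds ship pre-summarised buckets; the job must use doc_count
                throw new IllegalArgumentException(Messages.getMessage(
                        Messages.DATAFEED_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD,
                        DatafeedConfig.DOC_COUNT));
            }
        }
    }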


@ -0,0 +1,22 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.test.ESTestCase;
public class DatafeedStatusTests extends ESTestCase {
public void testForString() {
assertEquals(DatafeedStatus.fromString("started"), DatafeedStatus.STARTED);
assertEquals(DatafeedStatus.fromString("stopped"), DatafeedStatus.STOPPED);
}
public void testValidOrdinals() {
assertEquals(0, DatafeedStatus.STARTED.ordinal());
assertEquals(1, DatafeedStatus.STOPPED.ordinal());
}
}
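These assertions fix both the parse behaviour and the ordinals (which matter if the status is serialized by ordinal on the wire; that is an assumption). The shape they imply, as a sketch:

    import java.util.Locale;

    public enum DatafeedStatusSketch {
        STARTED,  // ordinal 0
        STOPPED;  // ordinal 1

        public static DatafeedStatusSketch fromString(String name) {
            return valueOf(name.toUpperCase(Locale.ROOT));
        }
    }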


@ -0,0 +1,29 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.support.AbstractSerializingTestCase;
public class DatafeedTests extends AbstractSerializingTestCase<Datafeed> {
@Override
protected Datafeed createTestInstance() {
return new Datafeed(DatafeedConfigTests.createRandomizedDatafeedConfig(randomAsciiOfLength(10)),
randomFrom(DatafeedStatus.values()));
}
@Override
protected Writeable.Reader<Datafeed> instanceReader() {
return Datafeed::new;
}
@Override
protected Datafeed parseInstance(XContentParser parser) {
return Datafeed.PARSER.apply(parser, null);
}
}


@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler; package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ml.job.audit.Auditor; import org.elasticsearch.xpack.ml.job.audit.Auditor;
@ -29,14 +29,14 @@ public class ProblemTrackerTests extends ESTestCase {
public void testReportExtractionProblem() { public void testReportExtractionProblem() {
problemTracker.reportExtractionProblem("foo"); problemTracker.reportExtractionProblem("foo");
verify(auditor).error("Scheduler is encountering errors extracting data: foo"); verify(auditor).error("Datafeed is encountering errors extracting data: foo");
assertTrue(problemTracker.hasProblems()); assertTrue(problemTracker.hasProblems());
} }
public void testReportAnalysisProblem() { public void testReportAnalysisProblem() {
problemTracker.reportAnalysisProblem("foo"); problemTracker.reportAnalysisProblem("foo");
verify(auditor).error("Scheduler is encountering errors submitting data for analysis: foo"); verify(auditor).error("Datafeed is encountering errors submitting data for analysis: foo");
assertTrue(problemTracker.hasProblems()); assertTrue(problemTracker.hasProblems());
} }
@ -44,7 +44,7 @@ public class ProblemTrackerTests extends ESTestCase {
problemTracker.reportExtractionProblem("foo"); problemTracker.reportExtractionProblem("foo");
problemTracker.reportAnalysisProblem("foo"); problemTracker.reportAnalysisProblem("foo");
verify(auditor, times(1)).error("Scheduler is encountering errors extracting data: foo"); verify(auditor, times(1)).error("Datafeed is encountering errors extracting data: foo");
assertTrue(problemTracker.hasProblems()); assertTrue(problemTracker.hasProblems());
} }
@ -53,7 +53,7 @@ public class ProblemTrackerTests extends ESTestCase {
problemTracker.finishReport(); problemTracker.finishReport();
problemTracker.reportExtractionProblem("foo"); problemTracker.reportExtractionProblem("foo");
verify(auditor, times(1)).error("Scheduler is encountering errors extracting data: foo"); verify(auditor, times(1)).error("Datafeed is encountering errors extracting data: foo");
assertTrue(problemTracker.hasProblems()); assertTrue(problemTracker.hasProblems());
} }
@ -70,7 +70,7 @@ public class ProblemTrackerTests extends ESTestCase {
problemTracker.updateEmptyDataCount(true); problemTracker.updateEmptyDataCount(true);
} }
verify(auditor).warning("Scheduler has been retrieving no data for a while"); verify(auditor).warning("Datafeed has been retrieving no data for a while");
} }
public void testUpdateEmptyDataCount_GivenEmptyElevenTimes() { public void testUpdateEmptyDataCount_GivenEmptyElevenTimes() {
@ -78,7 +78,7 @@ public class ProblemTrackerTests extends ESTestCase {
problemTracker.updateEmptyDataCount(true); problemTracker.updateEmptyDataCount(true);
} }
verify(auditor, times(1)).warning("Scheduler has been retrieving no data for a while"); verify(auditor, times(1)).warning("Datafeed has been retrieving no data for a while");
} }
public void testUpdateEmptyDataCount_GivenNonEmptyAfterNineEmpty() { public void testUpdateEmptyDataCount_GivenNonEmptyAfterNineEmpty() {
@ -96,8 +96,8 @@ public class ProblemTrackerTests extends ESTestCase {
} }
problemTracker.updateEmptyDataCount(false); problemTracker.updateEmptyDataCount(false);
verify(auditor).warning("Scheduler has been retrieving no data for a while"); verify(auditor).warning("Datafeed has been retrieving no data for a while");
verify(auditor).info("Scheduler has started retrieving data again"); verify(auditor).info("Datafeed has started retrieving data again");
} }
public void testFinishReport_GivenNoProblems() { public void testFinishReport_GivenNoProblems() {
@ -112,8 +112,8 @@ public class ProblemTrackerTests extends ESTestCase {
problemTracker.finishReport(); problemTracker.finishReport();
problemTracker.finishReport(); problemTracker.finishReport();
verify(auditor).error("Scheduler is encountering errors extracting data: bar"); verify(auditor).error("Datafeed is encountering errors extracting data: bar");
verify(auditor).info("Scheduler has recovered data extraction and analysis"); verify(auditor).info("Datafeed has recovered data extraction and analysis");
assertFalse(problemTracker.hasProblems()); assertFalse(problemTracker.hasProblems());
} }
} }
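These tests spell out the tracker's contract: audit each new problem once, re-audit only when the problem changes, warn once after ten consecutive empty data reads, and audit recovery when a report cycle ends cleanly. A rough sketch under those assumptions (field names and reset rules are guesses; the production class is org.elasticsearch.xpack.ml.datafeed.ProblemTracker):

    import org.elasticsearch.xpack.ml.job.audit.Auditor;

    class ProblemTrackerSketch {

        private static final int EMPTY_DATA_WARN_COUNT = 10;

        private final Auditor auditor;
        private String previousProblem;
        private String currentProblem;
        private int emptyDataCount;

        ProblemTrackerSketch(Auditor auditor) {
            this.auditor = auditor;
        }

        void reportExtractionProblem(String problem) {
            report("Datafeed is encountering errors extracting data: " + problem);
        }

        void reportAnalysisProblem(String problem) {
            report("Datafeed is encountering errors submitting data for analysis: " + problem);
        }

        private void report(String message) {
            currentProblem = message;
            if (message.equals(previousProblem) == false) {
                previousProblem = message; // audit each distinct problem only once
                auditor.error(message);
            }
        }

        void updateEmptyDataCount(boolean empty) {
            if (empty) {
                if (++emptyDataCount == EMPTY_DATA_WARN_COUNT) {
                    auditor.warning("Datafeed has been retrieving no data for a while");
                }
            } else {
                if (emptyDataCount >= EMPTY_DATA_WARN_COUNT) {
                    auditor.info("Datafeed has started retrieving data again");
                }
                emptyDataCount = 0;
            }
        }

        boolean hasProblems() {
            return currentProblem != null;
        }

        void finishReport() {
            if (currentProblem == null && previousProblem != null) {
                auditor.info("Datafeed has recovered data extraction and analysis");
                previousProblem = null;
            }
            currentProblem = null;
        }
    }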


@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll; package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
@ -127,4 +127,4 @@ public class ExtractedFieldTests extends ESTestCase {
return hit; return hit;
} }
} }
} }


@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll; package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
@ -77,4 +77,4 @@ public class ExtractedFieldsTests extends ESTestCase {
expectThrows(RuntimeException.class, () -> extractedFields.timeFieldValue(hit)); expectThrows(RuntimeException.class, () -> extractedFields.timeFieldValue(hit));
} }
} }


@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll; package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse;


@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll; package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
@ -69,4 +69,4 @@ public class SearchHitToJsonProcessorTests extends ESTestCase {
} }
return outputStream.toString(StandardCharsets.UTF_8.name()); return outputStream.toString(StandardCharsets.UTF_8.name());
} }
} }


@ -24,7 +24,7 @@ import java.util.stream.Collectors;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
public class ScheduledJobIT extends ESRestTestCase { public class DatafeedJobIT extends ESRestTestCase {
@Before @Before
public void setUpData() throws Exception { public void setUpData() throws Exception {
@ -111,17 +111,17 @@ public class ScheduledJobIT extends ESRestTestCase {
new LookbackOnlyTestHelper("lookback-1", "airline-data").setShouldSucceedProcessing(true).execute(); new LookbackOnlyTestHelper("lookback-1", "airline-data").setShouldSucceedProcessing(true).execute();
} }
public void testLookbackOnlyWithSchedulerSourceEnabled() throws Exception { public void testLookbackOnlyWithDatafeedSourceEnabled() throws Exception {
new LookbackOnlyTestHelper("lookback-2", "airline-data").setEnableSchedulerSource(true).execute(); new LookbackOnlyTestHelper("lookback-2", "airline-data").setEnableDatafeedSource(true).execute();
} }
public void testLookbackOnlyWithDocValuesDisabledAndSchedulerSourceDisabled() throws Exception { public void testLookbackOnlyWithDocValuesDisabledAndDatafeedSourceDisabled() throws Exception {
new LookbackOnlyTestHelper("lookback-3", "airline-data-disabled-doc-values").setShouldSucceedInput(false) new LookbackOnlyTestHelper("lookback-3", "airline-data-disabled-doc-values").setShouldSucceedInput(false)
.setShouldSucceedProcessing(false).execute(); .setShouldSucceedProcessing(false).execute();
} }
public void testLookbackOnlyWithDocValuesDisabledAndSchedulerSourceEnabled() throws Exception { public void testLookbackOnlyWithDocValuesDisabledAndDatafeedSourceEnabled() throws Exception {
new LookbackOnlyTestHelper("lookback-4", "airline-data-disabled-doc-values").setEnableSchedulerSource(true).execute(); new LookbackOnlyTestHelper("lookback-4", "airline-data-disabled-doc-values").setEnableDatafeedSource(true).execute();
} }
public void testLookbackOnlyWithSourceDisabled() throws Exception { public void testLookbackOnlyWithSourceDisabled() throws Exception {
@ -132,23 +132,23 @@ public class ScheduledJobIT extends ESRestTestCase {
new LookbackOnlyTestHelper("lookback-6", "airline-data-disabled-source").setAddScriptedFields(true).execute(); new LookbackOnlyTestHelper("lookback-6", "airline-data-disabled-source").setAddScriptedFields(true).execute();
} }
public void testLookbackOnlyWithNestedFieldsAndSchedulerSourceDisabled() throws Exception { public void testLookbackOnlyWithNestedFieldsAndDatafeedSourceDisabled() throws Exception {
executeTestLookbackOnlyWithNestedFields("lookback-7", false); executeTestLookbackOnlyWithNestedFields("lookback-7", false);
} }
public void testLookbackOnlyWithNestedFieldsAndSchedulerSourceEnabled() throws Exception { public void testLookbackOnlyWithNestedFieldsAndDatafeedSourceEnabled() throws Exception {
executeTestLookbackOnlyWithNestedFields("lookback-8", true); executeTestLookbackOnlyWithNestedFields("lookback-8", true);
} }
public void testRealtime() throws Exception { public void testRealtime() throws Exception {
String jobId = "job-realtime-1"; String jobId = "job-realtime-1";
createJob(jobId); createJob(jobId);
String schedulerId = jobId + "-scheduler"; String datafeedId = jobId + "-datafeed";
createScheduler(schedulerId, jobId, "airline-data", false, false); createDatafeed(datafeedId, jobId, "airline-data", false, false);
openJob(client(), jobId); openJob(client(), jobId);
Response response = client().performRequest("post", Response response = client().performRequest("post",
MlPlugin.BASE_PATH + "schedulers/" + schedulerId + "/_start?start=2016-06-01T00:00:00Z"); MlPlugin.BASE_PATH + "datafeeds/" + datafeedId + "/_start?start=2016-06-01T00:00:00Z");
assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
assertThat(responseEntityToString(response), equalTo("{\"started\":true}")); assertThat(responseEntityToString(response), equalTo("{\"started\":true}"));
assertBusy(() -> { assertBusy(() -> {
@ -166,16 +166,16 @@ public class ScheduledJobIT extends ESRestTestCase {
() -> client().performRequest("delete", MlPlugin.BASE_PATH + "anomaly_detectors/" + jobId)); () -> client().performRequest("delete", MlPlugin.BASE_PATH + "anomaly_detectors/" + jobId));
response = e.getResponse(); response = e.getResponse();
assertThat(response.getStatusLine().getStatusCode(), equalTo(409)); assertThat(response.getStatusLine().getStatusCode(), equalTo(409));
assertThat(responseEntityToString(response), containsString("Cannot delete job [" + jobId + "] while scheduler [" + schedulerId assertThat(responseEntityToString(response), containsString("Cannot delete job [" + jobId + "] while datafeed [" + datafeedId
+ "] refers to it")); + "] refers to it"));
response = client().performRequest("post", MlPlugin.BASE_PATH + "schedulers/" + schedulerId + "/_stop"); response = client().performRequest("post", MlPlugin.BASE_PATH + "datafeeds/" + datafeedId + "/_stop");
assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
assertThat(responseEntityToString(response), equalTo("{\"acknowledged\":true}")); assertThat(responseEntityToString(response), equalTo("{\"acknowledged\":true}"));
client().performRequest("POST", "/_xpack/ml/anomaly_detectors/" + jobId + "/_close"); client().performRequest("POST", "/_xpack/ml/anomaly_detectors/" + jobId + "/_close");
response = client().performRequest("delete", MlPlugin.BASE_PATH + "schedulers/" + schedulerId); response = client().performRequest("delete", MlPlugin.BASE_PATH + "datafeeds/" + datafeedId);
assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
assertThat(responseEntityToString(response), equalTo("{\"acknowledged\":true}")); assertThat(responseEntityToString(response), equalTo("{\"acknowledged\":true}"));
@ -188,7 +188,7 @@ public class ScheduledJobIT extends ESRestTestCase {
private String jobId; private String jobId;
private String dataIndex; private String dataIndex;
private boolean addScriptedFields; private boolean addScriptedFields;
private boolean enableSchedulerSource; private boolean enableDatafeedSource;
private boolean shouldSucceedInput; private boolean shouldSucceedInput;
private boolean shouldSucceedProcessing; private boolean shouldSucceedProcessing;
@ -204,8 +204,8 @@ public class ScheduledJobIT extends ESRestTestCase {
return this; return this;
} }
public LookbackOnlyTestHelper setEnableSchedulerSource(boolean value) { public LookbackOnlyTestHelper setEnableDatafeedSource(boolean value) {
enableSchedulerSource = value; enableDatafeedSource = value;
return this; return this;
} }
@ -221,11 +221,11 @@ public class ScheduledJobIT extends ESRestTestCase {
public void execute() throws Exception { public void execute() throws Exception {
createJob(jobId); createJob(jobId);
String schedulerId = "scheduler-" + jobId; String datafeedId = "datafeed-" + jobId;
createScheduler(schedulerId, jobId, dataIndex, enableSchedulerSource, addScriptedFields); createDatafeed(datafeedId, jobId, dataIndex, enableDatafeedSource, addScriptedFields);
openJob(client(), jobId); openJob(client(), jobId);
startSchedulerAndWaitUntilStopped(schedulerId); startDatafeedAndWaitUntilStopped(datafeedId);
Response jobStatsResponse = client().performRequest("get", MlPlugin.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"); Response jobStatsResponse = client().performRequest("get", MlPlugin.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
String jobStatsResponseAsString = responseEntityToString(jobStatsResponse); String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
if (shouldSucceedInput) { if (shouldSucceedInput) {
@ -242,16 +242,16 @@ public class ScheduledJobIT extends ESRestTestCase {
} }
} }
private void startSchedulerAndWaitUntilStopped(String schedulerId) throws Exception { private void startDatafeedAndWaitUntilStopped(String datafeedId) throws Exception {
Response startSchedulerRequest = client().performRequest("post", Response startDatafeedRequest = client().performRequest("post",
MlPlugin.BASE_PATH + "schedulers/" + schedulerId + "/_start?start=2016-06-01T00:00:00Z&end=2016-06-02T00:00:00Z"); MlPlugin.BASE_PATH + "datafeeds/" + datafeedId + "/_start?start=2016-06-01T00:00:00Z&end=2016-06-02T00:00:00Z");
assertThat(startSchedulerRequest.getStatusLine().getStatusCode(), equalTo(200)); assertThat(startDatafeedRequest.getStatusLine().getStatusCode(), equalTo(200));
assertThat(responseEntityToString(startSchedulerRequest), equalTo("{\"started\":true}")); assertThat(responseEntityToString(startDatafeedRequest), equalTo("{\"started\":true}"));
assertBusy(() -> { assertBusy(() -> {
try { try {
Response schedulerStatsResponse = client().performRequest("get", Response datafeedStatsResponse = client().performRequest("get",
MlPlugin.BASE_PATH + "schedulers/" + schedulerId + "/_stats"); MlPlugin.BASE_PATH + "datafeeds/" + datafeedId + "/_stats");
assertThat(responseEntityToString(schedulerStatsResponse), containsString("\"status\":\"STOPPED\"")); assertThat(responseEntityToString(datafeedStatsResponse), containsString("\"status\":\"STOPPED\""));
} catch (Exception e) { } catch (Exception e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@ -270,14 +270,14 @@ public class ScheduledJobIT extends ESRestTestCase {
Collections.emptyMap(), new StringEntity(job)); Collections.emptyMap(), new StringEntity(job));
} }
private Response createScheduler(String schedulerId, String jobId, String dataIndex, boolean source, boolean addScriptedFields) private Response createDatafeed(String datafeedId, String jobId, String dataIndex, boolean source, boolean addScriptedFields)
throws IOException { throws IOException {
String schedulerConfig = "{" + "\"job_id\": \"" + jobId + "\",\n" + "\"indexes\":[\"" + dataIndex + "\"],\n" String datafeedConfig = "{" + "\"job_id\": \"" + jobId + "\",\n" + "\"indexes\":[\"" + dataIndex + "\"],\n"
+ "\"types\":[\"response\"]" + (source ? ",\"_source\":true" : "") + (addScriptedFields ? + "\"types\":[\"response\"]" + (source ? ",\"_source\":true" : "") + (addScriptedFields ?
",\"script_fields\":{\"airline\":{\"script\":{\"lang\":\"painless\",\"inline\":\"doc['airline'].value\"}}}" : "") ",\"script_fields\":{\"airline\":{\"script\":{\"lang\":\"painless\",\"inline\":\"doc['airline'].value\"}}}" : "")
+"}"; +"}";
return client().performRequest("put", MlPlugin.BASE_PATH + "schedulers/" + schedulerId, Collections.emptyMap(), return client().performRequest("put", MlPlugin.BASE_PATH + "datafeeds/" + datafeedId, Collections.emptyMap(),
new StringEntity(schedulerConfig)); new StringEntity(datafeedConfig));
} }
private static String responseEntityToString(Response response) throws Exception { private static String responseEntityToString(Response response) throws Exception {
@ -297,11 +297,11 @@ public class ScheduledJobIT extends ESRestTestCase {
+ "}"; + "}";
client().performRequest("put", MlPlugin.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(), new StringEntity(job)); client().performRequest("put", MlPlugin.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(), new StringEntity(job));
String schedulerId = jobId + "-scheduler"; String datafeedId = jobId + "-datafeed";
createScheduler(schedulerId, jobId, "nested-data", source, false); createDatafeed(datafeedId, jobId, "nested-data", source, false);
openJob(client(), jobId); openJob(client(), jobId);
startSchedulerAndWaitUntilStopped(schedulerId); startDatafeedAndWaitUntilStopped(datafeedId);
Response jobStatsResponse = client().performRequest("get", MlPlugin.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"); Response jobStatsResponse = client().performRequest("get", MlPlugin.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
String jobStatsResponseAsString = responseEntityToString(jobStatsResponse); String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":2")); assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":2"));
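Condensed from the calls this IT makes, the full datafeed lifecycle over the low-level REST client looks roughly like the sketch below. The /_open path is inferred from the /_close call above and is an assumption; error handling and the stats polling are omitted:

    import java.util.Collections;
    import org.apache.http.entity.StringEntity;
    import org.elasticsearch.client.RestClient;

    public class DatafeedLifecycleSketch {
        public static void run(RestClient client, String jobId) throws Exception {
            String datafeedId = jobId + "-datafeed";
            String config = "{\"job_id\":\"" + jobId + "\","
                    + "\"indexes\":[\"airline-data\"],\"types\":[\"response\"]}";
            // create the datafeed, open its job, then start the feed in real time
            client.performRequest("put", "/_xpack/ml/datafeeds/" + datafeedId,
                    Collections.emptyMap(), new StringEntity(config));
            client.performRequest("post", "/_xpack/ml/anomaly_detectors/" + jobId + "/_open");
            client.performRequest("post",
                    "/_xpack/ml/datafeeds/" + datafeedId + "/_start?start=2016-06-01T00:00:00Z");
            // while the datafeed exists, deleting the job fails with 409 CONFLICT
            client.performRequest("post", "/_xpack/ml/datafeeds/" + datafeedId + "/_stop");
            client.performRequest("post", "/_xpack/ml/anomaly_detectors/" + jobId + "/_close");
            client.performRequest("delete", "/_xpack/ml/datafeeds/" + datafeedId);
            client.performRequest("delete", "/_xpack/ml/anomaly_detectors/" + jobId);
        }
    }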


@ -25,29 +25,29 @@ public class MlRestTestStateCleaner {
} }
public void clearMlMetadata() throws IOException { public void clearMlMetadata() throws IOException {
deleteAllSchedulers(); deleteAllDatafeeds();
deleteAllJobs(); deleteAllJobs();
} }
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
private void deleteAllSchedulers() throws IOException { private void deleteAllDatafeeds() throws IOException {
Map<String, Object> clusterStateAsMap = testCase.entityAsMap(client.performRequest("GET", "/_cluster/state", Map<String, Object> clusterStateAsMap = testCase.entityAsMap(client.performRequest("GET", "/_cluster/state",
Collections.singletonMap("filter_path", "metadata.ml.schedulers"))); Collections.singletonMap("filter_path", "metadata.ml.datafeeds")));
List<Map<String, Object>> schedulers = List<Map<String, Object>> datafeeds =
(List<Map<String, Object>>) XContentMapValues.extractValue("metadata.ml.schedulers", clusterStateAsMap); (List<Map<String, Object>>) XContentMapValues.extractValue("metadata.ml.datafeeds", clusterStateAsMap);
if (schedulers == null) { if (datafeeds == null) {
return; return;
} }
for (Map<String, Object> scheduler : schedulers) { for (Map<String, Object> datafeed : datafeeds) {
Map<String, Object> schedulerMap = (Map<String, Object>) scheduler.get("config"); Map<String, Object> datafeedMap = (Map<String, Object>) datafeed.get("config");
String schedulerId = (String) schedulerMap.get("scheduler_id"); String datafeedId = (String) datafeedMap.get("datafeed_id");
try { try {
client.performRequest("POST", "/_xpack/ml/schedulers/" + schedulerId + "/_stop"); client.performRequest("POST", "/_xpack/ml/datafeeds/" + datafeedId + "/_stop");
} catch (Exception e) { } catch (Exception e) {
// ignore // ignore
} }
client.performRequest("DELETE", "/_xpack/ml/schedulers/" + schedulerId); client.performRequest("DELETE", "/_xpack/ml/datafeeds/" + datafeedId);
} }
} }


@ -20,7 +20,7 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.xpack.ml.MlPlugin; import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.OpenJobAction; import org.elasticsearch.xpack.ml.action.OpenJobAction;
import org.elasticsearch.xpack.ml.action.PutJobAction; import org.elasticsearch.xpack.ml.action.PutJobAction;
import org.elasticsearch.xpack.ml.action.ScheduledJobsIT; import org.elasticsearch.xpack.ml.action.DatafeedJobsIT;
import org.elasticsearch.xpack.ml.job.AnalysisConfig; import org.elasticsearch.xpack.ml.job.AnalysisConfig;
import org.elasticsearch.xpack.ml.job.DataDescription; import org.elasticsearch.xpack.ml.job.DataDescription;
import org.elasticsearch.xpack.ml.job.Detector; import org.elasticsearch.xpack.ml.job.Detector;
@ -54,7 +54,7 @@ public class TooManyJobsIT extends ESIntegTestCase {
@After @After
public void clearMlMetadata() throws Exception { public void clearMlMetadata() throws Exception {
ScheduledJobsIT.clearMlMetadata(client()); DatafeedJobsIT.clearMlMetadata(client());
} }
public void testCannotStartTooManyAnalyticalProcesses() throws Exception { public void testCannotStartTooManyAnalyticalProcesses() throws Exception {


@ -19,16 +19,16 @@ import org.elasticsearch.xpack.ml.job.AnalysisConfig;
import org.elasticsearch.xpack.ml.job.Job; import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.job.JobStatus; import org.elasticsearch.xpack.ml.job.JobStatus;
import org.elasticsearch.xpack.ml.job.JobTests; import org.elasticsearch.xpack.ml.job.JobTests;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfigTests; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus; import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.support.AbstractSerializingTestCase; import org.elasticsearch.xpack.ml.support.AbstractSerializingTestCase;
import java.io.IOException; import java.io.IOException;
import static org.elasticsearch.xpack.ml.job.JobTests.buildJobBuilder; import static org.elasticsearch.xpack.ml.job.JobTests.buildJobBuilder;
import static org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunnerTests.createScheduledJob; import static org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunnerTests.createDatafeedJob;
import static org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunnerTests.createSchedulerConfig; import static org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunnerTests.createDatafeedConfig;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.nullValue;
@ -43,8 +43,8 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
for (int i = 0; i < numJobs; i++) { for (int i = 0; i < numJobs; i++) {
Job job = JobTests.createRandomizedJob(); Job job = JobTests.createRandomizedJob();
if (randomBoolean()) { if (randomBoolean()) {
SchedulerConfig schedulerConfig = SchedulerConfigTests.createRandomizedSchedulerConfig(job.getId()); DatafeedConfig datafeedConfig = DatafeedConfigTests.createRandomizedDatafeedConfig(job.getId());
if (schedulerConfig.getAggregations() != null) { if (datafeedConfig.getAggregations() != null) {
AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(job.getAnalysisConfig().getDetectors()); AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(job.getAnalysisConfig().getDetectors());
analysisConfig.setSummaryCountFieldName("doc_count"); analysisConfig.setSummaryCountFieldName("doc_count");
Job.Builder jobBuilder = new Job.Builder(job); Job.Builder jobBuilder = new Job.Builder(job);
@ -52,9 +52,9 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
job = jobBuilder.build(); job = jobBuilder.build();
} }
builder.putJob(job, false); builder.putJob(job, false);
builder.putScheduler(schedulerConfig); builder.putDatafeed(datafeedConfig);
if (randomBoolean()) { if (randomBoolean()) {
builder.updateSchedulerStatus(schedulerConfig.getId(), SchedulerStatus.STARTED); builder.updateDatafeedStatus(datafeedConfig.getId(), DatafeedStatus.STARTED);
} }
} else { } else {
builder.putJob(job, false); builder.putJob(job, false);
@ -107,10 +107,10 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
MlMetadata result = builder.build(); MlMetadata result = builder.build();
assertThat(result.getJobs().get("1"), sameInstance(job1)); assertThat(result.getJobs().get("1"), sameInstance(job1));
assertThat(result.getAllocations().get("1").getStatus(), equalTo(JobStatus.CLOSED)); assertThat(result.getAllocations().get("1").getStatus(), equalTo(JobStatus.CLOSED));
assertThat(result.getSchedulers().get("1"), nullValue()); assertThat(result.getDatafeeds().get("1"), nullValue());
assertThat(result.getJobs().get("2"), sameInstance(job2)); assertThat(result.getJobs().get("2"), sameInstance(job2));
assertThat(result.getAllocations().get("2").getStatus(), equalTo(JobStatus.CLOSED)); assertThat(result.getAllocations().get("2").getStatus(), equalTo(JobStatus.CLOSED));
assertThat(result.getSchedulers().get("2"), nullValue()); assertThat(result.getDatafeeds().get("2"), nullValue());
builder = new MlMetadata.Builder(result); builder = new MlMetadata.Builder(result);
@ -134,19 +134,19 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
MlMetadata result = builder.build(); MlMetadata result = builder.build();
assertThat(result.getJobs().get("1"), sameInstance(job1)); assertThat(result.getJobs().get("1"), sameInstance(job1));
assertThat(result.getAllocations().get("1").getStatus(), equalTo(JobStatus.CLOSED)); assertThat(result.getAllocations().get("1").getStatus(), equalTo(JobStatus.CLOSED));
assertThat(result.getSchedulers().get("1"), nullValue()); assertThat(result.getDatafeeds().get("1"), nullValue());
builder = new MlMetadata.Builder(result); builder = new MlMetadata.Builder(result);
builder.updateStatus("1", JobStatus.DELETING, null); builder.updateStatus("1", JobStatus.DELETING, null);
assertThat(result.getJobs().get("1"), sameInstance(job1)); assertThat(result.getJobs().get("1"), sameInstance(job1));
assertThat(result.getAllocations().get("1").getStatus(), equalTo(JobStatus.CLOSED)); assertThat(result.getAllocations().get("1").getStatus(), equalTo(JobStatus.CLOSED));
assertThat(result.getSchedulers().get("1"), nullValue()); assertThat(result.getDatafeeds().get("1"), nullValue());
builder.deleteJob("1"); builder.deleteJob("1");
result = builder.build(); result = builder.build();
assertThat(result.getJobs().get("1"), nullValue()); assertThat(result.getJobs().get("1"), nullValue());
assertThat(result.getAllocations().get("1"), nullValue()); assertThat(result.getAllocations().get("1"), nullValue());
assertThat(result.getSchedulers().get("1"), nullValue()); assertThat(result.getDatafeeds().get("1"), nullValue());
} }
public void testRemoveJob_failBecauseJobIsOpen() { public void testRemoveJob_failBecauseJobIsOpen() {
@ -159,23 +159,23 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
MlMetadata result = builder1.build(); MlMetadata result = builder1.build();
assertThat(result.getJobs().get("1"), sameInstance(job1)); assertThat(result.getJobs().get("1"), sameInstance(job1));
assertThat(result.getAllocations().get("1").getStatus(), equalTo(JobStatus.OPENED)); assertThat(result.getAllocations().get("1").getStatus(), equalTo(JobStatus.OPENED));
assertThat(result.getSchedulers().get("1"), nullValue()); assertThat(result.getDatafeeds().get("1"), nullValue());
MlMetadata.Builder builder2 = new MlMetadata.Builder(result); MlMetadata.Builder builder2 = new MlMetadata.Builder(result);
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder2.deleteJob("1")); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder2.deleteJob("1"));
assertThat(e.status(), equalTo(RestStatus.CONFLICT)); assertThat(e.status(), equalTo(RestStatus.CONFLICT));
} }
public void testRemoveJob_failSchedulerRefersToJob() { public void testRemoveJob_failDatafeedRefersToJob() {
Job job1 = createScheduledJob().build(); Job job1 = createDatafeedJob().build();
SchedulerConfig schedulerConfig1 = createSchedulerConfig("scheduler1", job1.getId()).build(); DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
MlMetadata.Builder builder = new MlMetadata.Builder(); MlMetadata.Builder builder = new MlMetadata.Builder();
builder.putJob(job1, false); builder.putJob(job1, false);
builder.putScheduler(schedulerConfig1); builder.putDatafeed(datafeedConfig1);
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder.deleteJob(job1.getId())); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder.deleteJob(job1.getId()));
assertThat(e.status(), equalTo(RestStatus.CONFLICT)); assertThat(e.status(), equalTo(RestStatus.CONFLICT));
String expectedMsg = "Cannot delete job [" + job1.getId() + "] while scheduler [" + schedulerConfig1.getId() + "] refers to it"; String expectedMsg = "Cannot delete job [" + job1.getId() + "] while datafeed [" + datafeedConfig1.getId() + "] refers to it";
assertThat(e.getMessage(), equalTo(expectedMsg)); assertThat(e.getMessage(), equalTo(expectedMsg));
} }
@ -184,87 +184,87 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
expectThrows(ResourceNotFoundException.class, () -> builder1.deleteJob("1")); expectThrows(ResourceNotFoundException.class, () -> builder1.deleteJob("1"));
} }
public void testCrudScheduler() { public void testCrudDatafeed() {
Job job1 = createScheduledJob().build(); Job job1 = createDatafeedJob().build();
SchedulerConfig schedulerConfig1 = createSchedulerConfig("scheduler1", job1.getId()).build(); DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
MlMetadata.Builder builder = new MlMetadata.Builder(); MlMetadata.Builder builder = new MlMetadata.Builder();
builder.putJob(job1, false); builder.putJob(job1, false);
builder.putScheduler(schedulerConfig1); builder.putDatafeed(datafeedConfig1);
MlMetadata result = builder.build(); MlMetadata result = builder.build();
assertThat(result.getJobs().get("foo"), sameInstance(job1)); assertThat(result.getJobs().get("foo"), sameInstance(job1));
assertThat(result.getAllocations().get("foo").getStatus(), equalTo(JobStatus.CLOSED)); assertThat(result.getAllocations().get("foo").getStatus(), equalTo(JobStatus.CLOSED));
assertThat(result.getSchedulers().get("scheduler1").getConfig(), sameInstance(schedulerConfig1)); assertThat(result.getDatafeeds().get("datafeed1").getConfig(), sameInstance(datafeedConfig1));
assertThat(result.getSchedulers().get("scheduler1").getStatus(), equalTo(SchedulerStatus.STOPPED)); assertThat(result.getDatafeeds().get("datafeed1").getStatus(), equalTo(DatafeedStatus.STOPPED));
builder = new MlMetadata.Builder(result); builder = new MlMetadata.Builder(result);
builder.removeScheduler("scheduler1"); builder.removeDatafeed("datafeed1");
result = builder.build(); result = builder.build();
assertThat(result.getJobs().get("foo"), sameInstance(job1)); assertThat(result.getJobs().get("foo"), sameInstance(job1));
assertThat(result.getAllocations().get("foo").getStatus(), equalTo(JobStatus.CLOSED)); assertThat(result.getAllocations().get("foo").getStatus(), equalTo(JobStatus.CLOSED));
assertThat(result.getSchedulers().get("scheduler1"), nullValue()); assertThat(result.getDatafeeds().get("datafeed1"), nullValue());
} }
public void testPutScheduler_failBecauseJobDoesNotExist() { public void testPutDatafeed_failBecauseJobDoesNotExist() {
SchedulerConfig schedulerConfig1 = createSchedulerConfig("scheduler1", "missing-job").build(); DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", "missing-job").build();
MlMetadata.Builder builder = new MlMetadata.Builder(); MlMetadata.Builder builder = new MlMetadata.Builder();
expectThrows(ResourceNotFoundException.class, () -> builder.putScheduler(schedulerConfig1)); expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1));
} }
public void testPutScheduler_failBecauseSchedulerIdIsAlreadyTaken() { public void testPutDatafeed_failBecauseDatafeedIdIsAlreadyTaken() {
Job job1 = createScheduledJob().build(); Job job1 = createDatafeedJob().build();
SchedulerConfig schedulerConfig1 = createSchedulerConfig("scheduler1", job1.getId()).build(); DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
MlMetadata.Builder builder = new MlMetadata.Builder(); MlMetadata.Builder builder = new MlMetadata.Builder();
builder.putJob(job1, false); builder.putJob(job1, false);
builder.putScheduler(schedulerConfig1); builder.putDatafeed(datafeedConfig1);
expectThrows(ResourceAlreadyExistsException.class, () -> builder.putScheduler(schedulerConfig1)); expectThrows(ResourceAlreadyExistsException.class, () -> builder.putDatafeed(datafeedConfig1));
} }
public void testPutScheduler_failBecauseJobAlreadyHasScheduler() { public void testPutDatafeed_failBecauseJobAlreadyHasDatafeed() {
Job job1 = createScheduledJob().build(); Job job1 = createDatafeedJob().build();
SchedulerConfig schedulerConfig1 = createSchedulerConfig("scheduler1", job1.getId()).build(); DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
SchedulerConfig schedulerConfig2 = createSchedulerConfig("scheduler2", job1.getId()).build(); DatafeedConfig datafeedConfig2 = createDatafeedConfig("datafeed2", job1.getId()).build();
MlMetadata.Builder builder = new MlMetadata.Builder(); MlMetadata.Builder builder = new MlMetadata.Builder();
builder.putJob(job1, false); builder.putJob(job1, false);
builder.putScheduler(schedulerConfig1); builder.putDatafeed(datafeedConfig1);
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
() -> builder.putScheduler(schedulerConfig2)); () -> builder.putDatafeed(datafeedConfig2));
assertThat(e.status(), equalTo(RestStatus.CONFLICT)); assertThat(e.status(), equalTo(RestStatus.CONFLICT));
} }
public void testPutScheduler_failBecauseJobIsNotCompatibleForScheduler() { public void testPutDatafeed_failBecauseJobIsNotCompatibleForDatafeed() {
Job.Builder job1 = createScheduledJob(); Job.Builder job1 = createDatafeedJob();
AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(job1.build().getAnalysisConfig()); AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(job1.build().getAnalysisConfig());
analysisConfig.setLatency(3600L); analysisConfig.setLatency(3600L);
job1.setAnalysisConfig(analysisConfig); job1.setAnalysisConfig(analysisConfig);
SchedulerConfig schedulerConfig1 = createSchedulerConfig("scheduler1", job1.getId()).build(); DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
MlMetadata.Builder builder = new MlMetadata.Builder(); MlMetadata.Builder builder = new MlMetadata.Builder();
builder.putJob(job1.build(), false); builder.putJob(job1.build(), false);
expectThrows(IllegalArgumentException.class, () -> builder.putScheduler(schedulerConfig1)); expectThrows(IllegalArgumentException.class, () -> builder.putDatafeed(datafeedConfig1));
} }
public void testRemoveScheduler_failBecauseSchedulerStarted() { public void testRemoveDatafeed_failBecauseDatafeedStarted() {
Job job1 = createScheduledJob().build(); Job job1 = createDatafeedJob().build();
SchedulerConfig schedulerConfig1 = createSchedulerConfig("scheduler1", job1.getId()).build(); DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
MlMetadata.Builder builder = new MlMetadata.Builder(); MlMetadata.Builder builder = new MlMetadata.Builder();
builder.putJob(job1, false); builder.putJob(job1, false);
builder.putScheduler(schedulerConfig1); builder.putDatafeed(datafeedConfig1);
builder.updateStatus("foo", JobStatus.OPENING, null); builder.updateStatus("foo", JobStatus.OPENING, null);
builder.updateStatus("foo", JobStatus.OPENED, null); builder.updateStatus("foo", JobStatus.OPENED, null);
builder.updateSchedulerStatus("scheduler1", SchedulerStatus.STARTED); builder.updateDatafeedStatus("datafeed1", DatafeedStatus.STARTED);
MlMetadata result = builder.build(); MlMetadata result = builder.build();
assertThat(result.getJobs().get("foo"), sameInstance(job1)); assertThat(result.getJobs().get("foo"), sameInstance(job1));
assertThat(result.getAllocations().get("foo").getStatus(), equalTo(JobStatus.OPENED)); assertThat(result.getAllocations().get("foo").getStatus(), equalTo(JobStatus.OPENED));
assertThat(result.getSchedulers().get("scheduler1").getConfig(), sameInstance(schedulerConfig1)); assertThat(result.getDatafeeds().get("datafeed1").getConfig(), sameInstance(datafeedConfig1));
assertThat(result.getSchedulers().get("scheduler1").getStatus(), equalTo(SchedulerStatus.STARTED)); assertThat(result.getDatafeeds().get("datafeed1").getStatus(), equalTo(DatafeedStatus.STARTED));
MlMetadata.Builder builder2 = new MlMetadata.Builder(result); MlMetadata.Builder builder2 = new MlMetadata.Builder(result);
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder2.removeScheduler("scheduler1")); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder2.removeDatafeed("datafeed1"));
assertThat(e.status(), equalTo(RestStatus.CONFLICT)); assertThat(e.status(), equalTo(RestStatus.CONFLICT));
} }
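In short, the builder enforces: the job must exist before its datafeed is put, a datafeed id is unique, a job carries at most one datafeed, datafeed/job compatibility is validated on put, a STARTED datafeed cannot be removed, and a job with a datafeed cannot be deleted. A minimal usage sketch (the MlMetadata import path is assumed):

    import org.elasticsearch.xpack.ml.MlMetadata; // package assumed
    import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
    import org.elasticsearch.xpack.ml.job.Job;

    public class MlMetadataRegistrationSketch {
        public static MlMetadata register(Job job, DatafeedConfig datafeed) {
            MlMetadata.Builder builder = new MlMetadata.Builder();
            builder.putJob(job, false);    // must come first, or putDatafeed throws
                                           // ResourceNotFoundException
            builder.putDatafeed(datafeed); // duplicate id: ResourceAlreadyExistsException;
                                           // second datafeed for the same job: 409 CONFLICT
            return builder.build();
        }
    }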


@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.ml.rest.schedulers; package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.node.NodeClient;
@ -14,24 +14,24 @@ import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.test.rest.FakeRestRequest;
import org.elasticsearch.xpack.ml.job.Job; import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunnerTests; import org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunnerTests;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
public class RestStartJobSchedulerActionTests extends ESTestCase { public class RestStartJobDatafeedActionTests extends ESTestCase {
public void testPrepareRequest() throws Exception { public void testPrepareRequest() throws Exception {
Job.Builder job = ScheduledJobRunnerTests.createScheduledJob(); Job.Builder job = DatafeedJobRunnerTests.createDatafeedJob();
SchedulerConfig schedulerConfig = ScheduledJobRunnerTests.createSchedulerConfig("foo-scheduler", "foo").build(); DatafeedConfig datafeedConfig = DatafeedJobRunnerTests.createDatafeedConfig("foo-datafeed", "foo").build();
RestStartSchedulerAction action = new RestStartSchedulerAction(Settings.EMPTY, mock(RestController.class)); RestStartDatafeedAction action = new RestStartDatafeedAction(Settings.EMPTY, mock(RestController.class));
Map<String, String> params = new HashMap<>(); Map<String, String> params = new HashMap<>();
params.put("start", "not-a-date"); params.put("start", "not-a-date");
params.put("scheduler_id", "foo-scheduler"); params.put("datafeed_id", "foo-datafeed");
RestRequest restRequest1 = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(params).build(); RestRequest restRequest1 = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(params).build();
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class,
() -> action.prepareRequest(restRequest1, mock(NodeClient.class))); () -> action.prepareRequest(restRequest1, mock(NodeClient.class)));
@ -40,7 +40,7 @@ public class RestStartJobSchedulerActionTests extends ESTestCase {
params = new HashMap<>(); params = new HashMap<>();
params.put("end", "not-a-date"); params.put("end", "not-a-date");
params.put("scheduler_id", "foo-scheduler"); params.put("datafeed_id", "foo-datafeed");
RestRequest restRequest2 = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(params).build(); RestRequest restRequest2 = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(params).build();
e = expectThrows(ElasticsearchParseException.class, () -> action.prepareRequest(restRequest2, mock(NodeClient.class))); e = expectThrows(ElasticsearchParseException.class, () -> action.prepareRequest(restRequest2, mock(NodeClient.class)));
assertEquals("Query param 'end' with value 'not-a-date' cannot be parsed as a date or converted to a number (epoch).", assertEquals("Query param 'end' with value 'not-a-date' cannot be parsed as a date or converted to a number (epoch).",
@ -48,11 +48,11 @@ public class RestStartJobSchedulerActionTests extends ESTestCase {
} }
public void testParseDateOrThrow() { public void testParseDateOrThrow() {
assertEquals(0L, RestStartSchedulerAction.parseDateOrThrow("0", "start")); assertEquals(0L, RestStartDatafeedAction.parseDateOrThrow("0", "start"));
assertEquals(0L, RestStartSchedulerAction.parseDateOrThrow("1970-01-01T00:00:00Z", "start")); assertEquals(0L, RestStartDatafeedAction.parseDateOrThrow("1970-01-01T00:00:00Z", "start"));
Exception e = expectThrows(ElasticsearchParseException.class, Exception e = expectThrows(ElasticsearchParseException.class,
() -> RestStartSchedulerAction.parseDateOrThrow("not-a-date", "start")); () -> RestStartDatafeedAction.parseDateOrThrow("not-a-date", "start"));
assertEquals("Query param 'start' with value 'not-a-date' cannot be parsed as a date or converted to a number (epoch).", assertEquals("Query param 'start' with value 'not-a-date' cannot be parsed as a date or converted to a number (epoch).",
e.getMessage()); e.getMessage());
} }
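The assertions require dual parsing: epoch milliseconds first, then an ISO 8601 date. A sketch of that logic; the real method throws ElasticsearchParseException with the message asserted above, for which a plain IllegalArgumentException stands in here:

    import java.time.Instant;
    import java.time.format.DateTimeParseException;

    public class ParseDateSketch {
        public static long parseDateOrThrow(String date, String paramName) {
            try {
                return Long.parseLong(date);               // "0" -> 0L
            } catch (NumberFormatException ignored) {
                // not a number; fall through to date parsing
            }
            try {
                return Instant.parse(date).toEpochMilli(); // "1970-01-01T00:00:00Z" -> 0L
            } catch (DateTimeParseException e) {
                throw new IllegalArgumentException("Query param '" + paramName + "' with value '"
                        + date + "' cannot be parsed as a date or converted to a number (epoch).");
            }
        }
    }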


@ -1,22 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
import org.elasticsearch.test.ESTestCase;
public class SchedulerStatusTests extends ESTestCase {
public void testForString() {
assertEquals(SchedulerStatus.fromString("started"), SchedulerStatus.STARTED);
assertEquals(SchedulerStatus.fromString("stopped"), SchedulerStatus.STOPPED);
}
public void testValidOrdinals() {
assertEquals(0, SchedulerStatus.STARTED.ordinal());
assertEquals(1, SchedulerStatus.STOPPED.ordinal());
}
}


@ -1,29 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.support.AbstractSerializingTestCase;
public class SchedulerTests extends AbstractSerializingTestCase<Scheduler> {
@Override
protected Scheduler createTestInstance() {
return new Scheduler(SchedulerConfigTests.createRandomizedSchedulerConfig(randomAsciiOfLength(10)),
randomFrom(SchedulerStatus.values()));
}
@Override
protected Writeable.Reader<Scheduler> instanceReader() {
return Scheduler::new;
}
@Override
protected Scheduler parseInstance(XContentParser parser) {
return Scheduler.PARSER.apply(parser, null);
}
}


@ -0,0 +1,17 @@
{
"xpack.ml.delete_datafeed": {
"methods": [ "DELETE" ],
"url": {
"path": "/_xpack/ml/datafeeds/{datafeed_id}",
"paths": [ "/_xpack/ml/datafeeds/{datafeed_id}" ],
"parts": {
"datafeed_id": {
"type": "string",
"required": true,
"description": "The ID of the datafeed to delete"
}
}
},
"body": null
}
}
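As a usage sketch of the endpoint this spec declares, using the same low-level client as the integration tests (the datafeed must already be stopped):

    import java.io.IOException;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    public class DeleteDatafeedCall {
        public static Response delete(RestClient client, String datafeedId) throws IOException {
            // expects 200 with {"acknowledged":true}, as asserted in DatafeedJobIT
            return client.performRequest("DELETE", "/_xpack/ml/datafeeds/" + datafeedId);
        }
    }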


@ -1,17 +0,0 @@
{
"xpack.ml.delete_scheduler": {
"methods": [ "DELETE" ],
"url": {
"path": "/_xpack/ml/schedulers/{scheduler_id}",
"paths": [ "/_xpack/ml/schedulers/{scheduler_id}" ],
"parts": {
"scheduler_id": {
"type": "string",
"required": true,
"description": "The ID of the scheduler to delete"
}
}
},
"body": null
}
}


@ -0,0 +1,19 @@
{
"xpack.ml.get_datafeeds": {
"methods": [ "GET"],
"url": {
"path": "/_xpack/ml/datafeeds/{datafeed_id}",
"paths": [
"/_xpack/ml/datafeeds/{datafeed_id}"
],
"parts": {
"datafeed_id": {
"type": "string",
"required": true,
"description": "The ID of the datafeeds to fetch"
}
}
},
"body": null
}
}


@ -0,0 +1,19 @@
{
"xpack.ml.get_datafeeds_stats": {
"methods": [ "GET"],
"url": {
"path": "/_xpack/ml/datafeeds/{datafeed_id}/_stats",
"paths": [
"/_xpack/ml/datafeeds/{datafeed_id}/_stats"
],
"parts": {
"datafeed_id": {
"type": "string",
"required": true,
"description": "The ID of the datafeeds stats to fetch"
}
}
},
"body": null
}
}
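The two GET specs above map onto calls like the following sketch; the stats body carries the feed status (for example "status":"STOPPED", as asserted in the integration test):

    import java.io.IOException;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    public class GetDatafeedCalls {
        public static Response getConfig(RestClient client, String datafeedId) throws IOException {
            return client.performRequest("GET", "/_xpack/ml/datafeeds/" + datafeedId);
        }

        public static Response getStats(RestClient client, String datafeedId) throws IOException {
            return client.performRequest("GET", "/_xpack/ml/datafeeds/" + datafeedId + "/_stats");
        }
    }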


@ -1,19 +0,0 @@
{
"xpack.ml.get_schedulers": {
"methods": [ "GET"],
"url": {
"path": "/_xpack/ml/schedulers/{scheduler_id}",
"paths": [
"/_xpack/ml/schedulers/{scheduler_id}"
],
"parts": {
"scheduler_id": {
"type": "string",
"required": true,
"description": "The ID of the schedulers to fetch"
}
}
},
"body": null
}
}


@ -1,19 +0,0 @@
{
"xpack.ml.get_schedulers_stats": {
"methods": [ "GET"],
"url": {
"path": "/_xpack/ml/schedulers/{scheduler_id}/_stats",
"paths": [
"/_xpack/ml/schedulers/{scheduler_id}/_stats"
],
"parts": {
"scheduler_id": {
"type": "string",
"required": true,
"description": "The ID of the schedulers stats to fetch"
}
}
},
"body": null
}
}

View File

@ -0,0 +1,20 @@
{
"xpack.ml.put_datafeed": {
"methods": [ "PUT" ],
"url": {
"path": "/_xpack/ml/datafeeds/{datafeed_id}",
"paths": [ "/_xpack/ml/datafeeds/{datafeed_id}" ],
"parts": {
"datafeed_id": {
"type": "string",
"required": true,
"description": "The ID of the datafeed to create"
}
}
},
"body": {
"description" : "The datafeed config",
"required" : true
}
}
}
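A hedged sketch of creating a datafeed over HTTP from Java; the body mirrors the YAML tests below, and the job "job-1" is assumed to exist already:

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class PutDatafeedExample {
    public static void main(String[] args) throws Exception {
        String body = "{\"job_id\":\"job-1\",\"indexes\":[\"index-foo\"],\"types\":[\"type-bar\"]}";
        URL url = new URL("http://localhost:9200/_xpack/ml/datafeeds/test-datafeed-1");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("PUT");
        conn.setRequestProperty("Content-Type", "application/json");
        conn.setDoOutput(true);
        try (OutputStream out = conn.getOutputStream()) {
            out.write(body.getBytes(StandardCharsets.UTF_8));
        }
        // Repeating the PUT with the same ID should yield resource_already_exists_exception
        System.out.println("HTTP " + conn.getResponseCode());
        conn.disconnect();
    }
}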

View File

@ -1,20 +0,0 @@
{
"xpack.ml.put_scheduler": {
"methods": [ "PUT" ],
"url": {
"path": "/_xpack/ml/schedulers/{scheduler_id}",
"paths": [ "/_xpack/ml/schedulers/{scheduler_id}" ],
"parts": {
"scheduler_id": {
"type": "string",
"required": true,
"description": "The ID of the scheduler to create"
}
}
},
"body": {
"description" : "The scheduler config",
"required" : true
}
}
}

View File

@ -0,0 +1,35 @@
{
"xpack.ml.start_datafeed": {
"methods": [ "POST" ],
"url": {
"path": "/_xpack/ml/datafeeds/{datafeed_id}/_start",
"paths": [ "/_xpack/ml/datafeeds/{datafeed_id}/_start" ],
"parts": {
"datafeed_id": {
"type": "string",
"required": true,
"description": "The ID of the datafeed to start"
}
},
"params": {
"start": {
"type": "string",
"required": false,
"description": "The start time from where the datafeed should begin"
},
"end": {
"type": "string",
"required": false,
"description": "The end time when the datafeed should stop. When not set, the datafeed continues in real time"
},
"start_timeout": {
"type": "time",
"description": "Controls the time to wait until a datafeed has started. Default to 30 seconds"
}
}
},
"body": {
"description": "The start datafeed parameters"
}
}
}
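Start parameters travel as query-string params; a minimal sketch, assuming the job backing datafeed-1 is already open (the host, IDs, and the epoch-millis end value are all assumptions):

import java.net.HttpURLConnection;
import java.net.URL;

public class StartDatafeedExample {
    public static void main(String[] args) throws Exception {
        // Omitting "end" would leave the datafeed running in real time, per the spec above
        URL url = new URL("http://localhost:9200/_xpack/ml/datafeeds/datafeed-1/_start?start=0&end=1485000000000");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        // Expect 409 (conflict) if the job is not OPENED or the datafeed is already STARTED
        System.out.println("HTTP " + conn.getResponseCode());
        conn.disconnect();
    }
}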

View File

@ -1,35 +0,0 @@
{
"xpack.ml.start_scheduler": {
"methods": [ "POST" ],
"url": {
"path": "/_xpack/ml/schedulers/{scheduler_id}/_start",
"paths": [ "/_xpack/ml/schedulers/{scheduler_id}/_start" ],
"parts": {
"scheduler_id": {
"type": "string",
"required": true,
"description": "The ID of the scheduler to start"
}
},
"params": {
"start": {
"type": "string",
"required": false,
"description": "The start time from where the scheduler should begin"
},
"end": {
"type": "string",
"required": false,
"description": "The end time when the scheduler should stop. When not set, the scheduler continues in real time"
},
"start_timeout": {
"type": "time",
"description": "Controls the time to wait until a scheduler has started. Default to 30 seconds"
}
}
},
"body": {
"description": "The start scheduler parameters"
}
}
}

View File

@ -0,0 +1,27 @@
{
"xpack.ml.stop_datafeed": {
"methods": [
"POST"
],
"url": {
"path": "/_xpack/ml/datafeeds/{datafeed_id}/_stop",
"paths": [
"/_xpack/ml/datafeeds/{datafeed_id}/_stop"
],
"parts": {
"datafeed_id": {
"type": "string",
"required": true,
"description": "The ID of the datafeed to stop"
}
},
"params": {
"stop_timeout": {
"type": "time",
"description": "Controls the time to wait until a datafeed has stopped. Defaults to 30 seconds"
}
}
},
"body": null
}
}
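And the matching stop call, under the same assumptions; stopping an already-stopped datafeed returns a conflict, as the YAML tests at the end of this commit assert:

import java.net.HttpURLConnection;
import java.net.URL;

public class StopDatafeedExample {
    public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:9200/_xpack/ml/datafeeds/datafeed-1/_stop");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        System.out.println("HTTP " + conn.getResponseCode());
        conn.disconnect();
    }
}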

View File

@ -1,25 +0,0 @@
{
"xpack.ml.stop_scheduler": {
"methods": [
"POST"
],
"url": {
"path": "/_xpack/ml/schedulers/{scheduler_id}/_stop",
"paths": [
"/_xpack/ml/schedulers/{scheduler_id}/_stop"
],
"parts": {
"scheduler_id": {
"type": "string",
"required": true,
"description": "The ID of the scheduler to stop"
},
"stop_timeout": {
"type": "time",
"description": "Controls the time to wait until a scheduler has stopped. Default to 30 seconds"
}
},
"body": null
}
}
}

View File

@ -31,26 +31,26 @@ setup:
} }
--- ---
"Test get all schedulers and stats given no scheduler exists": "Test get all datafeeds and stats given no datafeed exists":
- do: - do:
xpack.ml.get_schedulers: xpack.ml.get_datafeeds:
scheduler_id: "_all" datafeed_id: "_all"
- match: { count: 0 } - match: { count: 0 }
- match: { schedulers: [] } - match: { datafeeds: [] }
- do: - do:
xpack.ml.get_schedulers_stats: xpack.ml.get_datafeeds_stats:
scheduler_id: "_all" datafeed_id: "_all"
- match: { count: 0 } - match: { count: 0 }
- match: { schedulers: [] } - match: { datafeeds: [] }
--- ---
"Test put scheduler referring to missing job_id": "Test put datafeed referring to missing job_id":
- do: - do:
catch: /resource_not_found_exception/ catch: /resource_not_found_exception/
xpack.ml.put_scheduler: xpack.ml.put_datafeed:
scheduler_id: test-scheduler-1 datafeed_id: test-datafeed-1
body: > body: >
{ {
"job_id":"a-missing-job", "job_id":"a-missing-job",
@ -59,35 +59,35 @@ setup:
} }
--- ---
"Test put scheduler referring to existing job_id": "Test put datafeed referring to existing job_id":
- do: - do:
xpack.ml.put_scheduler: xpack.ml.put_datafeed:
scheduler_id: test-scheduler-1 datafeed_id: test-datafeed-1
body: > body: >
{ {
"job_id":"job-1", "job_id":"job-1",
"indexes":["index-foo"], "indexes":["index-foo"],
"types":["type-bar"] "types":["type-bar"]
} }
- match: { scheduler_id: "test-scheduler-1" } - match: { datafeed_id: "test-datafeed-1" }
--- ---
"Test put scheduler whose id is already taken": "Test put datafeed whose id is already taken":
- do: - do:
xpack.ml.put_scheduler: xpack.ml.put_datafeed:
scheduler_id: test-scheduler-1 datafeed_id: test-datafeed-1
body: > body: >
{ {
"job_id":"job-1", "job_id":"job-1",
"indexes":["index-foo"], "indexes":["index-foo"],
"types":["type-bar"] "types":["type-bar"]
} }
- match: { scheduler_id: "test-scheduler-1" } - match: { datafeed_id: "test-datafeed-1" }
- do: - do:
catch: /resource_already_exists_exception/ catch: /resource_already_exists_exception/
xpack.ml.put_scheduler: xpack.ml.put_datafeed:
scheduler_id: test-scheduler-1 datafeed_id: test-datafeed-1
body: > body: >
{ {
"job_id":"job-2", "job_id":"job-2",
@ -96,22 +96,22 @@ setup:
} }
--- ---
"Test put scheduler with job_id that is already used by another scheduler": "Test put datafeed with job_id that is already used by another datafeed":
- do: - do:
xpack.ml.put_scheduler: xpack.ml.put_datafeed:
scheduler_id: test-scheduler-1 datafeed_id: test-datafeed-1
body: > body: >
{ {
"job_id":"job-1", "job_id":"job-1",
"indexes":["index-foo"], "indexes":["index-foo"],
"types":["type-bar"] "types":["type-bar"]
} }
- match: { scheduler_id: "test-scheduler-1" } - match: { datafeed_id: "test-datafeed-1" }
- do: - do:
catch: /A scheduler \[test-scheduler-1\] already exists for job \[job-1\]/ catch: /A datafeed \[test-datafeed-1\] already exists for job \[job-1\]/
xpack.ml.put_scheduler: xpack.ml.put_datafeed:
scheduler_id: test-scheduler-2 datafeed_id: test-datafeed-2
body: > body: >
{ {
"job_id":"job-1", "job_id":"job-1",
@ -120,11 +120,11 @@ setup:
} }
--- ---
"Test put scheduler with invalid query": "Test put datafeed with invalid query":
- do: - do:
catch: /parsing_exception/ catch: /parsing_exception/
xpack.ml.put_scheduler: xpack.ml.put_datafeed:
scheduler_id: test-scheduler-1 datafeed_id: test-datafeed-1
body: > body: >
{ {
"job_id":"job-1", "job_id":"job-1",
@ -134,26 +134,26 @@ setup:
} }
--- ---
"Test delete scheduler with missing id": "Test delete datafeed with missing id":
- do: - do:
catch: /resource_not_found_exception/ catch: /resource_not_found_exception/
xpack.ml.delete_scheduler: xpack.ml.delete_datafeed:
scheduler_id: a-missing-scheduler datafeed_id: a-missing-datafeed
--- ---
"Test delete scheduler": "Test delete datafeed":
- do: - do:
xpack.ml.put_scheduler: xpack.ml.put_datafeed:
scheduler_id: test-scheduler-1 datafeed_id: test-datafeed-1
body: > body: >
{ {
"job_id":"job-1", "job_id":"job-1",
"indexes":["index-foo"], "indexes":["index-foo"],
"types":["type-bar"] "types":["type-bar"]
} }
- match: { scheduler_id: "test-scheduler-1" } - match: { datafeed_id: "test-datafeed-1" }
- do: - do:
xpack.ml.delete_scheduler: xpack.ml.delete_datafeed:
scheduler_id: test-scheduler-1 datafeed_id: test-datafeed-1
- match: { acknowledged: true } - match: { acknowledged: true }

View File

@ -32,8 +32,8 @@ setup:
} }
- do: - do:
xpack.ml.put_scheduler: xpack.ml.put_datafeed:
scheduler_id: scheduler-1 datafeed_id: datafeed-1
body: > body: >
{ {
"job_id":"job-1", "job_id":"job-1",
@ -42,8 +42,8 @@ setup:
} }
- do: - do:
xpack.ml.put_scheduler: xpack.ml.put_datafeed:
scheduler_id: scheduler-2 datafeed_id: datafeed-2
body: > body: >
{ {
"job_id":"job-2", "job_id":"job-2",
@ -52,36 +52,36 @@ setup:
} }
--- ---
"Test get scheduler given missing scheduler_id": "Test get datafeed given missing datafeed_id":
- do: - do:
catch: missing catch: missing
xpack.ml.get_schedulers: xpack.ml.get_datafeeds:
scheduler_id: missing-scheduler datafeed_id: missing-datafeed
--- ---
"Test get single scheduler": "Test get single datafeed":
- do: - do:
xpack.ml.get_schedulers: xpack.ml.get_datafeeds:
scheduler_id: scheduler-1 datafeed_id: datafeed-1
- match: { schedulers.0.scheduler_id: "scheduler-1"} - match: { datafeeds.0.datafeed_id: "datafeed-1"}
- match: { schedulers.0.job_id: "job-1"} - match: { datafeeds.0.job_id: "job-1"}
- do: - do:
xpack.ml.get_schedulers: xpack.ml.get_datafeeds:
scheduler_id: scheduler-2 datafeed_id: datafeed-2
- match: { schedulers.0.scheduler_id: "scheduler-2"} - match: { datafeeds.0.datafeed_id: "datafeed-2"}
- match: { schedulers.0.job_id: "job-2"} - match: { datafeeds.0.job_id: "job-2"}
--- ---
"Test get all schedulers": "Test get all datafeeds":
- do: - do:
xpack.ml.get_schedulers: xpack.ml.get_datafeeds:
scheduler_id: _all datafeed_id: _all
- match: { count: 2 } - match: { count: 2 }
- match: { schedulers.0.scheduler_id: "scheduler-1"} - match: { datafeeds.0.datafeed_id: "datafeed-1"}
- match: { schedulers.0.job_id: "job-1"} - match: { datafeeds.0.job_id: "job-1"}
- match: { schedulers.1.scheduler_id: "scheduler-2"} - match: { datafeeds.1.datafeed_id: "datafeed-2"}
- match: { schedulers.1.job_id: "job-2"} - match: { datafeeds.1.job_id: "job-2"}

View File

@ -32,8 +32,8 @@ setup:
} }
- do: - do:
xpack.ml.put_scheduler: xpack.ml.put_datafeed:
scheduler_id: scheduler-1 datafeed_id: datafeed-1
body: > body: >
{ {
"job_id":"job-1", "job_id":"job-1",
@ -42,8 +42,8 @@ setup:
} }
- do: - do:
xpack.ml.put_scheduler: xpack.ml.put_datafeed:
scheduler_id: scheduler-2 datafeed_id: datafeed-2
body: > body: >
{ {
"job_id":"job-2", "job_id":"job-2",
@ -52,36 +52,36 @@ setup:
} }
--- ---
"Test get scheduler stats given missing scheduler_id": "Test get datafeed stats given missing datafeed_id":
- do: - do:
catch: missing catch: missing
xpack.ml.get_schedulers_stats: xpack.ml.get_datafeeds_stats:
scheduler_id: missing-scheduler datafeed_id: missing-datafeed
--- ---
"Test get single scheduler stats": "Test get single datafeed stats":
- do: - do:
xpack.ml.get_schedulers_stats: xpack.ml.get_datafeeds_stats:
scheduler_id: scheduler-1 datafeed_id: datafeed-1
- match: { schedulers.0.scheduler_id: "scheduler-1"} - match: { datafeeds.0.datafeed_id: "datafeed-1"}
- match: { schedulers.0.status: "STOPPED"} - match: { datafeeds.0.status: "STOPPED"}
- do: - do:
xpack.ml.get_schedulers_stats: xpack.ml.get_datafeeds_stats:
scheduler_id: scheduler-2 datafeed_id: datafeed-2
- match: { schedulers.0.scheduler_id: "scheduler-2"} - match: { datafeeds.0.datafeed_id: "datafeed-2"}
- match: { schedulers.0.status: "STOPPED"} - match: { datafeeds.0.status: "STOPPED"}
--- ---
"Test get all schedulers stats": "Test get all datafeeds stats":
- do: - do:
xpack.ml.get_schedulers_stats: xpack.ml.get_datafeeds_stats:
scheduler_id: _all datafeed_id: _all
- match: { count: 2 } - match: { count: 2 }
- match: { schedulers.0.scheduler_id: "scheduler-1"} - match: { datafeeds.0.datafeed_id: "datafeed-1"}
- match: { schedulers.0.status: "STOPPED"} - match: { datafeeds.0.status: "STOPPED"}
- match: { schedulers.1.scheduler_id: "scheduler-2"} - match: { datafeeds.1.datafeed_id: "datafeed-2"}
- match: { schedulers.1.status: "STOPPED"} - match: { datafeeds.1.status: "STOPPED"}

View File

@ -182,13 +182,13 @@
} }
--- ---
"Test delete job that is referred by a scheduler": "Test delete job that is referred by a datafeed":
- do: - do:
xpack.ml.put_job: xpack.ml.put_job:
job_id: scheduler-job job_id: datafeed-job
body: > body: >
{ {
"job_id":"scheduler-job", "job_id":"datafeed-job",
"description":"Analysis of response time by airline", "description":"Analysis of response time by airline",
"analysis_config" : { "analysis_config" : {
"bucket_span":3600, "bucket_span":3600,
@ -200,20 +200,20 @@
"time_format":"yyyy-MM-dd HH:mm:ssX" "time_format":"yyyy-MM-dd HH:mm:ssX"
} }
} }
- match: { job_id: "scheduler-job" } - match: { job_id: "datafeed-job" }
- do: - do:
xpack.ml.put_scheduler: xpack.ml.put_datafeed:
scheduler_id: test-scheduler-1 datafeed_id: test-datafeed-1
body: > body: >
{ {
"job_id":"scheduler-job", "job_id":"datafeed-job",
"indexes":["index-foo"], "indexes":["index-foo"],
"types":["type-bar"] "types":["type-bar"]
} }
- match: { scheduler_id: "test-scheduler-1" } - match: { datafeed_id: "test-datafeed-1" }
- do: - do:
catch: /Cannot delete job \[scheduler-job\] while scheduler \[test-scheduler-1\] refers to it/ catch: /Cannot delete job \[datafeed-job\] while datafeed \[test-datafeed-1\] refers to it/
xpack.ml.delete_job: xpack.ml.delete_job:
job_id: scheduler-job job_id: datafeed-job

View File

@ -23,11 +23,11 @@ setup:
- do: - do:
xpack.ml.put_job: xpack.ml.put_job:
job_id: scheduled-job job_id: datafeed-job
body: > body: >
{ {
"job_id":"scheduled-job", "job_id":"datafeed-job",
"description":"A job with a scheduler", "description":"A job with a datafeed",
"analysis_config" : { "analysis_config" : {
"bucket_span":3600, "bucket_span":3600,
"detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}] "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
@ -40,14 +40,14 @@ setup:
} }
- do: - do:
xpack.ml.open_job: xpack.ml.open_job:
job_id: scheduled-job job_id: datafeed-job
- do: - do:
xpack.ml.put_scheduler: xpack.ml.put_datafeed:
scheduler_id: scheduler-1 datafeed_id: datafeed-1
body: > body: >
{ {
"job_id":"scheduled-job", "job_id":"datafeed-job",
"indexes":["farequote"], "indexes":["farequote"],
"types":["response"] "types":["response"]
} }
@ -93,12 +93,12 @@ setup:
- match: { jobs.0.status: OPENED } - match: { jobs.0.status: OPENED }
--- ---
"Test get job stats of scheduled job that has not received and data": "Test get job stats of datafeed job that has not received and data":
- do: - do:
xpack.ml.get_job_stats: xpack.ml.get_job_stats:
job_id: scheduled-job job_id: datafeed-job
- match: { jobs.0.job_id : scheduled-job } - match: { jobs.0.job_id : datafeed-job }
- match: { jobs.0.data_counts.processed_record_count: 0 } - match: { jobs.0.data_counts.processed_record_count: 0 }
- is_false: jobs.0.model_size_stats - is_false: jobs.0.model_size_stats
- match: { jobs.0.status: OPENED } - match: { jobs.0.status: OPENED }

View File

@ -0,0 +1,116 @@
setup:
- do:
indices.create:
index: airline-data
body:
mappings:
response:
properties:
time:
type: date
- do:
xpack.ml.put_job:
job_id: datafeed-job
body: >
{
"job_id":"datafeed-job",
"description":"Analysis of response time by airline",
"analysis_config" : {
"bucket_span":3600,
"detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
},
"data_description" : {
"format":"JSON",
"time_field":"time",
"time_format":"epoch"
}
}
- do:
xpack.ml.put_datafeed:
datafeed_id: datafeed-1
body: >
{
"job_id":"datafeed-job",
"indexes":"airline-data",
"types":"response"
}
---
"Test start and stop datafeed happy path":
- do:
xpack.ml.open_job:
job_id: "datafeed-job"
- do:
xpack.ml.start_datafeed:
"datafeed_id": "datafeed-1"
"start": 0
- do:
xpack.ml.get_datafeeds_stats:
datafeed_id: "datafeed-1"
- match: { datafeeds.0.status: STARTED }
- do:
xpack.ml.stop_datafeed:
"datafeed_id": "datafeed-1"
- do:
xpack.ml.get_datafeeds_stats:
datafeed_id: "datafeed-1"
- match: { datafeeds.0.status: STOPPED }
---
"Test start non existing datafeed":
- do:
catch: missing
xpack.ml.start_datafeed:
"datafeed_id": "non-existing-datafeed"
"start": 0
---
"Test start datafeed job, but not open":
- do:
catch: conflict
xpack.ml.start_datafeed:
"datafeed_id": "datafeed-1"
"start": 0
- do:
catch: /cannot start datafeed, expected job status \[OPENED\], but got \[CLOSED\]/
xpack.ml.start_datafeed:
"datafeed_id": "datafeed-1"
"start": 0
---
"Test start already started datafeed job":
- do:
xpack.ml.open_job:
job_id: "datafeed-job"
- do:
xpack.ml.start_datafeed:
"datafeed_id": "datafeed-1"
"start": 0
- do:
catch: conflict
xpack.ml.start_datafeed:
"datafeed_id": "datafeed-1"
"start": 0
- do:
catch: /datafeed already started, expected datafeed status \[STOPPED\], but got \[STARTED\]/
xpack.ml.start_datafeed:
"datafeed_id": "datafeed-1"
"start": 0
---
"Test stop non existing datafeed":
- do:
catch: missing
xpack.ml.stop_datafeed:
"datafeed_id": "non-existing-datafeed"
---
"Test stop already stopped datafeed job":
- do:
catch: conflict
xpack.ml.stop_datafeed:
"datafeed_id": "datafeed-1"
- do:
catch: /datafeed already stopped, expected datafeed status \[STARTED\], but got \[STOPPED\]/
xpack.ml.stop_datafeed:
"datafeed_id": "datafeed-1"

View File

@ -1,116 +0,0 @@
setup:
- do:
indices.create:
index: airline-data
body:
mappings:
response:
properties:
time:
type: date
- do:
xpack.ml.put_job:
job_id: scheduled-job
body: >
{
"job_id":"scheduled-job",
"description":"Analysis of response time by airline",
"analysis_config" : {
"bucket_span":3600,
"detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
},
"data_description" : {
"format":"JSON",
"time_field":"time",
"time_format":"epoch"
}
}
- do:
xpack.ml.put_scheduler:
scheduler_id: scheduler-1
body: >
{
"job_id":"scheduled-job",
"indexes":"airline-data",
"types":"response"
}
---
"Test start and stop scheduler happy path":
- do:
xpack.ml.open_job:
job_id: "scheduled-job"
- do:
xpack.ml.start_scheduler:
"scheduler_id": "scheduler-1"
"start": 0
- do:
xpack.ml.get_schedulers_stats:
scheduler_id: "scheduler-1"
- match: { schedulers.0.status: STARTED }
- do:
xpack.ml.stop_scheduler:
"scheduler_id": "scheduler-1"
- do:
xpack.ml.get_schedulers_stats:
scheduler_id: "scheduler-1"
- match: { schedulers.0.status: STOPPED }
---
"Test start non existing scheduler":
- do:
catch: missing
xpack.ml.start_scheduler:
"scheduler_id": "non-existing-scheduler"
"start": 0
---
"Test start scheduled job, but not open":
- do:
catch: conflict
xpack.ml.start_scheduler:
"scheduler_id": "scheduler-1"
"start": 0
- do:
catch: /cannot start scheduler, expected job status \[OPENED\], but got \[CLOSED\]/
xpack.ml.start_scheduler:
"scheduler_id": "scheduler-1"
"start": 0
---
"Test start already started scheduled job":
- do:
xpack.ml.open_job:
job_id: "scheduled-job"
- do:
xpack.ml.start_scheduler:
"scheduler_id": "scheduler-1"
"start": 0
- do:
catch: conflict
xpack.ml.start_scheduler:
"scheduler_id": "scheduler-1"
"start": 0
- do:
catch: /scheduler already started, expected scheduler status \[STOPPED\], but got \[STARTED\]/
xpack.ml.start_scheduler:
"scheduler_id": "scheduler-1"
"start": 0
---
"Test stop non existing scheduler":
- do:
catch: missing
xpack.ml.stop_scheduler:
"scheduler_id": "non-existing-scheduler"
---
"Test stop already stopped scheduled job":
- do:
catch: conflict
xpack.ml.stop_scheduler:
"scheduler_id": "scheduler-1"
- do:
catch: /scheduler already stopped, expected scheduler status \[STARTED\], but got \[STOPPED\]/
xpack.ml.stop_scheduler:
"scheduler_id": "scheduler-1"