Rename scheduler/scheduled to datafeed (elastic/elasticsearch#755)

Relates elastic/elasticsearch#630

The more subtle changes to the datafeed endpoints required by elastic/elasticsearch#630
are NOT in this commit, as they would be drowned out by the rename

Original commit: elastic/x-pack-elasticsearch@3318971da9
This commit is contained in:
David Roberts 2017-01-19 13:44:19 +00:00 committed by GitHub
parent 10441a3e38
commit 36bdcaff5d
90 changed files with 1612 additions and 1612 deletions

View File

@ -31,7 +31,7 @@ import org.elasticsearch.xpack.ml.action.CloseJobAction;
import org.elasticsearch.xpack.ml.action.DeleteJobAction;
import org.elasticsearch.xpack.ml.action.DeleteListAction;
import org.elasticsearch.xpack.ml.action.DeleteModelSnapshotAction;
import org.elasticsearch.xpack.ml.action.DeleteSchedulerAction;
import org.elasticsearch.xpack.ml.action.DeleteDatafeedAction;
import org.elasticsearch.xpack.ml.action.FlushJobAction;
import org.elasticsearch.xpack.ml.action.GetBucketsAction;
import org.elasticsearch.xpack.ml.action.GetCategoriesAction;
@ -41,20 +41,20 @@ import org.elasticsearch.xpack.ml.action.GetJobsStatsAction;
import org.elasticsearch.xpack.ml.action.GetListAction;
import org.elasticsearch.xpack.ml.action.GetModelSnapshotsAction;
import org.elasticsearch.xpack.ml.action.GetRecordsAction;
import org.elasticsearch.xpack.ml.action.GetSchedulersAction;
import org.elasticsearch.xpack.ml.action.GetSchedulersStatsAction;
import org.elasticsearch.xpack.ml.action.InternalStartSchedulerAction;
import org.elasticsearch.xpack.ml.action.GetDatafeedsAction;
import org.elasticsearch.xpack.ml.action.GetDatafeedsStatsAction;
import org.elasticsearch.xpack.ml.action.InternalStartDatafeedAction;
import org.elasticsearch.xpack.ml.action.OpenJobAction;
import org.elasticsearch.xpack.ml.action.PostDataAction;
import org.elasticsearch.xpack.ml.action.PutJobAction;
import org.elasticsearch.xpack.ml.action.PutListAction;
import org.elasticsearch.xpack.ml.action.PutSchedulerAction;
import org.elasticsearch.xpack.ml.action.PutDatafeedAction;
import org.elasticsearch.xpack.ml.action.RevertModelSnapshotAction;
import org.elasticsearch.xpack.ml.action.StartSchedulerAction;
import org.elasticsearch.xpack.ml.action.StopSchedulerAction;
import org.elasticsearch.xpack.ml.action.StartDatafeedAction;
import org.elasticsearch.xpack.ml.action.StopDatafeedAction;
import org.elasticsearch.xpack.ml.action.UpdateJobStatusAction;
import org.elasticsearch.xpack.ml.action.UpdateModelSnapshotAction;
import org.elasticsearch.xpack.ml.action.UpdateSchedulerStatusAction;
import org.elasticsearch.xpack.ml.action.UpdateDatafeedStatusAction;
import org.elasticsearch.xpack.ml.action.ValidateDetectorAction;
import org.elasticsearch.xpack.ml.action.ValidateTransformAction;
import org.elasticsearch.xpack.ml.action.ValidateTransformsAction;
@ -101,16 +101,16 @@ import org.elasticsearch.xpack.ml.rest.results.RestGetBucketsAction;
import org.elasticsearch.xpack.ml.rest.results.RestGetCategoriesAction;
import org.elasticsearch.xpack.ml.rest.results.RestGetInfluencersAction;
import org.elasticsearch.xpack.ml.rest.results.RestGetRecordsAction;
import org.elasticsearch.xpack.ml.rest.schedulers.RestDeleteSchedulerAction;
import org.elasticsearch.xpack.ml.rest.schedulers.RestGetSchedulersAction;
import org.elasticsearch.xpack.ml.rest.schedulers.RestGetSchedulersStatsAction;
import org.elasticsearch.xpack.ml.rest.schedulers.RestPutSchedulerAction;
import org.elasticsearch.xpack.ml.rest.schedulers.RestStartSchedulerAction;
import org.elasticsearch.xpack.ml.rest.schedulers.RestStopSchedulerAction;
import org.elasticsearch.xpack.ml.rest.datafeeds.RestDeleteDatafeedAction;
import org.elasticsearch.xpack.ml.rest.datafeeds.RestGetDatafeedsAction;
import org.elasticsearch.xpack.ml.rest.datafeeds.RestGetDatafeedsStatsAction;
import org.elasticsearch.xpack.ml.rest.datafeeds.RestPutDatafeedAction;
import org.elasticsearch.xpack.ml.rest.datafeeds.RestStartDatafeedAction;
import org.elasticsearch.xpack.ml.rest.datafeeds.RestStopDatafeedAction;
import org.elasticsearch.xpack.ml.rest.validate.RestValidateDetectorAction;
import org.elasticsearch.xpack.ml.rest.validate.RestValidateTransformAction;
import org.elasticsearch.xpack.ml.rest.validate.RestValidateTransformsAction;
import org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunner;
import org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunner;
import org.elasticsearch.xpack.ml.utils.NamedPipeHelper;
import java.io.IOException;
@ -124,7 +124,7 @@ public class MlPlugin extends Plugin implements ActionPlugin {
public static final String NAME = "ml";
public static final String BASE_PATH = "/_xpack/ml/";
public static final String THREAD_POOL_NAME = NAME;
public static final String SCHEDULED_RUNNER_THREAD_POOL_NAME = NAME + "_scheduled_runner";
public static final String DATAFEED_RUNNER_THREAD_POOL_NAME = NAME + "_datafeed_runner";
public static final String AUTODETECT_PROCESS_THREAD_POOL_NAME = NAME + "_autodetect_process";
// NORELEASE - temporary solution
@ -199,7 +199,7 @@ public class MlPlugin extends Plugin implements ActionPlugin {
DataProcessor dataProcessor = new AutodetectProcessManager(settings, client, threadPool, jobManager, jobProvider,
jobResultsPersister, jobRenormalizedResultsPersister, jobDataCountsPersister, autodetectResultsParser,
autodetectProcessFactory, normalizerFactory);
ScheduledJobRunner scheduledJobRunner = new ScheduledJobRunner(threadPool, client, clusterService, jobProvider,
DatafeedJobRunner datafeedJobRunner = new DatafeedJobRunner(threadPool, client, clusterService, jobProvider,
System::currentTimeMillis);
JobLifeCycleService jobLifeCycleService =
@ -225,7 +225,7 @@ public class MlPlugin extends Plugin implements ActionPlugin {
dataProcessor,
new MlInitializationService(settings, threadPool, clusterService, jobProvider),
jobDataCountsPersister,
scheduledJobRunner
datafeedJobRunner
);
}
@ -253,12 +253,12 @@ public class MlPlugin extends Plugin implements ActionPlugin {
RestGetModelSnapshotsAction.class,
RestRevertModelSnapshotAction.class,
RestUpdateModelSnapshotAction.class,
RestGetSchedulersAction.class,
RestGetSchedulersStatsAction.class,
RestPutSchedulerAction.class,
RestDeleteSchedulerAction.class,
RestStartSchedulerAction.class,
RestStopSchedulerAction.class,
RestGetDatafeedsAction.class,
RestGetDatafeedsStatsAction.class,
RestPutDatafeedAction.class,
RestDeleteDatafeedAction.class,
RestStartDatafeedAction.class,
RestStopDatafeedAction.class,
RestDeleteModelSnapshotAction.class
);
}
@ -272,7 +272,7 @@ public class MlPlugin extends Plugin implements ActionPlugin {
new ActionHandler<>(DeleteJobAction.INSTANCE, DeleteJobAction.TransportAction.class),
new ActionHandler<>(OpenJobAction.INSTANCE, OpenJobAction.TransportAction.class),
new ActionHandler<>(UpdateJobStatusAction.INSTANCE, UpdateJobStatusAction.TransportAction.class),
new ActionHandler<>(UpdateSchedulerStatusAction.INSTANCE, UpdateSchedulerStatusAction.TransportAction.class),
new ActionHandler<>(UpdateDatafeedStatusAction.INSTANCE, UpdateDatafeedStatusAction.TransportAction.class),
new ActionHandler<>(GetListAction.INSTANCE, GetListAction.TransportAction.class),
new ActionHandler<>(PutListAction.INSTANCE, PutListAction.TransportAction.class),
new ActionHandler<>(DeleteListAction.INSTANCE, DeleteListAction.TransportAction.class),
@ -289,13 +289,13 @@ public class MlPlugin extends Plugin implements ActionPlugin {
new ActionHandler<>(GetModelSnapshotsAction.INSTANCE, GetModelSnapshotsAction.TransportAction.class),
new ActionHandler<>(RevertModelSnapshotAction.INSTANCE, RevertModelSnapshotAction.TransportAction.class),
new ActionHandler<>(UpdateModelSnapshotAction.INSTANCE, UpdateModelSnapshotAction.TransportAction.class),
new ActionHandler<>(GetSchedulersAction.INSTANCE, GetSchedulersAction.TransportAction.class),
new ActionHandler<>(GetSchedulersStatsAction.INSTANCE, GetSchedulersStatsAction.TransportAction.class),
new ActionHandler<>(PutSchedulerAction.INSTANCE, PutSchedulerAction.TransportAction.class),
new ActionHandler<>(DeleteSchedulerAction.INSTANCE, DeleteSchedulerAction.TransportAction.class),
new ActionHandler<>(StartSchedulerAction.INSTANCE, StartSchedulerAction.TransportAction.class),
new ActionHandler<>(InternalStartSchedulerAction.INSTANCE, InternalStartSchedulerAction.TransportAction.class),
new ActionHandler<>(StopSchedulerAction.INSTANCE, StopSchedulerAction.TransportAction.class),
new ActionHandler<>(GetDatafeedsAction.INSTANCE, GetDatafeedsAction.TransportAction.class),
new ActionHandler<>(GetDatafeedsStatsAction.INSTANCE, GetDatafeedsStatsAction.TransportAction.class),
new ActionHandler<>(PutDatafeedAction.INSTANCE, PutDatafeedAction.TransportAction.class),
new ActionHandler<>(DeleteDatafeedAction.INSTANCE, DeleteDatafeedAction.TransportAction.class),
new ActionHandler<>(StartDatafeedAction.INSTANCE, StartDatafeedAction.TransportAction.class),
new ActionHandler<>(InternalStartDatafeedAction.INSTANCE, InternalStartDatafeedAction.TransportAction.class),
new ActionHandler<>(StopDatafeedAction.INSTANCE, StopDatafeedAction.TransportAction.class),
new ActionHandler<>(DeleteModelSnapshotAction.INSTANCE, DeleteModelSnapshotAction.TransportAction.class)
);
}
@ -310,15 +310,15 @@ public class MlPlugin extends Plugin implements ActionPlugin {
FixedExecutorBuilder ml = new FixedExecutorBuilder(settings, THREAD_POOL_NAME,
maxNumberOfJobs * 2, 1000, "xpack.ml.thread_pool");
// fail quick to run autodetect process / scheduler, so no queues
// fail quick to run autodetect process / datafeed, so no queues
// 4 threads: for c++ logging, result processing, state processing and restore state
FixedExecutorBuilder autoDetect = new FixedExecutorBuilder(settings, AUTODETECT_PROCESS_THREAD_POOL_NAME,
maxNumberOfJobs * 4, 4, "xpack.ml.autodetect_process_thread_pool");
// TODO: if scheduled and non scheduled jobs are considered more equal and the scheduler and
// TODO: if datafeed and non datafeed jobs are considered more equal and the datafeed and
// autodetect process are created at the same time then these two different TPs can merge.
FixedExecutorBuilder scheduler = new FixedExecutorBuilder(settings, SCHEDULED_RUNNER_THREAD_POOL_NAME,
maxNumberOfJobs, 1, "xpack.ml.scheduler_thread_pool");
return Arrays.asList(ml, autoDetect, scheduler);
FixedExecutorBuilder datafeed = new FixedExecutorBuilder(settings, DATAFEED_RUNNER_THREAD_POOL_NAME,
maxNumberOfJobs, 1, "xpack.ml.datafeed_thread_pool");
return Arrays.asList(ml, autoDetect, datafeed);
}
}

View File

@ -29,20 +29,20 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import java.io.IOException;
import java.util.Objects;
public class DeleteSchedulerAction extends Action<DeleteSchedulerAction.Request, DeleteSchedulerAction.Response,
DeleteSchedulerAction.RequestBuilder> {
public class DeleteDatafeedAction extends Action<DeleteDatafeedAction.Request, DeleteDatafeedAction.Response,
DeleteDatafeedAction.RequestBuilder> {
public static final DeleteSchedulerAction INSTANCE = new DeleteSchedulerAction();
public static final String NAME = "cluster:admin/ml/scheduler/delete";
public static final DeleteDatafeedAction INSTANCE = new DeleteDatafeedAction();
public static final String NAME = "cluster:admin/ml/datafeed/delete";
private DeleteSchedulerAction() {
private DeleteDatafeedAction() {
super(NAME);
}
@ -58,17 +58,17 @@ public class DeleteSchedulerAction extends Action<DeleteSchedulerAction.Request,
public static class Request extends AcknowledgedRequest<Request> implements ToXContent {
private String schedulerId;
private String datafeedId;
public Request(String schedulerId) {
this.schedulerId = ExceptionsHelper.requireNonNull(schedulerId, SchedulerConfig.ID.getPreferredName());
public Request(String datafeedId) {
this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName());
}
Request() {
}
public String getSchedulerId() {
return schedulerId;
public String getDatafeedId() {
return datafeedId;
}
@Override
@ -79,18 +79,18 @@ public class DeleteSchedulerAction extends Action<DeleteSchedulerAction.Request,
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
schedulerId = in.readString();
datafeedId = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(schedulerId);
out.writeString(datafeedId);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(SchedulerConfig.ID.getPreferredName(), schedulerId);
builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
return builder;
}
@ -99,18 +99,18 @@ public class DeleteSchedulerAction extends Action<DeleteSchedulerAction.Request,
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Request request = (Request) o;
return Objects.equals(schedulerId, request.schedulerId);
return Objects.equals(datafeedId, request.datafeedId);
}
@Override
public int hashCode() {
return Objects.hash(schedulerId);
return Objects.hash(datafeedId);
}
}
public static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> {
public RequestBuilder(ElasticsearchClient client, DeleteSchedulerAction action) {
public RequestBuilder(ElasticsearchClient client, DeleteDatafeedAction action) {
super(client, action, new Request());
}
}
@ -142,7 +142,7 @@ public class DeleteSchedulerAction extends Action<DeleteSchedulerAction.Request,
@Inject
public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
super(settings, DeleteSchedulerAction.NAME, transportService, clusterService, threadPool, actionFilters,
super(settings, DeleteDatafeedAction.NAME, transportService, clusterService, threadPool, actionFilters,
indexNameExpressionResolver, Request::new);
}
@ -158,7 +158,7 @@ public class DeleteSchedulerAction extends Action<DeleteSchedulerAction.Request,
@Override
protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
clusterService.submitStateUpdateTask("delete-scheduler-" + request.getSchedulerId(),
clusterService.submitStateUpdateTask("delete-datafeed-" + request.getDatafeedId(),
new AckedClusterStateUpdateTask<Response>(request, listener) {
@Override
@ -170,7 +170,7 @@ public class DeleteSchedulerAction extends Action<DeleteSchedulerAction.Request,
public ClusterState execute(ClusterState currentState) throws Exception {
MlMetadata currentMetadata = state.getMetaData().custom(MlMetadata.TYPE);
MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata)
.removeScheduler(request.getSchedulerId()).build();
.removeDatafeed(request.getDatafeedId()).build();
return ClusterState.builder(state).metaData(
MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, newMetadata).build())
.build();

View File

@ -30,8 +30,8 @@ import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
import org.elasticsearch.xpack.ml.scheduler.Scheduler;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.datafeed.Datafeed;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import java.io.IOException;
@ -40,15 +40,15 @@ import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
public class GetSchedulersAction extends Action<GetSchedulersAction.Request, GetSchedulersAction.Response,
GetSchedulersAction.RequestBuilder> {
public class GetDatafeedsAction extends Action<GetDatafeedsAction.Request, GetDatafeedsAction.Response,
GetDatafeedsAction.RequestBuilder> {
public static final GetSchedulersAction INSTANCE = new GetSchedulersAction();
public static final String NAME = "cluster:admin/ml/schedulers/get";
public static final GetDatafeedsAction INSTANCE = new GetDatafeedsAction();
public static final String NAME = "cluster:admin/ml/datafeeds/get";
private static final String ALL = "_all";
private GetSchedulersAction() {
private GetDatafeedsAction() {
super(NAME);
}
@ -64,16 +64,16 @@ public class GetSchedulersAction extends Action<GetSchedulersAction.Request, Get
public static class Request extends MasterNodeReadRequest<Request> {
private String schedulerId;
private String datafeedId;
public Request(String schedulerId) {
this.schedulerId = ExceptionsHelper.requireNonNull(schedulerId, SchedulerConfig.ID.getPreferredName());
public Request(String datafeedId) {
this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName());
}
Request() {}
public String getSchedulerId() {
return schedulerId;
public String getDatafeedId() {
return datafeedId;
}
@Override
@ -84,18 +84,18 @@ public class GetSchedulersAction extends Action<GetSchedulersAction.Request, Get
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
schedulerId = in.readString();
datafeedId = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(schedulerId);
out.writeString(datafeedId);
}
@Override
public int hashCode() {
return Objects.hash(schedulerId);
return Objects.hash(datafeedId);
}
@Override
@ -107,54 +107,54 @@ public class GetSchedulersAction extends Action<GetSchedulersAction.Request, Get
return false;
}
Request other = (Request) obj;
return Objects.equals(schedulerId, other.schedulerId);
return Objects.equals(datafeedId, other.datafeedId);
}
}
public static class RequestBuilder extends MasterNodeReadOperationRequestBuilder<Request, Response, RequestBuilder> {
public RequestBuilder(ElasticsearchClient client, GetSchedulersAction action) {
public RequestBuilder(ElasticsearchClient client, GetDatafeedsAction action) {
super(client, action, new Request());
}
}
public static class Response extends ActionResponse implements ToXContentObject {
private QueryPage<SchedulerConfig> schedulers;
private QueryPage<DatafeedConfig> datafeeds;
public Response(QueryPage<SchedulerConfig> schedulers) {
this.schedulers = schedulers;
public Response(QueryPage<DatafeedConfig> datafeeds) {
this.datafeeds = datafeeds;
}
public Response() {}
public QueryPage<SchedulerConfig> getResponse() {
return schedulers;
public QueryPage<DatafeedConfig> getResponse() {
return datafeeds;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
schedulers = new QueryPage<>(in, SchedulerConfig::new);
datafeeds = new QueryPage<>(in, DatafeedConfig::new);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
schedulers.writeTo(out);
datafeeds.writeTo(out);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
schedulers.doXContentBody(builder, params);
datafeeds.doXContentBody(builder, params);
builder.endObject();
return builder;
}
@Override
public int hashCode() {
return Objects.hash(schedulers);
return Objects.hash(datafeeds);
}
@Override
@ -166,7 +166,7 @@ public class GetSchedulersAction extends Action<GetSchedulersAction.Request, Get
return false;
}
Response other = (Response) obj;
return Objects.equals(schedulers, other.schedulers);
return Objects.equals(datafeeds, other.datafeeds);
}
@SuppressWarnings("deprecation")
@ -191,7 +191,7 @@ public class GetSchedulersAction extends Action<GetSchedulersAction.Request, Get
@Inject
public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
super(settings, GetSchedulersAction.NAME, transportService, clusterService, threadPool, actionFilters,
super(settings, GetDatafeedsAction.NAME, transportService, clusterService, threadPool, actionFilters,
indexNameExpressionResolver, Request::new);
}
@ -207,20 +207,20 @@ public class GetSchedulersAction extends Action<GetSchedulersAction.Request, Get
@Override
protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
logger.debug("Get scheduler '{}'", request.getSchedulerId());
logger.debug("Get datafeed '{}'", request.getDatafeedId());
QueryPage<SchedulerConfig> response = null;
QueryPage<DatafeedConfig> response = null;
MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE);
if (ALL.equals(request.getSchedulerId())) {
List<SchedulerConfig> schedulerConfigs = mlMetadata.getSchedulers().values().stream().map(
if (ALL.equals(request.getDatafeedId())) {
List<DatafeedConfig> datafeedConfigs = mlMetadata.getDatafeeds().values().stream().map(
s -> s.getConfig()).collect(Collectors.toList());
response = new QueryPage<>(schedulerConfigs, schedulerConfigs.size(), Scheduler.RESULTS_FIELD);
response = new QueryPage<>(datafeedConfigs, datafeedConfigs.size(), Datafeed.RESULTS_FIELD);
} else {
Scheduler scheduler = mlMetadata.getScheduler(request.getSchedulerId());
if (scheduler == null) {
throw ExceptionsHelper.missingSchedulerException(request.getSchedulerId());
Datafeed datafeed = mlMetadata.getDatafeed(request.getDatafeedId());
if (datafeed == null) {
throw ExceptionsHelper.missingDatafeedException(request.getDatafeedId());
}
response = new QueryPage<>(Collections.singletonList(scheduler.getConfig()), 1, Scheduler.RESULTS_FIELD);
response = new QueryPage<>(Collections.singletonList(datafeed.getConfig()), 1, Datafeed.RESULTS_FIELD);
}
listener.onResponse(new Response(response));

View File

@ -32,9 +32,9 @@ import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
import org.elasticsearch.xpack.ml.scheduler.Scheduler;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
import org.elasticsearch.xpack.ml.datafeed.Datafeed;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import java.io.IOException;
@ -43,16 +43,16 @@ import java.util.Collection;
import java.util.List;
import java.util.Objects;
public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Request, GetSchedulersStatsAction.Response,
GetSchedulersStatsAction.RequestBuilder> {
public class GetDatafeedsStatsAction extends Action<GetDatafeedsStatsAction.Request, GetDatafeedsStatsAction.Response,
GetDatafeedsStatsAction.RequestBuilder> {
public static final GetSchedulersStatsAction INSTANCE = new GetSchedulersStatsAction();
public static final String NAME = "cluster:admin/ml/schedulers/stats/get";
public static final GetDatafeedsStatsAction INSTANCE = new GetDatafeedsStatsAction();
public static final String NAME = "cluster:admin/ml/datafeeds/stats/get";
private static final String ALL = "_all";
private static final String STATUS = "status";
private GetSchedulersStatsAction() {
private GetDatafeedsStatsAction() {
super(NAME);
}
@ -68,16 +68,16 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
public static class Request extends MasterNodeReadRequest<Request> {
private String schedulerId;
private String datafeedId;
public Request(String schedulerId) {
this.schedulerId = ExceptionsHelper.requireNonNull(schedulerId, SchedulerConfig.ID.getPreferredName());
public Request(String datafeedId) {
this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName());
}
Request() {}
public String getSchedulerId() {
return schedulerId;
public String getDatafeedId() {
return datafeedId;
}
@Override
@ -88,18 +88,18 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
schedulerId = in.readString();
datafeedId = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(schedulerId);
out.writeString(datafeedId);
}
@Override
public int hashCode() {
return Objects.hash(schedulerId);
return Objects.hash(datafeedId);
}
@Override
@ -111,47 +111,47 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
return false;
}
Request other = (Request) obj;
return Objects.equals(schedulerId, other.schedulerId);
return Objects.equals(datafeedId, other.datafeedId);
}
}
public static class RequestBuilder extends MasterNodeReadOperationRequestBuilder<Request, Response, RequestBuilder> {
public RequestBuilder(ElasticsearchClient client, GetSchedulersStatsAction action) {
public RequestBuilder(ElasticsearchClient client, GetDatafeedsStatsAction action) {
super(client, action, new Request());
}
}
public static class Response extends ActionResponse implements ToXContentObject {
public static class SchedulerStats implements ToXContent, Writeable {
public static class DatafeedStats implements ToXContent, Writeable {
private final String schedulerId;
private final SchedulerStatus schedulerStatus;
private final String datafeedId;
private final DatafeedStatus datafeedStatus;
SchedulerStats(String schedulerId, SchedulerStatus schedulerStatus) {
this.schedulerId = Objects.requireNonNull(schedulerId);
this.schedulerStatus = Objects.requireNonNull(schedulerStatus);
DatafeedStats(String datafeedId, DatafeedStatus datafeedStatus) {
this.datafeedId = Objects.requireNonNull(datafeedId);
this.datafeedStatus = Objects.requireNonNull(datafeedStatus);
}
SchedulerStats(StreamInput in) throws IOException {
schedulerId = in.readString();
schedulerStatus = SchedulerStatus.fromStream(in);
DatafeedStats(StreamInput in) throws IOException {
datafeedId = in.readString();
datafeedStatus = DatafeedStatus.fromStream(in);
}
public String getSchedulerId() {
return schedulerId;
public String getDatafeedId() {
return datafeedId;
}
public SchedulerStatus getSchedulerStatus() {
return schedulerStatus;
public DatafeedStatus getDatafeedStatus() {
return datafeedStatus;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(SchedulerConfig.ID.getPreferredName(), schedulerId);
builder.field(STATUS, schedulerStatus);
builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
builder.field(STATUS, datafeedStatus);
builder.endObject();
return builder;
@ -159,13 +159,13 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(schedulerId);
schedulerStatus.writeTo(out);
out.writeString(datafeedId);
datafeedStatus.writeTo(out);
}
@Override
public int hashCode() {
return Objects.hash(schedulerId, schedulerStatus);
return Objects.hash(datafeedId, datafeedStatus);
}
@Override
@ -176,46 +176,46 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
if (getClass() != obj.getClass()) {
return false;
}
GetSchedulersStatsAction.Response.SchedulerStats other = (GetSchedulersStatsAction.Response.SchedulerStats) obj;
return Objects.equals(schedulerId, other.schedulerId) && Objects.equals(this.schedulerStatus, other.schedulerStatus);
GetDatafeedsStatsAction.Response.DatafeedStats other = (GetDatafeedsStatsAction.Response.DatafeedStats) obj;
return Objects.equals(datafeedId, other.datafeedId) && Objects.equals(this.datafeedStatus, other.datafeedStatus);
}
}
private QueryPage<SchedulerStats> schedulersStats;
private QueryPage<DatafeedStats> datafeedsStats;
public Response(QueryPage<SchedulerStats> schedulersStats) {
this.schedulersStats = schedulersStats;
public Response(QueryPage<DatafeedStats> datafeedsStats) {
this.datafeedsStats = datafeedsStats;
}
public Response() {}
public QueryPage<SchedulerStats> getResponse() {
return schedulersStats;
public QueryPage<DatafeedStats> getResponse() {
return datafeedsStats;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
schedulersStats = new QueryPage<>(in, SchedulerStats::new);
datafeedsStats = new QueryPage<>(in, DatafeedStats::new);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
schedulersStats.writeTo(out);
datafeedsStats.writeTo(out);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
schedulersStats.doXContentBody(builder, params);
datafeedsStats.doXContentBody(builder, params);
builder.endObject();
return builder;
}
@Override
public int hashCode() {
return Objects.hash(schedulersStats);
return Objects.hash(datafeedsStats);
}
@Override
@ -227,7 +227,7 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
return false;
}
Response other = (Response) obj;
return Objects.equals(schedulersStats, other.schedulersStats);
return Objects.equals(datafeedsStats, other.datafeedsStats);
}
@SuppressWarnings("deprecation")
@ -252,7 +252,7 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
@Inject
public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
super(settings, GetSchedulersStatsAction.NAME, transportService, clusterService, threadPool, actionFilters,
super(settings, GetDatafeedsStatsAction.NAME, transportService, clusterService, threadPool, actionFilters,
indexNameExpressionResolver, Request::new);
}
@ -268,24 +268,24 @@ public class GetSchedulersStatsAction extends Action<GetSchedulersStatsAction.Re
@Override
protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
logger.debug("Get stats for scheduler '{}'", request.getSchedulerId());
logger.debug("Get stats for datafeed '{}'", request.getDatafeedId());
List<Response.SchedulerStats> stats = new ArrayList<>();
List<Response.DatafeedStats> stats = new ArrayList<>();
MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE);
if (ALL.equals(request.getSchedulerId())) {
Collection<Scheduler> schedulers = mlMetadata.getSchedulers().values();
for (Scheduler scheduler : schedulers) {
stats.add(new Response.SchedulerStats(scheduler.getId(), scheduler.getStatus()));
if (ALL.equals(request.getDatafeedId())) {
Collection<Datafeed> datafeeds = mlMetadata.getDatafeeds().values();
for (Datafeed datafeed : datafeeds) {
stats.add(new Response.DatafeedStats(datafeed.getId(), datafeed.getStatus()));
}
} else {
Scheduler scheduler = mlMetadata.getScheduler(request.getSchedulerId());
if (scheduler == null) {
throw ExceptionsHelper.missingSchedulerException(request.getSchedulerId());
Datafeed datafeed = mlMetadata.getDatafeed(request.getDatafeedId());
if (datafeed == null) {
throw ExceptionsHelper.missingDatafeedException(request.getDatafeedId());
}
stats.add(new Response.SchedulerStats(scheduler.getId(), scheduler.getStatus()));
stats.add(new Response.DatafeedStats(datafeed.getId(), datafeed.getStatus()));
}
QueryPage<Response.SchedulerStats> statsPage = new QueryPage<>(stats, stats.size(), Scheduler.RESULTS_FIELD);
QueryPage<Response.DatafeedStats> statsPage = new QueryPage<>(stats, stats.size(), Datafeed.RESULTS_FIELD);
listener.onResponse(new Response(statsPage));
}

View File

@ -20,15 +20,15 @@ import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunner;
import org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunner;
public class InternalStartSchedulerAction extends
Action<InternalStartSchedulerAction.Request, InternalStartSchedulerAction.Response, InternalStartSchedulerAction.RequestBuilder> {
public class InternalStartDatafeedAction extends
Action<InternalStartDatafeedAction.Request, InternalStartDatafeedAction.Response, InternalStartDatafeedAction.RequestBuilder> {
public static final InternalStartSchedulerAction INSTANCE = new InternalStartSchedulerAction();
public static final String NAME = "cluster:admin/ml/scheduler/internal_start";
public static final InternalStartDatafeedAction INSTANCE = new InternalStartDatafeedAction();
public static final String NAME = "cluster:admin/ml/datafeed/internal_start";
private InternalStartSchedulerAction() {
private InternalStartDatafeedAction() {
super(NAME);
}
@ -42,10 +42,10 @@ public class InternalStartSchedulerAction extends
return new Response();
}
public static class Request extends StartSchedulerAction.Request {
public static class Request extends StartDatafeedAction.Request {
Request(String schedulerId, long startTime) {
super(schedulerId, startTime);
Request(String datafeedId, long startTime) {
super(datafeedId, startTime);
}
Request() {
@ -53,13 +53,13 @@ public class InternalStartSchedulerAction extends
@Override
public Task createTask(long id, String type, String action, TaskId parentTaskId) {
return new SchedulerTask(id, type, action, parentTaskId, getSchedulerId());
return new DatafeedTask(id, type, action, parentTaskId, getDatafeedId());
}
}
static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {
public RequestBuilder(ElasticsearchClient client, InternalStartSchedulerAction action) {
public RequestBuilder(ElasticsearchClient client, InternalStartDatafeedAction action) {
super(client, action, new Request());
}
}
@ -71,15 +71,15 @@ public class InternalStartSchedulerAction extends
}
public static class SchedulerTask extends CancellableTask {
public static class DatafeedTask extends CancellableTask {
private volatile ScheduledJobRunner.Holder holder;
private volatile DatafeedJobRunner.Holder holder;
public SchedulerTask(long id, String type, String action, TaskId parentTaskId, String schedulerId) {
super(id, type, action, "scheduler-" + schedulerId, parentTaskId);
public DatafeedTask(long id, String type, String action, TaskId parentTaskId, String datafeedId) {
super(id, type, action, "datafeed-" + datafeedId, parentTaskId);
}
public void setHolder(ScheduledJobRunner.Holder holder) {
public void setHolder(DatafeedJobRunner.Holder holder) {
this.holder = holder;
}
@ -98,21 +98,21 @@ public class InternalStartSchedulerAction extends
public static class TransportAction extends HandledTransportAction<Request, Response> {
private final ScheduledJobRunner scheduledJobRunner;
private final DatafeedJobRunner datafeedJobRunner;
@Inject
public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool,
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
ScheduledJobRunner scheduledJobRunner) {
super(settings, InternalStartSchedulerAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
DatafeedJobRunner datafeedJobRunner) {
super(settings, InternalStartDatafeedAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
Request::new);
this.scheduledJobRunner = scheduledJobRunner;
this.datafeedJobRunner = datafeedJobRunner;
}
@Override
protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
SchedulerTask schedulerTask = (SchedulerTask) task;
scheduledJobRunner.run(request.getSchedulerId(), request.getStartTime(), request.getEndTime(), schedulerTask, (error) -> {
DatafeedTask datafeedTask = (DatafeedTask) task;
datafeedJobRunner.run(request.getDatafeedId(), request.getStartTime(), request.getEndTime(), datafeedTask, (error) -> {
if (error != null) {
listener.onFailure(error);
} else {

View File

@ -32,17 +32,17 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.io.IOException;
import java.util.Objects;
public class PutSchedulerAction extends Action<PutSchedulerAction.Request, PutSchedulerAction.Response, PutSchedulerAction.RequestBuilder> {
public class PutDatafeedAction extends Action<PutDatafeedAction.Request, PutDatafeedAction.Response, PutDatafeedAction.RequestBuilder> {
public static final PutSchedulerAction INSTANCE = new PutSchedulerAction();
public static final String NAME = "cluster:admin/ml/scheduler/put";
public static final PutDatafeedAction INSTANCE = new PutDatafeedAction();
public static final String NAME = "cluster:admin/ml/datafeed/put";
private PutSchedulerAction() {
private PutDatafeedAction() {
super(NAME);
}
@ -58,23 +58,23 @@ public class PutSchedulerAction extends Action<PutSchedulerAction.Request, PutSc
public static class Request extends AcknowledgedRequest<Request> implements ToXContent {
public static Request parseRequest(String schedulerId, XContentParser parser) {
SchedulerConfig.Builder scheduler = SchedulerConfig.PARSER.apply(parser, null);
scheduler.setId(schedulerId);
return new Request(scheduler.build());
public static Request parseRequest(String datafeedId, XContentParser parser) {
DatafeedConfig.Builder datafeed = DatafeedConfig.PARSER.apply(parser, null);
datafeed.setId(datafeedId);
return new Request(datafeed.build());
}
private SchedulerConfig scheduler;
private DatafeedConfig datafeed;
public Request(SchedulerConfig scheduler) {
this.scheduler = scheduler;
public Request(DatafeedConfig datafeed) {
this.datafeed = datafeed;
}
Request() {
}
public SchedulerConfig getScheduler() {
return scheduler;
public DatafeedConfig getDatafeed() {
return datafeed;
}
@Override
@ -85,18 +85,18 @@ public class PutSchedulerAction extends Action<PutSchedulerAction.Request, PutSc
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
scheduler = new SchedulerConfig(in);
datafeed = new DatafeedConfig(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
scheduler.writeTo(out);
datafeed.writeTo(out);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
scheduler.toXContent(builder, params);
datafeed.toXContent(builder, params);
return builder;
}
@ -105,56 +105,56 @@ public class PutSchedulerAction extends Action<PutSchedulerAction.Request, PutSc
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Request request = (Request) o;
return Objects.equals(scheduler, request.scheduler);
return Objects.equals(datafeed, request.datafeed);
}
@Override
public int hashCode() {
return Objects.hash(scheduler);
return Objects.hash(datafeed);
}
}
public static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> {
public RequestBuilder(ElasticsearchClient client, PutSchedulerAction action) {
public RequestBuilder(ElasticsearchClient client, PutDatafeedAction action) {
super(client, action, new Request());
}
}
public static class Response extends AcknowledgedResponse implements ToXContentObject {
private SchedulerConfig scheduler;
private DatafeedConfig datafeed;
public Response(boolean acked, SchedulerConfig scheduler) {
public Response(boolean acked, DatafeedConfig datafeed) {
super(acked);
this.scheduler = scheduler;
this.datafeed = datafeed;
}
Response() {
}
public SchedulerConfig getResponse() {
return scheduler;
public DatafeedConfig getResponse() {
return datafeed;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
readAcknowledged(in);
scheduler = new SchedulerConfig(in);
datafeed = new DatafeedConfig(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
writeAcknowledged(out);
scheduler.writeTo(out);
datafeed.writeTo(out);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
scheduler.doXContentBody(builder, params);
datafeed.doXContentBody(builder, params);
builder.endObject();
return builder;
}
@ -164,12 +164,12 @@ public class PutSchedulerAction extends Action<PutSchedulerAction.Request, PutSc
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Response response = (Response) o;
return Objects.equals(scheduler, response.scheduler);
return Objects.equals(datafeed, response.datafeed);
}
@Override
public int hashCode() {
return Objects.hash(scheduler);
return Objects.hash(datafeed);
}
}
@ -179,7 +179,7 @@ public class PutSchedulerAction extends Action<PutSchedulerAction.Request, PutSc
public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver) {
super(settings, PutSchedulerAction.NAME, transportService, clusterService, threadPool, actionFilters,
super(settings, PutDatafeedAction.NAME, transportService, clusterService, threadPool, actionFilters,
indexNameExpressionResolver, Request::new);
}
@ -195,28 +195,28 @@ public class PutSchedulerAction extends Action<PutSchedulerAction.Request, PutSc
@Override
protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
clusterService.submitStateUpdateTask("put-scheduler-" + request.getScheduler().getId(),
clusterService.submitStateUpdateTask("put-datafeed-" + request.getDatafeed().getId(),
new AckedClusterStateUpdateTask<Response>(request, listener) {
@Override
protected Response newResponse(boolean acknowledged) {
if (acknowledged) {
logger.info("Created scheduler [{}]", request.getScheduler().getId());
logger.info("Created datafeed [{}]", request.getDatafeed().getId());
}
return new Response(acknowledged, request.getScheduler());
return new Response(acknowledged, request.getDatafeed());
}
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
return putScheduler(request, currentState);
return putDatafeed(request, currentState);
}
});
}
private ClusterState putScheduler(Request request, ClusterState clusterState) {
private ClusterState putDatafeed(Request request, ClusterState clusterState) {
MlMetadata currentMetadata = clusterState.getMetaData().custom(MlMetadata.TYPE);
MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata)
.putScheduler(request.getScheduler()).build();
.putDatafeed(request.getDatafeed()).build();
return ClusterState.builder(clusterState).metaData(
MetaData.builder(clusterState.getMetaData()).putCustom(MlMetadata.TYPE, newMetadata).build())
.build();

View File

@ -31,26 +31,26 @@ import org.elasticsearch.tasks.LoggingTaskListener;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunner;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
import org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunner;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.utils.SchedulerStatusObserver;
import org.elasticsearch.xpack.ml.utils.DatafeedStatusObserver;
import java.io.IOException;
import java.util.Objects;
public class StartSchedulerAction
extends Action<StartSchedulerAction.Request, StartSchedulerAction.Response, StartSchedulerAction.RequestBuilder> {
public class StartDatafeedAction
extends Action<StartDatafeedAction.Request, StartDatafeedAction.Response, StartDatafeedAction.RequestBuilder> {
public static final ParseField START_TIME = new ParseField("start");
public static final ParseField END_TIME = new ParseField("end");
public static final ParseField START_TIMEOUT = new ParseField("start_timeout");
public static final StartSchedulerAction INSTANCE = new StartSchedulerAction();
public static final String NAME = "cluster:admin/ml/scheduler/start";
public static final StartDatafeedAction INSTANCE = new StartDatafeedAction();
public static final String NAME = "cluster:admin/ml/datafeed/start";
private StartSchedulerAction() {
private StartDatafeedAction() {
super(NAME);
}
@ -69,36 +69,36 @@ public class StartSchedulerAction
public static ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);
static {
PARSER.declareString((request, schedulerId) -> request.schedulerId = schedulerId, SchedulerConfig.ID);
PARSER.declareString((request, datafeedId) -> request.datafeedId = datafeedId, DatafeedConfig.ID);
PARSER.declareLong((request, startTime) -> request.startTime = startTime, START_TIME);
PARSER.declareLong(Request::setEndTime, END_TIME);
PARSER.declareString((request, val) -> request.setStartTimeout(TimeValue.parseTimeValue(val,
START_TIME.getPreferredName())), START_TIMEOUT);
}
public static Request parseRequest(String schedulerId, XContentParser parser) {
public static Request parseRequest(String datafeedId, XContentParser parser) {
Request request = PARSER.apply(parser, null);
if (schedulerId != null) {
request.schedulerId = schedulerId;
if (datafeedId != null) {
request.datafeedId = datafeedId;
}
return request;
}
private String schedulerId;
private String datafeedId;
private long startTime;
private Long endTime;
private TimeValue startTimeout = TimeValue.timeValueSeconds(30);
public Request(String schedulerId, long startTime) {
this.schedulerId = ExceptionsHelper.requireNonNull(schedulerId, SchedulerConfig.ID.getPreferredName());
public Request(String datafeedId, long startTime) {
this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName());
this.startTime = startTime;
}
Request() {
}
public String getSchedulerId() {
return schedulerId;
public String getDatafeedId() {
return datafeedId;
}
public long getStartTime() {
@ -129,7 +129,7 @@ public class StartSchedulerAction
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
schedulerId = in.readString();
datafeedId = in.readString();
startTime = in.readVLong();
endTime = in.readOptionalLong();
startTimeout = new TimeValue(in.readVLong());
@ -138,7 +138,7 @@ public class StartSchedulerAction
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(schedulerId);
out.writeString(datafeedId);
out.writeVLong(startTime);
out.writeOptionalLong(endTime);
out.writeVLong(startTimeout.millis());
@ -147,7 +147,7 @@ public class StartSchedulerAction
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(SchedulerConfig.ID.getPreferredName(), schedulerId);
builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
builder.field(START_TIME.getPreferredName(), startTime);
if (endTime != null) {
builder.field(END_TIME.getPreferredName(), endTime);
@ -158,7 +158,7 @@ public class StartSchedulerAction
@Override
public int hashCode() {
return Objects.hash(schedulerId, startTime, endTime);
return Objects.hash(datafeedId, startTime, endTime);
}
@Override
@ -170,7 +170,7 @@ public class StartSchedulerAction
return false;
}
Request other = (Request) obj;
return Objects.equals(schedulerId, other.schedulerId) &&
return Objects.equals(datafeedId, other.datafeedId) &&
Objects.equals(startTime, other.startTime) &&
Objects.equals(endTime, other.endTime);
}
@ -178,7 +178,7 @@ public class StartSchedulerAction
static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {
public RequestBuilder(ElasticsearchClient client, StartSchedulerAction action) {
public RequestBuilder(ElasticsearchClient client, StartDatafeedAction action) {
super(client, action, new Request());
}
}
@ -222,33 +222,33 @@ public class StartSchedulerAction
public static class TransportAction extends HandledTransportAction<Request, Response> {
private final ClusterService clusterService;
private final SchedulerStatusObserver schedulerStatusObserver;
private final InternalStartSchedulerAction.TransportAction transportAction;
private final DatafeedStatusObserver datafeedStatusObserver;
private final InternalStartDatafeedAction.TransportAction transportAction;
@Inject
public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool,
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
ClusterService clusterService, InternalStartSchedulerAction.TransportAction transportAction) {
super(settings, StartSchedulerAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
ClusterService clusterService, InternalStartDatafeedAction.TransportAction transportAction) {
super(settings, StartDatafeedAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
Request::new);
this.clusterService = clusterService;
this.schedulerStatusObserver = new SchedulerStatusObserver(threadPool, clusterService);
this.datafeedStatusObserver = new DatafeedStatusObserver(threadPool, clusterService);
this.transportAction = transportAction;
}
@Override
protected void doExecute(Request request, ActionListener<Response> listener) {
// This validation happens also in ScheduledJobRunner, the reason we do it here too is that if it fails there
// This validation happens also in DatafeedJobRunner, the reason we do it here too is that if it fails there
// we are unable to provide the user immediate feedback. We would create the task and the validation would fail
// in the background, whereas now the validation failure is part of the response being returned.
MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
ScheduledJobRunner.validate(request.schedulerId, mlMetadata);
DatafeedJobRunner.validate(request.datafeedId, mlMetadata);
InternalStartSchedulerAction.Request internalRequest =
new InternalStartSchedulerAction.Request(request.schedulerId, request.startTime);
InternalStartDatafeedAction.Request internalRequest =
new InternalStartDatafeedAction.Request(request.datafeedId, request.startTime);
internalRequest.setEndTime(request.endTime);
transportAction.execute(internalRequest, LoggingTaskListener.instance());
schedulerStatusObserver.waitForStatus(request.schedulerId, request.startTimeout, SchedulerStatus.STARTED, e -> {
datafeedStatusObserver.waitForStatus(request.datafeedId, request.startTimeout, DatafeedStatus.STARTED, e -> {
if (e != null) {
listener.onFailure(e);
} else {

View File

@ -35,22 +35,22 @@ import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.scheduler.Scheduler;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
import org.elasticsearch.xpack.ml.datafeed.Datafeed;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.utils.SchedulerStatusObserver;
import org.elasticsearch.xpack.ml.utils.DatafeedStatusObserver;
import java.io.IOException;
import java.util.Objects;
public class StopSchedulerAction
extends Action<StopSchedulerAction.Request, StopSchedulerAction.Response, StopSchedulerAction.RequestBuilder> {
public class StopDatafeedAction
extends Action<StopDatafeedAction.Request, StopDatafeedAction.Response, StopDatafeedAction.RequestBuilder> {
public static final StopSchedulerAction INSTANCE = new StopSchedulerAction();
public static final String NAME = "cluster:admin/ml/scheduler/stop";
public static final StopDatafeedAction INSTANCE = new StopDatafeedAction();
public static final String NAME = "cluster:admin/ml/datafeed/stop";
private StopSchedulerAction() {
private StopDatafeedAction() {
super(NAME);
}
@ -66,18 +66,18 @@ public class StopSchedulerAction
public static class Request extends ActionRequest {
private String schedulerId;
private String datafeedId;
private TimeValue stopTimeout = TimeValue.timeValueSeconds(30);
public Request(String jobId) {
this.schedulerId = ExceptionsHelper.requireNonNull(jobId, SchedulerConfig.ID.getPreferredName());
this.datafeedId = ExceptionsHelper.requireNonNull(jobId, DatafeedConfig.ID.getPreferredName());
}
Request() {
}
public String getSchedulerId() {
return schedulerId;
public String getDatafeedId() {
return datafeedId;
}
public void setStopTimeout(TimeValue stopTimeout) {
@ -92,18 +92,18 @@ public class StopSchedulerAction
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
schedulerId = in.readString();
datafeedId = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(schedulerId);
out.writeString(datafeedId);
}
@Override
public int hashCode() {
return Objects.hash(schedulerId);
return Objects.hash(datafeedId);
}
@Override
@ -115,13 +115,13 @@ public class StopSchedulerAction
return false;
}
Request other = (Request) obj;
return Objects.equals(schedulerId, other.schedulerId);
return Objects.equals(datafeedId, other.datafeedId);
}
}
static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {
public RequestBuilder(ElasticsearchClient client, StopSchedulerAction action) {
public RequestBuilder(ElasticsearchClient client, StopDatafeedAction action) {
super(client, action, new Request());
}
}
@ -148,34 +148,34 @@ public class StopSchedulerAction
private final ClusterService clusterService;
private final TransportListTasksAction listTasksAction;
private final TransportCancelTasksAction cancelTasksAction;
private final SchedulerStatusObserver schedulerStatusObserver;
private final DatafeedStatusObserver datafeedStatusObserver;
@Inject
public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool,
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
ClusterService clusterService, TransportCancelTasksAction cancelTasksAction,
TransportListTasksAction listTasksAction) {
super(settings, StopSchedulerAction.NAME, threadPool, transportService, actionFilters,
super(settings, StopDatafeedAction.NAME, threadPool, transportService, actionFilters,
indexNameExpressionResolver, Request::new);
this.clusterService = clusterService;
this.listTasksAction = listTasksAction;
this.cancelTasksAction = cancelTasksAction;
this.schedulerStatusObserver = new SchedulerStatusObserver(threadPool, clusterService);
this.datafeedStatusObserver = new DatafeedStatusObserver(threadPool, clusterService);
}
@Override
protected void doExecute(Request request, ActionListener<Response> listener) {
String schedulerId = request.getSchedulerId();
String datafeedId = request.getDatafeedId();
MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
validate(schedulerId, mlMetadata);
validate(datafeedId, mlMetadata);
ListTasksRequest listTasksRequest = new ListTasksRequest();
listTasksRequest.setActions(InternalStartSchedulerAction.NAME);
listTasksRequest.setActions(InternalStartDatafeedAction.NAME);
listTasksRequest.setDetailed(true);
listTasksAction.execute(listTasksRequest, new ActionListener<ListTasksResponse>() {
@Override
public void onResponse(ListTasksResponse listTasksResponse) {
String expectedJobDescription = "scheduler-" + schedulerId;
String expectedJobDescription = "datafeed-" + datafeedId;
for (TaskInfo taskInfo : listTasksResponse.getTasks()) {
if (expectedJobDescription.equals(taskInfo.getDescription())) {
CancelTasksRequest cancelTasksRequest = new CancelTasksRequest();
@ -183,7 +183,7 @@ public class StopSchedulerAction
cancelTasksAction.execute(cancelTasksRequest, new ActionListener<CancelTasksResponse>() {
@Override
public void onResponse(CancelTasksResponse cancelTasksResponse) {
schedulerStatusObserver.waitForStatus(schedulerId, request.stopTimeout, SchedulerStatus.STOPPED, e -> {
datafeedStatusObserver.waitForStatus(datafeedId, request.stopTimeout, DatafeedStatus.STOPPED, e -> {
if (e != null) {
listener.onFailure(e);
} else {
@ -200,7 +200,7 @@ public class StopSchedulerAction
return;
}
}
listener.onFailure(new ResourceNotFoundException("No scheduler [" + schedulerId + "] running"));
listener.onFailure(new ResourceNotFoundException("No datafeed [" + datafeedId + "] running"));
}
@Override
@ -213,15 +213,15 @@ public class StopSchedulerAction
}
static void validate(String schedulerId, MlMetadata mlMetadata) {
Scheduler scheduler = mlMetadata.getScheduler(schedulerId);
if (scheduler == null) {
throw new ResourceNotFoundException(Messages.getMessage(Messages.SCHEDULER_NOT_FOUND, schedulerId));
static void validate(String datafeedId, MlMetadata mlMetadata) {
Datafeed datafeed = mlMetadata.getDatafeed(datafeedId);
if (datafeed == null) {
throw new ResourceNotFoundException(Messages.getMessage(Messages.DATAFEED_NOT_FOUND, datafeedId));
}
if (scheduler.getStatus() == SchedulerStatus.STOPPED) {
throw new ElasticsearchStatusException("scheduler already stopped, expected scheduler status [{}], but got [{}]",
RestStatus.CONFLICT, SchedulerStatus.STARTED, scheduler.getStatus());
if (datafeed.getStatus() == DatafeedStatus.STOPPED) {
throw new ElasticsearchStatusException("datafeed already stopped, expected datafeed status [{}], but got [{}]",
RestStatus.CONFLICT, DatafeedStatus.STARTED, datafeed.getStatus());
}
}
}

View File

@ -25,21 +25,21 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.job.manager.JobManager;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import java.io.IOException;
import java.util.Objects;
public class UpdateSchedulerStatusAction extends Action<UpdateSchedulerStatusAction.Request,
UpdateSchedulerStatusAction.Response, UpdateSchedulerStatusAction.RequestBuilder> {
public class UpdateDatafeedStatusAction extends Action<UpdateDatafeedStatusAction.Request,
UpdateDatafeedStatusAction.Response, UpdateDatafeedStatusAction.RequestBuilder> {
public static final UpdateSchedulerStatusAction INSTANCE = new UpdateSchedulerStatusAction();
public static final String NAME = "cluster:admin/ml/scheduler/status/update";
public static final UpdateDatafeedStatusAction INSTANCE = new UpdateDatafeedStatusAction();
public static final String NAME = "cluster:admin/ml/datafeed/status/update";
private UpdateSchedulerStatusAction() {
private UpdateDatafeedStatusAction() {
super(NAME);
}
@ -55,30 +55,30 @@ public class UpdateSchedulerStatusAction extends Action<UpdateSchedulerStatusAct
public static class Request extends AcknowledgedRequest<Request> {
private String schedulerId;
private SchedulerStatus schedulerStatus;
private String datafeedId;
private DatafeedStatus datafeedStatus;
public Request(String schedulerId, SchedulerStatus schedulerStatus) {
this.schedulerId = ExceptionsHelper.requireNonNull(schedulerId, SchedulerConfig.ID.getPreferredName());
this.schedulerStatus = ExceptionsHelper.requireNonNull(schedulerStatus, "status");
public Request(String datafeedId, DatafeedStatus datafeedStatus) {
this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName());
this.datafeedStatus = ExceptionsHelper.requireNonNull(datafeedStatus, "status");
}
Request() {}
public String getSchedulerId() {
return schedulerId;
public String getDatafeedId() {
return datafeedId;
}
public void setSchedulerId(String schedulerId) {
this.schedulerId = schedulerId;
public void setDatafeedId(String datafeedId) {
this.datafeedId = datafeedId;
}
public SchedulerStatus getSchedulerStatus() {
return schedulerStatus;
public DatafeedStatus getDatafeedStatus() {
return datafeedStatus;
}
public void setSchedulerStatus(SchedulerStatus schedulerStatus) {
this.schedulerStatus = schedulerStatus;
public void setDatafeedStatus(DatafeedStatus datafeedStatus) {
this.datafeedStatus = datafeedStatus;
}
@Override
@ -89,20 +89,20 @@ public class UpdateSchedulerStatusAction extends Action<UpdateSchedulerStatusAct
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
schedulerId = in.readString();
schedulerStatus = SchedulerStatus.fromStream(in);
datafeedId = in.readString();
datafeedStatus = DatafeedStatus.fromStream(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(schedulerId);
schedulerStatus.writeTo(out);
out.writeString(datafeedId);
datafeedStatus.writeTo(out);
}
@Override
public int hashCode() {
return Objects.hash(schedulerId, schedulerStatus);
return Objects.hash(datafeedId, datafeedStatus);
}
@Override
@ -113,22 +113,22 @@ public class UpdateSchedulerStatusAction extends Action<UpdateSchedulerStatusAct
if (obj == null || obj.getClass() != getClass()) {
return false;
}
UpdateSchedulerStatusAction.Request other = (UpdateSchedulerStatusAction.Request) obj;
return Objects.equals(schedulerId, other.schedulerId) && Objects.equals(schedulerStatus, other.schedulerStatus);
UpdateDatafeedStatusAction.Request other = (UpdateDatafeedStatusAction.Request) obj;
return Objects.equals(datafeedId, other.datafeedId) && Objects.equals(datafeedStatus, other.datafeedStatus);
}
@Override
public String toString() {
return "Request{" +
SchedulerConfig.ID.getPreferredName() + "='" + schedulerId + "', " +
"status=" + schedulerStatus +
DatafeedConfig.ID.getPreferredName() + "='" + datafeedId + "', " +
"status=" + datafeedStatus +
'}';
}
}
static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> {
public RequestBuilder(ElasticsearchClient client, UpdateSchedulerStatusAction action) {
public RequestBuilder(ElasticsearchClient client, UpdateDatafeedStatusAction action) {
super(client, action, new Request());
}
}
@ -162,7 +162,7 @@ public class UpdateSchedulerStatusAction extends Action<UpdateSchedulerStatusAct
public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
JobManager jobManager) {
super(settings, UpdateSchedulerStatusAction.NAME, transportService, clusterService, threadPool, actionFilters,
super(settings, UpdateDatafeedStatusAction.NAME, transportService, clusterService, threadPool, actionFilters,
indexNameExpressionResolver, Request::new);
this.jobManager = jobManager;
}
@ -179,7 +179,7 @@ public class UpdateSchedulerStatusAction extends Action<UpdateSchedulerStatusAct
@Override
protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
    // Runs on the elected master; JobManager performs the status update and completes the listener.
    jobManager.updateDatafeedStatus(request, listener);
}
@Override

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.common.ParseField;
@ -17,34 +17,34 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
public class Scheduler extends AbstractDiffable<Scheduler> implements ToXContent {
public class Datafeed extends AbstractDiffable<Datafeed> implements ToXContent {
private static final ParseField CONFIG_FIELD = new ParseField("config");
private static final ParseField STATUS_FIELD = new ParseField("status");
// Used for QueryPage
public static final ParseField RESULTS_FIELD = new ParseField("schedulers");
public static final ParseField RESULTS_FIELD = new ParseField("datafeeds");
public static final ConstructingObjectParser<Scheduler, Void> PARSER = new ConstructingObjectParser<>("scheduler",
a -> new Scheduler(((SchedulerConfig.Builder) a[0]).build(), (SchedulerStatus) a[1]));
public static final ConstructingObjectParser<Datafeed, Void> PARSER = new ConstructingObjectParser<>("datafeed",
a -> new Datafeed(((DatafeedConfig.Builder) a[0]).build(), (DatafeedStatus) a[1]));
static {
    // "config" is parsed via the nested DatafeedConfig parser; "status" arrives as a plain
    // string and is mapped through DatafeedStatus.fromString.
    PARSER.declareObject(ConstructingObjectParser.constructorArg(), DatafeedConfig.PARSER, CONFIG_FIELD);
    PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> DatafeedStatus.fromString(p.text()), STATUS_FIELD,
            ObjectParser.ValueType.STRING);
}
private final SchedulerConfig config;
private final SchedulerStatus status;
private final DatafeedConfig config;
private final DatafeedStatus status;
/**
 * @param config the immutable datafeed configuration
 * @param status the datafeed's current status (STARTED/STOPPED)
 */
public Datafeed(DatafeedConfig config, DatafeedStatus status) {
    this.config = config;
    this.status = status;
}
/** Wire constructor: reads the config object first, then the status. */
public Datafeed(StreamInput in) throws IOException {
    this.config = new DatafeedConfig(in);
    this.status = DatafeedStatus.fromStream(in);
}
public String getId() {
@ -55,11 +55,11 @@ public class Scheduler extends AbstractDiffable<Scheduler> implements ToXContent
return config.getJobId();
}
public SchedulerConfig getConfig() {
public DatafeedConfig getConfig() {
return config;
}
public SchedulerStatus getStatus() {
public DatafeedStatus getStatus() {
return status;
}
@ -82,7 +82,7 @@ public class Scheduler extends AbstractDiffable<Scheduler> implements ToXContent
@Override
public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    Datafeed that = (Datafeed) o;
    // Equality over both components, consistent with Objects.hash(config, status).
    return Objects.equals(config, that.config) &&
            Objects.equals(status, that.status);
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.support.ToXContentToBytes;
@ -32,14 +32,14 @@ import java.util.List;
import java.util.Objects;
/**
* Scheduler configuration options. Describes where to proactively pull input
* Datafeed configuration options. Describes where to proactively pull input
* data from.
* <p>
* If a value has not been set it will be <code>null</code>. Object wrappers are
* used around integral types and booleans so they can take <code>null</code>
* values.
*/
public class SchedulerConfig extends ToXContentToBytes implements Writeable {
public class DatafeedConfig extends ToXContentToBytes implements Writeable {
/**
* The field name used to specify aggregation fields in Elasticsearch
@ -52,7 +52,7 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
*/
public static final String DOC_COUNT = "doc_count";
public static final ParseField ID = new ParseField("scheduler_id");
public static final ParseField ID = new ParseField("datafeed_id");
public static final ParseField QUERY_DELAY = new ParseField("query_delay");
public static final ParseField FREQUENCY = new ParseField("frequency");
public static final ParseField INDEXES = new ParseField("indexes");
@ -64,7 +64,7 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
public static final ParseField SCRIPT_FIELDS = new ParseField("script_fields");
public static final ParseField SOURCE = new ParseField("_source");
public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("scheduler_config", Builder::new);
public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("datafeed_config", Builder::new);
static {
PARSER.declareString(Builder::setId, ID);
@ -111,7 +111,7 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
private final Integer scrollSize;
private final boolean source;
private SchedulerConfig(String id, String jobId, Long queryDelay, Long frequency, List<String> indexes, List<String> types,
private DatafeedConfig(String id, String jobId, Long queryDelay, Long frequency, List<String> indexes, List<String> types,
QueryBuilder query, AggregatorFactories.Builder aggregations,
List<SearchSourceBuilder.ScriptField> scriptFields, Integer scrollSize, boolean source) {
this.id = id;
@ -127,7 +127,7 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
this.source = source;
}
public SchedulerConfig(StreamInput in) throws IOException {
public DatafeedConfig(StreamInput in) throws IOException {
this.id = in.readString();
this.jobId = in.readString();
this.queryDelay = in.readOptionalLong();
@ -285,11 +285,11 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
return true;
}
if (other instanceof SchedulerConfig == false) {
if (other instanceof DatafeedConfig == false) {
return false;
}
SchedulerConfig that = (SchedulerConfig) other;
DatafeedConfig that = (DatafeedConfig) other;
return Objects.equals(this.id, that.id)
&& Objects.equals(this.jobId, that.jobId)
@ -335,7 +335,7 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
}
public Builder(SchedulerConfig config) {
public Builder(DatafeedConfig config) {
this.id = config.id;
this.jobId = config.jobId;
this.queryDelay = config.queryDelay;
@ -349,8 +349,8 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
this.source = config.source;
}
public void setId(String schedulerId) {
id = ExceptionsHelper.requireNonNull(schedulerId, ID.getPreferredName());
public void setId(String datafeedId) {
id = ExceptionsHelper.requireNonNull(datafeedId, ID.getPreferredName());
}
public void setJobId(String jobId) {
@ -367,8 +367,8 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
public void setQueryDelay(long queryDelay) {
if (queryDelay < 0) {
String msg = Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE,
SchedulerConfig.QUERY_DELAY.getPreferredName(), queryDelay);
String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE,
DatafeedConfig.QUERY_DELAY.getPreferredName(), queryDelay);
throw new IllegalArgumentException(msg);
}
this.queryDelay = queryDelay;
@ -376,8 +376,8 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
public void setFrequency(long frequency) {
if (frequency <= 0) {
String msg = Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE,
SchedulerConfig.FREQUENCY.getPreferredName(), frequency);
String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE,
DatafeedConfig.FREQUENCY.getPreferredName(), frequency);
throw new IllegalArgumentException(msg);
}
this.frequency = frequency;
@ -399,8 +399,8 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
public void setScrollSize(int scrollSize) {
if (scrollSize < 0) {
String msg = Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE,
SchedulerConfig.SCROLL_SIZE.getPreferredName(), scrollSize);
String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE,
DatafeedConfig.SCROLL_SIZE.getPreferredName(), scrollSize);
throw new IllegalArgumentException(msg);
}
this.scrollSize = scrollSize;
@ -410,7 +410,7 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
this.source = enabled;
}
public SchedulerConfig build() {
public DatafeedConfig build() {
ExceptionsHelper.requireNonNull(id, ID.getPreferredName());
ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
if (!MlStrings.isValidId(id)) {
@ -422,12 +422,12 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
if (types == null || types.isEmpty() || types.contains(null) || types.contains("")) {
throw invalidOptionValue(TYPES.getPreferredName(), types);
}
return new SchedulerConfig(id, jobId, queryDelay, frequency, indexes, types, query, aggregations, scriptFields, scrollSize,
return new DatafeedConfig(id, jobId, queryDelay, frequency, indexes, types, query, aggregations, scriptFields, scrollSize,
source);
}
// NOTE(review): declared to return ElasticsearchException — presumably so call sites can write
// "throw invalidOptionValue(...)" — but it always throws IllegalArgumentException itself.
private static ElasticsearchException invalidOptionValue(String fieldName, Object value) {
    String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, fieldName, value);
    throw new IllegalArgumentException(msg);
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
package org.elasticsearch.xpack.ml.datafeed;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.Client;
@ -17,8 +17,8 @@ import org.elasticsearch.xpack.ml.job.DataCounts;
import org.elasticsearch.xpack.ml.job.DataDescription;
import org.elasticsearch.xpack.ml.job.audit.Auditor;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractorFactory;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
@ -29,9 +29,9 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Supplier;
class ScheduledJob {
class DatafeedJob {
private static final Logger LOGGER = Loggers.getLogger(ScheduledJob.class);
private static final Logger LOGGER = Loggers.getLogger(DatafeedJob.class);
private static final int NEXT_TASK_DELAY_MS = 100;
private final Auditor auditor;
@ -47,7 +47,7 @@ class ScheduledJob {
private volatile Long lastEndTimeMs;
private AtomicBoolean running = new AtomicBoolean(true);
ScheduledJob(String jobId, DataDescription dataDescription, long frequencyMs, long queryDelayMs,
DatafeedJob(String jobId, DataDescription dataDescription, long frequencyMs, long queryDelayMs,
DataExtractorFactory dataExtractorFactory, Client client, Auditor auditor, Supplier<Long> currentTimeSupplier,
long latestFinalBucketEndTimeMs, long latestRecordTimeMs) {
this.jobId = jobId;
@ -74,12 +74,12 @@ class ScheduledJob {
if (isLookbackOnly) {
return null;
} else {
auditor.info(Messages.getMessage(Messages.JOB_AUDIT_SCHEDULER_STARTED_REALTIME));
auditor.info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_STARTED_REALTIME));
return nextRealtimeTimestamp();
}
}
String msg = Messages.getMessage(Messages.JOB_AUDIT_SCHEDULER_STARTED_FROM_TO,
String msg = Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_STARTED_FROM_TO,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.printer().print(lookbackStartTimeMs),
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.printer().print(lookbackEnd));
auditor.info(msg);
@ -87,12 +87,12 @@ class ScheduledJob {
FlushJobAction.Request request = new FlushJobAction.Request(jobId);
request.setCalcInterim(true);
run(lookbackStartTimeMs, lookbackEnd, request);
auditor.info(Messages.getMessage(Messages.JOB_AUDIT_SCHEDULER_LOOKBACK_COMPLETED));
auditor.info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_LOOKBACK_COMPLETED));
LOGGER.info("[{}] Lookback has finished", jobId);
if (isLookbackOnly) {
return null;
} else {
auditor.info(Messages.getMessage(Messages.JOB_AUDIT_SCHEDULER_CONTINUED_REALTIME));
auditor.info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_CONTINUED_REALTIME));
return nextRealtimeTimestamp();
}
}
@ -109,14 +109,14 @@ class ScheduledJob {
}
/**
* Stops the scheduled job
* Stops the datafeed job
*
* @return <code>true</code> when the scheduler was running and this method invocation stopped it,
* @return <code>true</code> when the datafeed was running and this method invocation stopped it,
* otherwise <code>false</code> is returned
*/
public boolean stop() {
if (running.compareAndSet(true, false)) {
auditor.info(Messages.getMessage(Messages.JOB_AUDIT_SCHEDULER_STOPPED));
auditor.info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_STOPPED));
return true;
} else {
return false;

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceNotFoundException;
@ -17,8 +17,8 @@ import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.InternalStartSchedulerAction;
import org.elasticsearch.xpack.ml.action.UpdateSchedulerStatusAction;
import org.elasticsearch.xpack.ml.action.InternalStartDatafeedAction;
import org.elasticsearch.xpack.ml.action.UpdateDatafeedStatusAction;
import org.elasticsearch.xpack.ml.job.DataCounts;
import org.elasticsearch.xpack.ml.job.DataDescription;
import org.elasticsearch.xpack.ml.job.Job;
@ -31,8 +31,8 @@ import org.elasticsearch.xpack.ml.job.persistence.BucketsQueryBuilder;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
import org.elasticsearch.xpack.ml.job.results.Bucket;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractorFactory;
import org.elasticsearch.xpack.ml.scheduler.extractor.scroll.ScrollDataExtractorFactory;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
import org.elasticsearch.xpack.ml.datafeed.extractor.scroll.ScrollDataExtractorFactory;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import java.time.Duration;
@ -43,7 +43,7 @@ import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Supplier;
public class ScheduledJobRunner extends AbstractComponent {
public class DatafeedJobRunner extends AbstractComponent {
private final Client client;
private final ClusterService clusterService;
@ -51,7 +51,7 @@ public class ScheduledJobRunner extends AbstractComponent {
private final ThreadPool threadPool;
private final Supplier<Long> currentTimeSupplier;
public ScheduledJobRunner(ThreadPool threadPool, Client client, ClusterService clusterService, JobProvider jobProvider,
public DatafeedJobRunner(ThreadPool threadPool, Client client, ClusterService clusterService, JobProvider jobProvider,
Supplier<Long> currentTimeSupplier) {
super(Settings.EMPTY);
this.client = Objects.requireNonNull(client);
@ -61,13 +61,13 @@ public class ScheduledJobRunner extends AbstractComponent {
this.currentTimeSupplier = Objects.requireNonNull(currentTimeSupplier);
}
public void run(String schedulerId, long startTime, Long endTime, InternalStartSchedulerAction.SchedulerTask task,
public void run(String datafeedId, long startTime, Long endTime, InternalStartDatafeedAction.DatafeedTask task,
Consumer<Exception> handler) {
MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
validate(schedulerId, mlMetadata);
validate(datafeedId, mlMetadata);
Scheduler scheduler = mlMetadata.getScheduler(schedulerId);
Job job = mlMetadata.getJobs().get(scheduler.getJobId());
Datafeed datafeed = mlMetadata.getDatafeed(datafeedId);
Job job = mlMetadata.getJobs().get(datafeed.getJobId());
gatherInformation(job.getId(), (buckets, dataCounts) -> {
long latestFinalBucketEndMs = -1L;
Duration bucketSpan = Duration.ofSeconds(job.getAnalysisConfig().getBucketSpan());
@ -78,47 +78,47 @@ public class ScheduledJobRunner extends AbstractComponent {
if (dataCounts.getLatestRecordTimeStamp() != null) {
latestRecordTimeMs = dataCounts.getLatestRecordTimeStamp().getTime();
}
Holder holder = createJobScheduler(scheduler, job, latestFinalBucketEndMs, latestRecordTimeMs, handler, task);
Holder holder = createJobDatafeed(datafeed, job, latestFinalBucketEndMs, latestRecordTimeMs, handler, task);
innerRun(holder, startTime, endTime);
}, handler);
}
// Important: Holder must be created and assigned to SchedulerTask before setting status to started,
// otherwise if a stop scheduler call is made immediately after the start scheduler call we could cancel
// the SchedulerTask without stopping scheduler, which causes the scheduler to keep on running.
// Important: Holder must be created and assigned to DatafeedTask before setting status to started,
// otherwise if a stop datafeed call is made immediately after the start datafeed call we could cancel
// the DatafeedTask without stopping datafeed, which causes the datafeed to keep on running.
private void innerRun(Holder holder, long startTime, Long endTime) {
setJobSchedulerStatus(holder.scheduler.getId(), SchedulerStatus.STARTED, error -> {
setJobDatafeedStatus(holder.datafeed.getId(), DatafeedStatus.STARTED, error -> {
if (error != null) {
holder.stop(error);
return;
}
logger.info("Starting scheduler [{}] for job [{}]", holder.scheduler.getId(), holder.scheduler.getJobId());
holder.future = threadPool.executor(MlPlugin.SCHEDULED_RUNNER_THREAD_POOL_NAME).submit(() -> {
logger.info("Starting datafeed [{}] for job [{}]", holder.datafeed.getId(), holder.datafeed.getJobId());
holder.future = threadPool.executor(MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME).submit(() -> {
Long next = null;
try {
next = holder.scheduledJob.runLookBack(startTime, endTime);
} catch (ScheduledJob.ExtractionProblemException e) {
next = holder.datafeedJob.runLookBack(startTime, endTime);
} catch (DatafeedJob.ExtractionProblemException e) {
if (endTime == null) {
next = e.nextDelayInMsSinceEpoch;
}
holder.problemTracker.reportExtractionProblem(e.getCause().getMessage());
} catch (ScheduledJob.AnalysisProblemException e) {
} catch (DatafeedJob.AnalysisProblemException e) {
if (endTime == null) {
next = e.nextDelayInMsSinceEpoch;
}
holder.problemTracker.reportAnalysisProblem(e.getCause().getMessage());
} catch (ScheduledJob.EmptyDataCountException e) {
} catch (DatafeedJob.EmptyDataCountException e) {
if (endTime == null && holder.problemTracker.updateEmptyDataCount(true) == false) {
next = e.nextDelayInMsSinceEpoch;
}
} catch (Exception e) {
logger.error("Failed lookback import for job [" + holder.scheduler.getJobId() + "]", e);
logger.error("Failed lookback import for job [" + holder.datafeed.getJobId() + "]", e);
holder.stop(e);
return;
}
if (next != null) {
doScheduleRealtime(next, holder.scheduler.getJobId(), holder);
doDatafeedRealtime(next, holder.datafeed.getJobId(), holder);
} else {
holder.stop(null);
holder.problemTracker.finishReport();
@ -127,21 +127,21 @@ public class ScheduledJobRunner extends AbstractComponent {
});
}
private void doScheduleRealtime(long delayInMsSinceEpoch, String jobId, Holder holder) {
private void doDatafeedRealtime(long delayInMsSinceEpoch, String jobId, Holder holder) {
if (holder.isRunning()) {
TimeValue delay = computeNextDelay(delayInMsSinceEpoch);
logger.debug("Waiting [{}] before executing next realtime import for job [{}]", delay, jobId);
holder.future = threadPool.schedule(delay, MlPlugin.SCHEDULED_RUNNER_THREAD_POOL_NAME, () -> {
holder.future = threadPool.schedule(delay, MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME, () -> {
long nextDelayInMsSinceEpoch;
try {
nextDelayInMsSinceEpoch = holder.scheduledJob.runRealtime();
} catch (ScheduledJob.ExtractionProblemException e) {
nextDelayInMsSinceEpoch = holder.datafeedJob.runRealtime();
} catch (DatafeedJob.ExtractionProblemException e) {
nextDelayInMsSinceEpoch = e.nextDelayInMsSinceEpoch;
holder.problemTracker.reportExtractionProblem(e.getCause().getMessage());
} catch (ScheduledJob.AnalysisProblemException e) {
} catch (DatafeedJob.AnalysisProblemException e) {
nextDelayInMsSinceEpoch = e.nextDelayInMsSinceEpoch;
holder.problemTracker.reportAnalysisProblem(e.getCause().getMessage());
} catch (ScheduledJob.EmptyDataCountException e) {
} catch (DatafeedJob.EmptyDataCountException e) {
nextDelayInMsSinceEpoch = e.nextDelayInMsSinceEpoch;
if (holder.problemTracker.updateEmptyDataCount(true)) {
holder.problemTracker.finishReport();
@ -149,56 +149,56 @@ public class ScheduledJobRunner extends AbstractComponent {
return;
}
} catch (Exception e) {
logger.error("Unexpected scheduler failure for job [" + jobId + "] stopping...", e);
logger.error("Unexpected datafeed failure for job [" + jobId + "] stopping...", e);
holder.stop(e);
return;
}
holder.problemTracker.finishReport();
doScheduleRealtime(nextDelayInMsSinceEpoch, jobId, holder);
doDatafeedRealtime(nextDelayInMsSinceEpoch, jobId, holder);
});
}
}
public static void validate(String schedulerId, MlMetadata mlMetadata) {
Scheduler scheduler = mlMetadata.getScheduler(schedulerId);
if (scheduler == null) {
throw ExceptionsHelper.missingSchedulerException(schedulerId);
public static void validate(String datafeedId, MlMetadata mlMetadata) {
Datafeed datafeed = mlMetadata.getDatafeed(datafeedId);
if (datafeed == null) {
throw ExceptionsHelper.missingDatafeedException(datafeedId);
}
Job job = mlMetadata.getJobs().get(scheduler.getJobId());
Job job = mlMetadata.getJobs().get(datafeed.getJobId());
if (job == null) {
throw ExceptionsHelper.missingJobException(scheduler.getJobId());
throw ExceptionsHelper.missingJobException(datafeed.getJobId());
}
Allocation allocation = mlMetadata.getAllocations().get(scheduler.getJobId());
Allocation allocation = mlMetadata.getAllocations().get(datafeed.getJobId());
if (allocation.getStatus() != JobStatus.OPENED) {
throw new ElasticsearchStatusException("cannot start scheduler, expected job status [{}], but got [{}]",
throw new ElasticsearchStatusException("cannot start datafeed, expected job status [{}], but got [{}]",
RestStatus.CONFLICT, JobStatus.OPENED, allocation.getStatus());
}
SchedulerStatus status = scheduler.getStatus();
if (status != SchedulerStatus.STOPPED) {
throw new ElasticsearchStatusException("scheduler already started, expected scheduler status [{}], but got [{}]",
RestStatus.CONFLICT, SchedulerStatus.STOPPED, status);
DatafeedStatus status = datafeed.getStatus();
if (status != DatafeedStatus.STOPPED) {
throw new ElasticsearchStatusException("datafeed already started, expected datafeed status [{}], but got [{}]",
RestStatus.CONFLICT, DatafeedStatus.STOPPED, status);
}
ScheduledJobValidator.validate(scheduler.getConfig(), job);
DatafeedJobValidator.validate(datafeed.getConfig(), job);
}
private Holder createJobScheduler(Scheduler scheduler, Job job, long finalBucketEndMs, long latestRecordTimeMs,
Consumer<Exception> handler, InternalStartSchedulerAction.SchedulerTask task) {
private Holder createJobDatafeed(Datafeed datafeed, Job job, long finalBucketEndMs, long latestRecordTimeMs,
Consumer<Exception> handler, InternalStartDatafeedAction.DatafeedTask task) {
Auditor auditor = jobProvider.audit(job.getId());
Duration frequency = getFrequencyOrDefault(scheduler, job);
Duration queryDelay = Duration.ofSeconds(scheduler.getConfig().getQueryDelay());
DataExtractorFactory dataExtractorFactory = createDataExtractorFactory(scheduler.getConfig(), job);
ScheduledJob scheduledJob = new ScheduledJob(job.getId(), buildDataDescription(job), frequency.toMillis(), queryDelay.toMillis(),
Duration frequency = getFrequencyOrDefault(datafeed, job);
Duration queryDelay = Duration.ofSeconds(datafeed.getConfig().getQueryDelay());
DataExtractorFactory dataExtractorFactory = createDataExtractorFactory(datafeed.getConfig(), job);
DatafeedJob datafeedJob = new DatafeedJob(job.getId(), buildDataDescription(job), frequency.toMillis(), queryDelay.toMillis(),
dataExtractorFactory, client, auditor, currentTimeSupplier, finalBucketEndMs, latestRecordTimeMs);
Holder holder = new Holder(scheduler, scheduledJob, new ProblemTracker(() -> auditor), handler);
Holder holder = new Holder(datafeed, datafeedJob, new ProblemTracker(() -> auditor), handler);
task.setHolder(holder);
return holder;
}
DataExtractorFactory createDataExtractorFactory(SchedulerConfig schedulerConfig, Job job) {
return new ScrollDataExtractorFactory(client, schedulerConfig, job);
DataExtractorFactory createDataExtractorFactory(DatafeedConfig datafeedConfig, Job job) {
return new ScrollDataExtractorFactory(client, datafeedConfig, job);
}
private static DataDescription buildDataDescription(Job job) {
@ -229,8 +229,8 @@ public class ScheduledJobRunner extends AbstractComponent {
});
}
private static Duration getFrequencyOrDefault(Scheduler scheduler, Job job) {
Long frequency = scheduler.getConfig().getFrequency();
private static Duration getFrequencyOrDefault(Datafeed datafeed, Job job) {
Long frequency = datafeed.getConfig().getFrequency();
Long bucketSpan = job.getAnalysisConfig().getBucketSpan();
return frequency == null ? DefaultFrequency.ofBucketSpan(bucketSpan) : Duration.ofSeconds(frequency);
}
@ -239,22 +239,22 @@ public class ScheduledJobRunner extends AbstractComponent {
return new TimeValue(Math.max(1, next - currentTimeSupplier.get()));
}
private void setJobSchedulerStatus(String schedulerId, SchedulerStatus status, Consumer<Exception> handler) {
UpdateSchedulerStatusAction.Request request = new UpdateSchedulerStatusAction.Request(schedulerId, status);
client.execute(UpdateSchedulerStatusAction.INSTANCE, request, new ActionListener<UpdateSchedulerStatusAction.Response>() {
private void setJobDatafeedStatus(String datafeedId, DatafeedStatus status, Consumer<Exception> handler) {
UpdateDatafeedStatusAction.Request request = new UpdateDatafeedStatusAction.Request(datafeedId, status);
client.execute(UpdateDatafeedStatusAction.INSTANCE, request, new ActionListener<UpdateDatafeedStatusAction.Response>() {
@Override
public void onResponse(UpdateSchedulerStatusAction.Response response) {
public void onResponse(UpdateDatafeedStatusAction.Response response) {
if (response.isAcknowledged()) {
logger.debug("successfully set scheduler [{}] status to [{}]", schedulerId, status);
logger.debug("successfully set datafeed [{}] status to [{}]", datafeedId, status);
} else {
logger.info("set scheduler [{}] status to [{}], but was not acknowledged", schedulerId, status);
logger.info("set datafeed [{}] status to [{}], but was not acknowledged", datafeedId, status);
}
handler.accept(null);
}
@Override
public void onFailure(Exception e) {
logger.error("could not set scheduler [" + schedulerId + "] status to [" + status + "]", e);
logger.error("could not set datafeed [" + datafeedId + "] status to [" + status + "]", e);
handler.accept(e);
}
});
@ -262,31 +262,31 @@ public class ScheduledJobRunner extends AbstractComponent {
public class Holder {
private final Scheduler scheduler;
private final ScheduledJob scheduledJob;
private final Datafeed datafeed;
private final DatafeedJob datafeedJob;
private final ProblemTracker problemTracker;
private final Consumer<Exception> handler;
volatile Future<?> future;
// Private: Holders are only created by createJobDatafeed, which also attaches them to the task.
private Holder(Datafeed datafeed, DatafeedJob datafeedJob, ProblemTracker problemTracker, Consumer<Exception> handler) {
    this.datafeed = datafeed;
    this.datafeedJob = datafeedJob;
    this.problemTracker = problemTracker;
    this.handler = handler;
}
/** @return whether the underlying datafeed job is still running */
boolean isRunning() {
    return datafeedJob.isRunning();
}
public void stop(Exception e) {
logger.info("attempt to stop scheduler [{}] for job [{}]", scheduler.getId(), scheduler.getJobId());
if (scheduledJob.stop()) {
logger.info("attempt to stop datafeed [{}] for job [{}]", datafeed.getId(), datafeed.getJobId());
if (datafeedJob.stop()) {
FutureUtils.cancel(future);
setJobSchedulerStatus(scheduler.getId(), SchedulerStatus.STOPPED, error -> handler.accept(e));
logger.info("scheduler [{}] for job [{}] has been stopped", scheduler.getId(), scheduler.getJobId());
setJobDatafeedStatus(datafeed.getId(), DatafeedStatus.STOPPED, error -> handler.accept(e));
logger.info("datafeed [{}] for job [{}] has been stopped", datafeed.getId(), datafeed.getJobId());
} else {
logger.info("scheduler [{}] for job [{}] was already stopped", scheduler.getId(), scheduler.getJobId());
logger.info("datafeed [{}] for job [{}] was already stopped", datafeed.getId(), datafeed.getJobId());
}
}

View File

@ -3,29 +3,29 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.xpack.ml.job.AnalysisConfig;
import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.job.messages.Messages;
public final class ScheduledJobValidator {
public final class DatafeedJobValidator {
private ScheduledJobValidator() {}
private DatafeedJobValidator() {}
/**
* Validates a schedulerConfig in relation to the job it refers to
* @param schedulerConfig the scheduler config
* Validates a datafeedConfig in relation to the job it refers to
* @param datafeedConfig the datafeed config
* @param job the job
*/
public static void validate(SchedulerConfig schedulerConfig, Job job) {
public static void validate(DatafeedConfig datafeedConfig, Job job) {
AnalysisConfig analysisConfig = job.getAnalysisConfig();
if (analysisConfig.getLatency() != null && analysisConfig.getLatency() > 0) {
throw new IllegalArgumentException(Messages.getMessage(Messages.SCHEDULER_DOES_NOT_SUPPORT_JOB_WITH_LATENCY));
throw new IllegalArgumentException(Messages.getMessage(Messages.DATAFEED_DOES_NOT_SUPPORT_JOB_WITH_LATENCY));
}
if (schedulerConfig.getAggregations() != null && !SchedulerConfig.DOC_COUNT.equals(analysisConfig.getSummaryCountFieldName())) {
if (datafeedConfig.getAggregations() != null && !DatafeedConfig.DOC_COUNT.equals(analysisConfig.getSummaryCountFieldName())) {
throw new IllegalArgumentException(
Messages.getMessage(Messages.SCHEDULER_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD, SchedulerConfig.DOC_COUNT));
Messages.getMessage(Messages.DATAFEED_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD, DatafeedConfig.DOC_COUNT));
}
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -12,18 +12,18 @@ import org.elasticsearch.common.io.stream.Writeable;
import java.io.IOException;
import java.util.Locale;
public enum SchedulerStatus implements Writeable {
public enum DatafeedStatus implements Writeable {
STARTED, STOPPED;
public static SchedulerStatus fromString(String name) {
public static DatafeedStatus fromString(String name) {
return valueOf(name.trim().toUpperCase(Locale.ROOT));
}
public static SchedulerStatus fromStream(StreamInput in) throws IOException {
public static DatafeedStatus fromStream(StreamInput in) throws IOException {
int ordinal = in.readVInt();
if (ordinal < 0 || ordinal >= values().length) {
throw new IOException("Unknown public enum SchedulerStatus {\n ordinal [" + ordinal + "]");
throw new IOException("Unknown public enum DatafeedStatus {\n ordinal [" + ordinal + "]");
}
return values()[ordinal];
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.xpack.ml.job.audit.Auditor;
import org.elasticsearch.xpack.ml.job.messages.Messages;
@ -13,7 +13,7 @@ import java.util.function.Supplier;
/**
* <p>
* Keeps track of problems the scheduler encounters and audits
* Keeps track of problems the datafeed encounters and audits
* messages appropriately.
* </p>
* <p>
@ -45,7 +45,7 @@ class ProblemTracker {
* @param problemMessage the problem message
*/
public void reportAnalysisProblem(String problemMessage) {
reportProblem(Messages.JOB_AUDIT_SCHEDULER_DATA_ANALYSIS_ERROR, problemMessage);
reportProblem(Messages.JOB_AUDIT_DATAFEED_DATA_ANALYSIS_ERROR, problemMessage);
}
/**
@ -54,7 +54,7 @@ class ProblemTracker {
* @param problemMessage the problem message
*/
public void reportExtractionProblem(String problemMessage) {
reportProblem(Messages.JOB_AUDIT_SCHEDULER_DATA_EXTRACTION_ERROR, problemMessage);
reportProblem(Messages.JOB_AUDIT_DATAFEED_DATA_EXTRACTION_ERROR, problemMessage);
}
/**
@ -82,12 +82,12 @@ class ProblemTracker {
if (empty && emptyDataCount < EMPTY_DATA_WARN_COUNT) {
emptyDataCount++;
if (emptyDataCount == EMPTY_DATA_WARN_COUNT) {
auditor.get().warning(Messages.getMessage(Messages.JOB_AUDIT_SCHEDULER_NO_DATA));
auditor.get().warning(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_NO_DATA));
return true;
}
} else if (!empty) {
if (emptyDataCount >= EMPTY_DATA_WARN_COUNT) {
auditor.get().info(Messages.getMessage(Messages.JOB_AUDIR_SCHEDULER_DATA_SEEN_AGAIN));
auditor.get().info(Messages.getMessage(Messages.JOB_AUDIR_DATAFEED_DATA_SEEN_AGAIN));
}
emptyDataCount = 0;
}
@ -103,7 +103,7 @@ class ProblemTracker {
*/
public void finishReport() {
if (!hasProblems && hadProblems) {
auditor.get().info(Messages.getMessage(Messages.JOB_AUDIT_SCHEDULER_RECOVERED));
auditor.get().info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_RECOVERED));
}
hadProblems = hasProblems;

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler.extractor;
package org.elasticsearch.xpack.ml.datafeed.extractor;
import java.io.IOException;
import java.io.InputStream;

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler.extractor;
package org.elasticsearch.xpack.ml.datafeed.extractor;
public interface DataExtractorFactory {
DataExtractor newExtractor(long start, long end);

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll;
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;

View File

@ -3,11 +3,11 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll;
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.util.ArrayList;
import java.util.Arrays;
@ -68,8 +68,8 @@ class ExtractedFields {
throw new RuntimeException("Time field [" + timeField.getName() + "] expected a long value; actual was: " + value[0]);
}
public static ExtractedFields build(Job job, SchedulerConfig schedulerConfig) {
Set<String> scriptFields = schedulerConfig.getScriptFields().stream().map(sf -> sf.fieldName()).collect(Collectors.toSet());
public static ExtractedFields build(Job job, DatafeedConfig datafeedConfig) {
Set<String> scriptFields = datafeedConfig.getScriptFields().stream().map(sf -> sf.fieldName()).collect(Collectors.toSet());
String timeField = job.getDataDescription().getTimeField();
ExtractedField timeExtractedField = ExtractedField.newField(timeField, scriptFields.contains(timeField) ?
ExtractedField.ExtractionMethod.SCRIPT_FIELD : ExtractedField.ExtractionMethod.DOC_VALUE);
@ -78,7 +78,7 @@ class ExtractedFields {
allExtractedFields.add(timeExtractedField);
for (String field : remainingFields) {
ExtractedField.ExtractionMethod method = scriptFields.contains(field) ? ExtractedField.ExtractionMethod.SCRIPT_FIELD :
schedulerConfig.isSource() ? ExtractedField.ExtractionMethod.SOURCE : ExtractedField.ExtractionMethod.DOC_VALUE;
datafeedConfig.isSource() ? ExtractedField.ExtractionMethod.SOURCE : ExtractedField.ExtractionMethod.DOC_VALUE;
allExtractedFields.add(ExtractedField.newField(field, method));
}
return new ExtractedFields(timeExtractedField, allExtractedFields);

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll;
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.search.ClearScrollAction;
@ -21,7 +21,7 @@ import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll;
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;

View File

@ -3,28 +3,28 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll;
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractorFactory;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
import java.util.Objects;
public class ScrollDataExtractorFactory implements DataExtractorFactory {
private final Client client;
private final SchedulerConfig schedulerConfig;
private final DatafeedConfig datafeedConfig;
private final Job job;
private final ExtractedFields extractedFields;
public ScrollDataExtractorFactory(Client client, SchedulerConfig schedulerConfig, Job job) {
public ScrollDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job) {
this.client = Objects.requireNonNull(client);
this.schedulerConfig = Objects.requireNonNull(schedulerConfig);
this.datafeedConfig = Objects.requireNonNull(datafeedConfig);
this.job = Objects.requireNonNull(job);
this.extractedFields = ExtractedFields.build(job, schedulerConfig);
this.extractedFields = ExtractedFields.build(job, datafeedConfig);
}
@Override
@ -32,11 +32,11 @@ public class ScrollDataExtractorFactory implements DataExtractorFactory {
ScrollDataExtractorContext dataExtractorContext = new ScrollDataExtractorContext(
job.getId(),
extractedFields,
schedulerConfig.getIndexes(),
schedulerConfig.getTypes(),
schedulerConfig.getQuery(),
schedulerConfig.getScriptFields(),
schedulerConfig.getScrollSize(),
datafeedConfig.getIndexes(),
datafeedConfig.getTypes(),
datafeedConfig.getQuery(),
datafeedConfig.getScriptFields(),
datafeedConfig.getScrollSize(),
start,
end);
return new ScrollDataExtractor(client, dataExtractorContext);

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll;
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.xcontent.XContentBuilder;

View File

@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ml.job.config;
import java.time.Duration;
/**
* Factory methods for a sensible default for the scheduler frequency
* Factory methods for a sensible default for the datafeed frequency
*/
public final class DefaultFrequency {
private static final int SECONDS_IN_MINUTE = 60;

View File

@ -25,7 +25,7 @@ import org.elasticsearch.xpack.ml.action.OpenJobAction;
import org.elasticsearch.xpack.ml.action.PutJobAction;
import org.elasticsearch.xpack.ml.action.RevertModelSnapshotAction;
import org.elasticsearch.xpack.ml.action.UpdateJobStatusAction;
import org.elasticsearch.xpack.ml.action.UpdateSchedulerStatusAction;
import org.elasticsearch.xpack.ml.action.UpdateDatafeedStatusAction;
import org.elasticsearch.xpack.ml.job.IgnoreDowntime;
import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.job.JobStatus;
@ -39,7 +39,7 @@ import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister;
import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
import org.elasticsearch.xpack.ml.job.results.AnomalyRecord;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import java.util.Collections;
@ -56,7 +56,7 @@ import java.util.stream.Collectors;
* <li>creation</li>
* <li>deletion</li>
* <li>updating</li>
* <li>starting/stopping of scheduled jobs</li>
* <li>starting/stopping of datafeed jobs</li>
* </ul>
*/
public class JobManager extends AbstractComponent {
@ -305,23 +305,23 @@ public class JobManager extends AbstractComponent {
return buildNewClusterState(currentState, builder);
}
public void updateSchedulerStatus(UpdateSchedulerStatusAction.Request request,
ActionListener<UpdateSchedulerStatusAction.Response> actionListener) {
String schedulerId = request.getSchedulerId();
SchedulerStatus newStatus = request.getSchedulerStatus();
clusterService.submitStateUpdateTask("update-scheduler-status-" + schedulerId,
new AckedClusterStateUpdateTask<UpdateSchedulerStatusAction.Response>(request, actionListener) {
public void updateDatafeedStatus(UpdateDatafeedStatusAction.Request request,
ActionListener<UpdateDatafeedStatusAction.Response> actionListener) {
String datafeedId = request.getDatafeedId();
DatafeedStatus newStatus = request.getDatafeedStatus();
clusterService.submitStateUpdateTask("update-datafeed-status-" + datafeedId,
new AckedClusterStateUpdateTask<UpdateDatafeedStatusAction.Response>(request, actionListener) {
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
MlMetadata.Builder builder = createMlMetadataBuilder(currentState);
builder.updateSchedulerStatus(schedulerId, newStatus);
builder.updateDatafeedStatus(datafeedId, newStatus);
return buildNewClusterState(currentState, builder);
}
@Override
protected UpdateSchedulerStatusAction.Response newResponse(boolean acknowledged) {
return new UpdateSchedulerStatusAction.Response(acknowledged);
protected UpdateDatafeedStatusAction.Response newResponse(boolean acknowledged) {
return new UpdateDatafeedStatusAction.Response(acknowledged);
}
});
}

View File

@ -44,16 +44,16 @@ public final class Messages {
public static final String JOB_AUDIT_REVERTED = "job.audit.reverted";
public static final String JOB_AUDIT_OLD_RESULTS_DELETED = "job.audit.old.results.deleted";
public static final String JOB_AUDIT_SNAPSHOT_DELETED = "job.audit.snapshot.deleted";
public static final String JOB_AUDIT_SCHEDULER_STARTED_FROM_TO = "job.audit.scheduler.started.from.to";
public static final String JOB_AUDIT_SCHEDULER_CONTINUED_REALTIME = "job.audit.scheduler.continued.realtime";
public static final String JOB_AUDIT_SCHEDULER_STARTED_REALTIME = "job.audit.scheduler.started.realtime";
public static final String JOB_AUDIT_SCHEDULER_LOOKBACK_COMPLETED = "job.audit.scheduler.lookback.completed";
public static final String JOB_AUDIT_SCHEDULER_STOPPED = "job.audit.scheduler.stopped";
public static final String JOB_AUDIT_SCHEDULER_NO_DATA = "job.audit.scheduler.no.data";
public static final String JOB_AUDIR_SCHEDULER_DATA_SEEN_AGAIN = "job.audit.scheduler.data.seen.again";
public static final String JOB_AUDIT_SCHEDULER_DATA_ANALYSIS_ERROR = "job.audit.scheduler.data.analysis.error";
public static final String JOB_AUDIT_SCHEDULER_DATA_EXTRACTION_ERROR = "job.audit.scheduler.data.extraction.error";
public static final String JOB_AUDIT_SCHEDULER_RECOVERED = "job.audit.scheduler.recovered";
public static final String JOB_AUDIT_DATAFEED_STARTED_FROM_TO = "job.audit.datafeed.started.from.to";
public static final String JOB_AUDIT_DATAFEED_CONTINUED_REALTIME = "job.audit.datafeed.continued.realtime";
public static final String JOB_AUDIT_DATAFEED_STARTED_REALTIME = "job.audit.datafeed.started.realtime";
public static final String JOB_AUDIT_DATAFEED_LOOKBACK_COMPLETED = "job.audit.datafeed.lookback.completed";
public static final String JOB_AUDIT_DATAFEED_STOPPED = "job.audit.datafeed.stopped";
public static final String JOB_AUDIT_DATAFEED_NO_DATA = "job.audit.datafeed.no.data";
public static final String JOB_AUDIR_DATAFEED_DATA_SEEN_AGAIN = "job.audit.datafeed.data.seen.again";
public static final String JOB_AUDIT_DATAFEED_DATA_ANALYSIS_ERROR = "job.audit.datafeed.data.analysis.error";
public static final String JOB_AUDIT_DATAFEED_DATA_EXTRACTION_ERROR = "job.audit.datafeed.data.extraction.error";
public static final String JOB_AUDIT_DATAFEED_RECOVERED = "job.audit.datafeed.recovered";
public static final String SYSTEM_AUDIT_STARTED = "system.audit.started";
public static final String SYSTEM_AUDIT_SHUTDOWN = "system.audit.shutdown";
@ -155,8 +155,8 @@ public final class Messages {
public static final String JOB_CONFIG_UPDATE_MODEL_SNAPSHOT_RETENTION_DAYS_INVALID = "job.config.update.model.snapshot.retention.days."
+ "invalid";
public static final String JOB_CONFIG_UPDATE_RESULTS_RETENTION_DAYS_INVALID = "job.config.update.results.retention.days.invalid";
public static final String JOB_CONFIG_UPDATE_SCHEDULE_CONFIG_PARSE_ERROR = "job.config.update.scheduler.config.parse.error";
public static final String JOB_CONFIG_UPDATE_SCHEDULE_CONFIG_CANNOT_BE_NULL = "job.config.update.scheduler.config.cannot.be.null";
public static final String JOB_CONFIG_UPDATE_DATAFEED_CONFIG_PARSE_ERROR = "job.config.update.datafeed.config.parse.error";
public static final String JOB_CONFIG_UPDATE_DATAFEED_CONFIG_CANNOT_BE_NULL = "job.config.update.datafeed.config.cannot.be.null";
public static final String JOB_CONFIG_TRANSFORM_CIRCULAR_DEPENDENCY = "job.config.transform.circular.dependency";
public static final String JOB_CONFIG_TRANSFORM_CONDITION_REQUIRED = "job.config.transform.condition.required";
@ -185,18 +185,18 @@ public final class Messages {
public static final String JOB_DATA_CONCURRENT_USE_UPDATE = "job.data.concurrent.use.update";
public static final String JOB_DATA_CONCURRENT_USE_UPLOAD = "job.data.concurrent.use.upload";
public static final String SCHEDULER_CONFIG_INVALID_OPTION_VALUE = "scheduler.config.invalid.option.value";
public static final String DATAFEED_CONFIG_INVALID_OPTION_VALUE = "datafeed.config.invalid.option.value";
public static final String SCHEDULER_DOES_NOT_SUPPORT_JOB_WITH_LATENCY = "scheduler.does.not.support.job.with.latency";
public static final String SCHEDULER_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD =
"scheduler.aggregations.requires.job.with.summary.count.field";
public static final String DATAFEED_DOES_NOT_SUPPORT_JOB_WITH_LATENCY = "datafeed.does.not.support.job.with.latency";
public static final String DATAFEED_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD =
"datafeed.aggregations.requires.job.with.summary.count.field";
public static final String SCHEDULER_CANNOT_START = "scheduler.cannot.start";
public static final String SCHEDULER_CANNOT_STOP_IN_CURRENT_STATE = "scheduler.cannot.stop.in.current.state";
public static final String SCHEDULER_CANNOT_UPDATE_IN_CURRENT_STATE = "scheduler.cannot.update.in.current.state";
public static final String SCHEDULER_CANNOT_DELETE_IN_CURRENT_STATE = "scheduler.cannot.delete.in.current.state";
public static final String SCHEDULER_FAILED_TO_STOP = "scheduler.failed.to.stop";
public static final String SCHEDULER_NOT_FOUND = "scheduler.not.found";
public static final String DATAFEED_CANNOT_START = "datafeed.cannot.start";
public static final String DATAFEED_CANNOT_STOP_IN_CURRENT_STATE = "datafeed.cannot.stop.in.current.state";
public static final String DATAFEED_CANNOT_UPDATE_IN_CURRENT_STATE = "datafeed.cannot.update.in.current.state";
public static final String DATAFEED_CANNOT_DELETE_IN_CURRENT_STATE = "datafeed.cannot.delete.in.current.state";
public static final String DATAFEED_FAILED_TO_STOP = "datafeed.failed.to.stop";
public static final String DATAFEED_NOT_FOUND = "datafeed.not.found";
public static final String JOB_MISSING_QUANTILES = "job.missing.quantiles";
public static final String JOB_UNKNOWN_ID = "job.unknown.id";
@ -213,7 +213,7 @@ public final class Messages {
public static final String JSON_TRANSFORM_CONFIG_MAPPING = "json.transform.config.mapping.error";
public static final String JSON_TRANSFORM_CONFIG_PARSE = "json.transform.config.parse.error";
public static final String REST_ACTION_NOT_ALLOWED_FOR_SCHEDULED_JOB = "rest.action.not.allowed.for.scheduled.job";
public static final String REST_ACTION_NOT_ALLOWED_FOR_DATAFEED_JOB = "rest.action.not.allowed.for.datafeed.job";
public static final String REST_INVALID_DATETIME_PARAMS = "rest.invalid.datetime.params";
public static final String REST_INVALID_FLUSH_PARAMS_MISSING = "rest.invalid.flush.params.missing.argument";

View File

@ -23,10 +23,10 @@ import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.job.JobStatus;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.scheduler.ScheduledJobValidator;
import org.elasticsearch.xpack.ml.scheduler.Scheduler;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
import org.elasticsearch.xpack.ml.datafeed.DatafeedJobValidator;
import org.elasticsearch.xpack.ml.datafeed.Datafeed;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import java.io.IOException;
@ -44,7 +44,7 @@ public class MlMetadata implements MetaData.Custom {
private static final ParseField JOBS_FIELD = new ParseField("jobs");
private static final ParseField ALLOCATIONS_FIELD = new ParseField("allocations");
private static final ParseField SCHEDULERS_FIELD = new ParseField("schedulers");
private static final ParseField DATAFEEDS_FIELD = new ParseField("datafeeds");
public static final String TYPE = "ml";
public static final MlMetadata EMPTY_METADATA = new MlMetadata(Collections.emptySortedMap(),
@ -56,18 +56,18 @@ public class MlMetadata implements MetaData.Custom {
static {
ML_METADATA_PARSER.declareObjectArray(Builder::putJobs, (p, c) -> Job.PARSER.apply(p, c).build(), JOBS_FIELD);
ML_METADATA_PARSER.declareObjectArray(Builder::putAllocations, Allocation.PARSER, ALLOCATIONS_FIELD);
ML_METADATA_PARSER.declareObjectArray(Builder::putSchedulers, Scheduler.PARSER, SCHEDULERS_FIELD);
ML_METADATA_PARSER.declareObjectArray(Builder::putDatafeeds, Datafeed.PARSER, DATAFEEDS_FIELD);
}
private final SortedMap<String, Job> jobs;
private final SortedMap<String, Allocation> allocations;
private final SortedMap<String, Scheduler> schedulers;
private final SortedMap<String, Datafeed> datafeeds;
private MlMetadata(SortedMap<String, Job> jobs, SortedMap<String, Allocation> allocations,
SortedMap<String, Scheduler> schedulers) {
SortedMap<String, Datafeed> datafeeds) {
this.jobs = Collections.unmodifiableSortedMap(jobs);
this.allocations = Collections.unmodifiableSortedMap(allocations);
this.schedulers = Collections.unmodifiableSortedMap(schedulers);
this.datafeeds = Collections.unmodifiableSortedMap(datafeeds);
}
public Map<String, Job> getJobs() {
@ -78,12 +78,12 @@ public class MlMetadata implements MetaData.Custom {
return allocations;
}
public SortedMap<String, Scheduler> getSchedulers() {
return schedulers;
public SortedMap<String, Datafeed> getDatafeeds() {
return datafeeds;
}
public Scheduler getScheduler(String schedulerId) {
return schedulers.get(schedulerId);
public Datafeed getDatafeed(String datafeedId) {
return datafeeds.get(datafeedId);
}
@Override
@ -117,18 +117,18 @@ public class MlMetadata implements MetaData.Custom {
}
this.allocations = allocations;
size = in.readVInt();
TreeMap<String, Scheduler> schedulers = new TreeMap<>();
TreeMap<String, Datafeed> datafeeds = new TreeMap<>();
for (int i = 0; i < size; i++) {
schedulers.put(in.readString(), new Scheduler(in));
datafeeds.put(in.readString(), new Datafeed(in));
}
this.schedulers = schedulers;
this.datafeeds = datafeeds;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
writeMap(jobs, out);
writeMap(allocations, out);
writeMap(schedulers, out);
writeMap(datafeeds, out);
}
private static <T extends Writeable> void writeMap(Map<String, T> map, StreamOutput out) throws IOException {
@ -143,7 +143,7 @@ public class MlMetadata implements MetaData.Custom {
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
mapValuesToXContent(JOBS_FIELD, jobs, builder, params);
mapValuesToXContent(ALLOCATIONS_FIELD, allocations, builder, params);
mapValuesToXContent(SCHEDULERS_FIELD, schedulers, builder, params);
mapValuesToXContent(DATAFEEDS_FIELD, datafeeds, builder, params);
return builder;
}
@ -160,27 +160,27 @@ public class MlMetadata implements MetaData.Custom {
final Diff<Map<String, Job>> jobs;
final Diff<Map<String, Allocation>> allocations;
final Diff<Map<String, Scheduler>> schedulers;
final Diff<Map<String, Datafeed>> datafeeds;
MlMetadataDiff(MlMetadata before, MlMetadata after) {
this.jobs = DiffableUtils.diff(before.jobs, after.jobs, DiffableUtils.getStringKeySerializer());
this.allocations = DiffableUtils.diff(before.allocations, after.allocations, DiffableUtils.getStringKeySerializer());
this.schedulers = DiffableUtils.diff(before.schedulers, after.schedulers, DiffableUtils.getStringKeySerializer());
this.datafeeds = DiffableUtils.diff(before.datafeeds, after.datafeeds, DiffableUtils.getStringKeySerializer());
}
@Override
public MetaData.Custom apply(MetaData.Custom part) {
TreeMap<String, Job> newJobs = new TreeMap<>(jobs.apply(((MlMetadata) part).jobs));
TreeMap<String, Allocation> newAllocations = new TreeMap<>(allocations.apply(((MlMetadata) part).allocations));
TreeMap<String, Scheduler> newSchedulers = new TreeMap<>(schedulers.apply(((MlMetadata) part).schedulers));
return new MlMetadata(newJobs, newAllocations, newSchedulers);
TreeMap<String, Datafeed> newDatafeeds = new TreeMap<>(datafeeds.apply(((MlMetadata) part).datafeeds));
return new MlMetadata(newJobs, newAllocations, newDatafeeds);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
jobs.writeTo(out);
allocations.writeTo(out);
schedulers.writeTo(out);
datafeeds.writeTo(out);
}
}
@ -193,30 +193,30 @@ public class MlMetadata implements MetaData.Custom {
MlMetadata that = (MlMetadata) o;
return Objects.equals(jobs, that.jobs) &&
Objects.equals(allocations, that.allocations) &&
Objects.equals(schedulers, that.schedulers);
Objects.equals(datafeeds, that.datafeeds);
}
@Override
public int hashCode() {
return Objects.hash(jobs, allocations, schedulers);
return Objects.hash(jobs, allocations, datafeeds);
}
public static class Builder {
private TreeMap<String, Job> jobs;
private TreeMap<String, Allocation> allocations;
private TreeMap<String, Scheduler> schedulers;
private TreeMap<String, Datafeed> datafeeds;
public Builder() {
this.jobs = new TreeMap<>();
this.allocations = new TreeMap<>();
this.schedulers = new TreeMap<>();
this.datafeeds = new TreeMap<>();
}
public Builder(MlMetadata previous) {
jobs = new TreeMap<>(previous.jobs);
allocations = new TreeMap<>(previous.allocations);
schedulers = new TreeMap<>(previous.schedulers);
datafeeds = new TreeMap<>(previous.datafeeds);
}
public Builder putJob(Job job, boolean overwrite) {
@ -241,10 +241,10 @@ public class MlMetadata implements MetaData.Custom {
throw new ResourceNotFoundException("job [" + jobId + "] does not exist");
}
Optional<Scheduler> scheduler = getSchedulerByJobId(jobId);
if (scheduler.isPresent()) {
throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] while scheduler ["
+ scheduler.get().getId() + "] refers to it");
Optional<Datafeed> datafeed = getDatafeedByJobId(jobId);
if (datafeed.isPresent()) {
throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] while datafeed ["
+ datafeed.get().getId() + "] refers to it");
}
Allocation previousAllocation = this.allocations.remove(jobId);
@ -260,45 +260,45 @@ public class MlMetadata implements MetaData.Custom {
return this;
}
public Builder putScheduler(SchedulerConfig schedulerConfig) {
if (schedulers.containsKey(schedulerConfig.getId())) {
throw new ResourceAlreadyExistsException("A scheduler with id [" + schedulerConfig.getId() + "] already exists");
public Builder putDatafeed(DatafeedConfig datafeedConfig) {
if (datafeeds.containsKey(datafeedConfig.getId())) {
throw new ResourceAlreadyExistsException("A datafeed with id [" + datafeedConfig.getId() + "] already exists");
}
String jobId = schedulerConfig.getJobId();
String jobId = datafeedConfig.getJobId();
Job job = jobs.get(jobId);
if (job == null) {
throw ExceptionsHelper.missingJobException(jobId);
}
Optional<Scheduler> existingScheduler = getSchedulerByJobId(jobId);
if (existingScheduler.isPresent()) {
throw ExceptionsHelper.conflictStatusException("A scheduler [" + existingScheduler.get().getId()
Optional<Datafeed> existingDatafeed = getDatafeedByJobId(jobId);
if (existingDatafeed.isPresent()) {
throw ExceptionsHelper.conflictStatusException("A datafeed [" + existingDatafeed.get().getId()
+ "] already exists for job [" + jobId + "]");
}
ScheduledJobValidator.validate(schedulerConfig, job);
DatafeedJobValidator.validate(datafeedConfig, job);
return putScheduler(new Scheduler(schedulerConfig, SchedulerStatus.STOPPED));
return putDatafeed(new Datafeed(datafeedConfig, DatafeedStatus.STOPPED));
}
private Builder putScheduler(Scheduler scheduler) {
schedulers.put(scheduler.getId(), scheduler);
private Builder putDatafeed(Datafeed datafeed) {
datafeeds.put(datafeed.getId(), datafeed);
return this;
}
public Builder removeScheduler(String schedulerId) {
Scheduler scheduler = schedulers.get(schedulerId);
if (scheduler == null) {
throw ExceptionsHelper.missingSchedulerException(schedulerId);
public Builder removeDatafeed(String datafeedId) {
Datafeed datafeed = datafeeds.get(datafeedId);
if (datafeed == null) {
throw ExceptionsHelper.missingDatafeedException(datafeedId);
}
if (scheduler.getStatus() != SchedulerStatus.STOPPED) {
String msg = Messages.getMessage(Messages.SCHEDULER_CANNOT_DELETE_IN_CURRENT_STATE, schedulerId, scheduler.getStatus());
if (datafeed.getStatus() != DatafeedStatus.STOPPED) {
String msg = Messages.getMessage(Messages.DATAFEED_CANNOT_DELETE_IN_CURRENT_STATE, datafeedId, datafeed.getStatus());
throw ExceptionsHelper.conflictStatusException(msg);
}
schedulers.remove(schedulerId);
datafeeds.remove(datafeedId);
return this;
}
private Optional<Scheduler> getSchedulerByJobId(String jobId) {
return schedulers.values().stream().filter(s -> s.getJobId().equals(jobId)).findFirst();
private Optional<Datafeed> getDatafeedByJobId(String jobId) {
return datafeeds.values().stream().filter(s -> s.getJobId().equals(jobId)).findFirst();
}
// only for parsing
@ -317,15 +317,15 @@ public class MlMetadata implements MetaData.Custom {
return this;
}
private Builder putSchedulers(Collection<Scheduler> schedulers) {
for (Scheduler scheduler : schedulers) {
putScheduler(scheduler);
private Builder putDatafeeds(Collection<Datafeed> datafeeds) {
for (Datafeed datafeed : datafeeds) {
putDatafeed(datafeed);
}
return this;
}
public MlMetadata build() {
return new MlMetadata(jobs, allocations, schedulers);
return new MlMetadata(jobs, allocations, datafeeds);
}
public Builder assignToNode(String jobId, String nodeId) {
@ -349,12 +349,12 @@ public class MlMetadata implements MetaData.Custom {
throw new IllegalStateException("[" + jobId + "] no allocation exist to update the status to [" + jobStatus + "]");
}
// Cannot update the status to DELETING if there are schedulers attached
// Cannot update the status to DELETING if there are datafeeds attached
if (jobStatus.equals(JobStatus.DELETING)) {
Optional<Scheduler> scheduler = getSchedulerByJobId(jobId);
if (scheduler.isPresent()) {
throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] while scheduler ["
+ scheduler.get().getId() + "] refers to it");
Optional<Datafeed> datafeed = getDatafeedByJobId(jobId);
if (datafeed.isPresent()) {
throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] while datafeed ["
+ datafeed.get().getId() + "] refers to it");
}
}
@ -397,32 +397,32 @@ public class MlMetadata implements MetaData.Custom {
return this;
}
public Builder updateSchedulerStatus(String schedulerId, SchedulerStatus newStatus) {
Scheduler scheduler = schedulers.get(schedulerId);
if (scheduler == null) {
throw ExceptionsHelper.missingSchedulerException(schedulerId);
public Builder updateDatafeedStatus(String datafeedId, DatafeedStatus newStatus) {
Datafeed datafeed = datafeeds.get(datafeedId);
if (datafeed == null) {
throw ExceptionsHelper.missingDatafeedException(datafeedId);
}
SchedulerStatus currentStatus = scheduler.getStatus();
DatafeedStatus currentStatus = datafeed.getStatus();
switch (newStatus) {
case STARTED:
if (currentStatus != SchedulerStatus.STOPPED) {
String msg = Messages.getMessage(Messages.SCHEDULER_CANNOT_START, schedulerId, newStatus);
if (currentStatus != DatafeedStatus.STOPPED) {
String msg = Messages.getMessage(Messages.DATAFEED_CANNOT_START, datafeedId, newStatus);
throw ExceptionsHelper.conflictStatusException(msg);
}
break;
case STOPPED:
if (currentStatus != SchedulerStatus.STARTED) {
String msg = Messages.getMessage(Messages.SCHEDULER_CANNOT_STOP_IN_CURRENT_STATE, schedulerId, newStatus);
if (currentStatus != DatafeedStatus.STARTED) {
String msg = Messages.getMessage(Messages.DATAFEED_CANNOT_STOP_IN_CURRENT_STATE, datafeedId, newStatus);
throw ExceptionsHelper.conflictStatusException(msg);
}
break;
default:
throw new IllegalArgumentException("[" + schedulerId + "] requested invalid scheduler status [" + newStatus + "]");
throw new IllegalArgumentException("[" + datafeedId + "] requested invalid datafeed status [" + newStatus + "]");
}
schedulers.put(schedulerId, new Scheduler(scheduler.getConfig(), newStatus));
datafeeds.put(datafeedId, new Datafeed(datafeed.getConfig(), newStatus));
return this;
}
}
}
}

View File

@ -38,9 +38,9 @@ import java.util.function.Function;
* changes when each of the reporting stages are passed. If the
* function returns {@code true} the usage is logged.
*
* DataCounts are persisted periodically in a scheduled task via
* DataCounts are persisted periodically in a datafeed task via
* {@linkplain JobDataCountsPersister}, {@link #close()} must be called to
* cancel the scheduled task.
* cancel the datafeed task.
*/
public class StatusReporter extends AbstractComponent implements Closeable {
/**
@ -78,7 +78,7 @@ public class StatusReporter extends AbstractComponent implements Closeable {
private Function<Long, Boolean> reportingBoundaryFunction;
private volatile boolean persistDataCountsOnNextRecord;
private final ThreadPool.Cancellable persistDataCountsScheduledAction;
private final ThreadPool.Cancellable persistDataCountsDatafeedAction;
public StatusReporter(ThreadPool threadPool, Settings settings, String jobId, DataCounts counts, UsageReporter usageReporter,
JobDataCountsPersister dataCountsPersister) {
@ -97,7 +97,7 @@ public class StatusReporter extends AbstractComponent implements Closeable {
reportingBoundaryFunction = this::reportEvery100Records;
persistDataCountsScheduledAction = threadPool.scheduleWithFixedDelay(() -> persistDataCountsOnNextRecord = true,
persistDataCountsDatafeedAction = threadPool.scheduleWithFixedDelay(() -> persistDataCountsOnNextRecord = true,
PERSIST_INTERVAL, ThreadPool.Names.GENERIC);
}
@ -353,7 +353,7 @@ public class StatusReporter extends AbstractComponent implements Closeable {
@Override
public void close() {
persistDataCountsScheduledAction.cancel();
persistDataCountsDatafeedAction.cancel();
}
/**

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.rest.schedulers;
package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject;
@ -13,25 +13,25 @@ import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.AcknowledgedRestListener;
import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.DeleteSchedulerAction;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.action.DeleteDatafeedAction;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.io.IOException;
public class RestDeleteSchedulerAction extends BaseRestHandler {
public class RestDeleteDatafeedAction extends BaseRestHandler {
@Inject
public RestDeleteSchedulerAction(Settings settings, RestController controller) {
public RestDeleteDatafeedAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.DELETE, MlPlugin.BASE_PATH + "schedulers/{"
+ SchedulerConfig.ID.getPreferredName() + "}", this);
controller.registerHandler(RestRequest.Method.DELETE, MlPlugin.BASE_PATH + "datafeeds/{"
+ DatafeedConfig.ID.getPreferredName() + "}", this);
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
String schedulerId = restRequest.param(SchedulerConfig.ID.getPreferredName());
DeleteSchedulerAction.Request deleteSchedulerRequest = new DeleteSchedulerAction.Request(schedulerId);
return channel -> client.execute(DeleteSchedulerAction.INSTANCE, deleteSchedulerRequest, new AcknowledgedRestListener<>(channel));
String datafeedId = restRequest.param(DatafeedConfig.ID.getPreferredName());
DeleteDatafeedAction.Request deleteDatafeedRequest = new DeleteDatafeedAction.Request(datafeedId);
return channel -> client.execute(DeleteDatafeedAction.INSTANCE, deleteDatafeedRequest, new AcknowledgedRestListener<>(channel));
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.rest.schedulers;
package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject;
@ -13,23 +13,23 @@ import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.GetSchedulersAction;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.action.GetDatafeedsAction;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.io.IOException;
public class RestGetSchedulersAction extends BaseRestHandler {
public class RestGetDatafeedsAction extends BaseRestHandler {
@Inject
public RestGetSchedulersAction(Settings settings, RestController controller) {
public RestGetDatafeedsAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.GET, MlPlugin.BASE_PATH
+ "schedulers/{" + SchedulerConfig.ID.getPreferredName() + "}", this);
+ "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", this);
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
GetSchedulersAction.Request request = new GetSchedulersAction.Request(restRequest.param(SchedulerConfig.ID.getPreferredName()));
return channel -> client.execute(GetSchedulersAction.INSTANCE, request, new RestToXContentListener<>(channel));
GetDatafeedsAction.Request request = new GetDatafeedsAction.Request(restRequest.param(DatafeedConfig.ID.getPreferredName()));
return channel -> client.execute(GetDatafeedsAction.INSTANCE, request, new RestToXContentListener<>(channel));
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.rest.schedulers;
package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject;
@ -13,24 +13,24 @@ import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.GetSchedulersStatsAction;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.action.GetDatafeedsStatsAction;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.io.IOException;
public class RestGetSchedulersStatsAction extends BaseRestHandler {
public class RestGetDatafeedsStatsAction extends BaseRestHandler {
@Inject
public RestGetSchedulersStatsAction(Settings settings, RestController controller) {
public RestGetDatafeedsStatsAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.GET, MlPlugin.BASE_PATH
+ "schedulers/{" + SchedulerConfig.ID.getPreferredName() + "}/_stats", this);
+ "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_stats", this);
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
GetSchedulersStatsAction.Request request = new GetSchedulersStatsAction.Request(
restRequest.param(SchedulerConfig.ID.getPreferredName()));
return channel -> client.execute(GetSchedulersStatsAction.INSTANCE, request, new RestToXContentListener<>(channel));
GetDatafeedsStatsAction.Request request = new GetDatafeedsStatsAction.Request(
restRequest.param(DatafeedConfig.ID.getPreferredName()));
return channel -> client.execute(GetDatafeedsStatsAction.INSTANCE, request, new RestToXContentListener<>(channel));
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.rest.schedulers;
package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject;
@ -14,26 +14,26 @@ import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.PutSchedulerAction;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.action.PutDatafeedAction;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.io.IOException;
public class RestPutSchedulerAction extends BaseRestHandler {
public class RestPutDatafeedAction extends BaseRestHandler {
@Inject
public RestPutSchedulerAction(Settings settings, RestController controller) {
public RestPutDatafeedAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.PUT, MlPlugin.BASE_PATH + "schedulers/{"
+ SchedulerConfig.ID.getPreferredName() + "}", this);
controller.registerHandler(RestRequest.Method.PUT, MlPlugin.BASE_PATH + "datafeeds/{"
+ DatafeedConfig.ID.getPreferredName() + "}", this);
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
String schedulerId = restRequest.param(SchedulerConfig.ID.getPreferredName());
String datafeedId = restRequest.param(DatafeedConfig.ID.getPreferredName());
XContentParser parser = restRequest.contentParser();
PutSchedulerAction.Request putSchedulerRequest = PutSchedulerAction.Request.parseRequest(schedulerId, parser);
return channel -> client.execute(PutSchedulerAction.INSTANCE, putSchedulerRequest, new RestToXContentListener<>(channel));
PutDatafeedAction.Request putDatafeedRequest = PutDatafeedAction.Request.parseRequest(datafeedId, parser);
return channel -> client.execute(PutDatafeedAction.INSTANCE, putDatafeedRequest, new RestToXContentListener<>(channel));
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.rest.schedulers;
package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.node.NodeClient;
@ -17,46 +17,46 @@ import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.StartSchedulerAction;
import org.elasticsearch.xpack.ml.action.StartDatafeedAction;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.io.IOException;
public class RestStartSchedulerAction extends BaseRestHandler {
public class RestStartDatafeedAction extends BaseRestHandler {
private static final String DEFAULT_START = "0";
@Inject
public RestStartSchedulerAction(Settings settings, RestController controller) {
public RestStartDatafeedAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.POST,
MlPlugin.BASE_PATH + "schedulers/{" + SchedulerConfig.ID.getPreferredName() + "}/_start", this);
MlPlugin.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_start", this);
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
String schedulerId = restRequest.param(SchedulerConfig.ID.getPreferredName());
StartSchedulerAction.Request jobSchedulerRequest;
String datafeedId = restRequest.param(DatafeedConfig.ID.getPreferredName());
StartDatafeedAction.Request jobDatafeedRequest;
if (restRequest.hasContentOrSourceParam()) {
XContentParser parser = restRequest.contentOrSourceParamParser();
jobSchedulerRequest = StartSchedulerAction.Request.parseRequest(schedulerId, parser);
jobDatafeedRequest = StartDatafeedAction.Request.parseRequest(datafeedId, parser);
} else {
long startTimeMillis = parseDateOrThrow(restRequest.param(StartSchedulerAction.START_TIME.getPreferredName(),
DEFAULT_START), StartSchedulerAction.START_TIME.getPreferredName());
long startTimeMillis = parseDateOrThrow(restRequest.param(StartDatafeedAction.START_TIME.getPreferredName(),
DEFAULT_START), StartDatafeedAction.START_TIME.getPreferredName());
Long endTimeMillis = null;
if (restRequest.hasParam(StartSchedulerAction.END_TIME.getPreferredName())) {
endTimeMillis = parseDateOrThrow(restRequest.param(StartSchedulerAction.END_TIME.getPreferredName()),
StartSchedulerAction.END_TIME.getPreferredName());
if (restRequest.hasParam(StartDatafeedAction.END_TIME.getPreferredName())) {
endTimeMillis = parseDateOrThrow(restRequest.param(StartDatafeedAction.END_TIME.getPreferredName()),
StartDatafeedAction.END_TIME.getPreferredName());
}
jobSchedulerRequest = new StartSchedulerAction.Request(schedulerId, startTimeMillis);
jobSchedulerRequest.setEndTime(endTimeMillis);
TimeValue startTimeout = restRequest.paramAsTime(StartSchedulerAction.START_TIMEOUT.getPreferredName(),
jobDatafeedRequest = new StartDatafeedAction.Request(datafeedId, startTimeMillis);
jobDatafeedRequest.setEndTime(endTimeMillis);
TimeValue startTimeout = restRequest.paramAsTime(StartDatafeedAction.START_TIMEOUT.getPreferredName(),
TimeValue.timeValueSeconds(30));
jobSchedulerRequest.setStartTimeout(startTimeout);
jobDatafeedRequest.setStartTimeout(startTimeout);
}
return channel -> {
client.execute(StartSchedulerAction.INSTANCE, jobSchedulerRequest, new RestToXContentListener<>(channel));
client.execute(StartDatafeedAction.INSTANCE, jobDatafeedRequest, new RestToXContentListener<>(channel));
};
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.rest.schedulers;
package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject;
@ -14,27 +14,27 @@ import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.AcknowledgedRestListener;
import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.StopSchedulerAction;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.action.StopDatafeedAction;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.io.IOException;
public class RestStopSchedulerAction extends BaseRestHandler {
public class RestStopDatafeedAction extends BaseRestHandler {
@Inject
public RestStopSchedulerAction(Settings settings, RestController controller) {
public RestStopDatafeedAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.POST, MlPlugin.BASE_PATH + "schedulers/{"
+ SchedulerConfig.ID.getPreferredName() + "}/_stop", this);
controller.registerHandler(RestRequest.Method.POST, MlPlugin.BASE_PATH + "datafeeds/{"
+ DatafeedConfig.ID.getPreferredName() + "}/_stop", this);
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
StopSchedulerAction.Request jobSchedulerRequest = new StopSchedulerAction.Request(
restRequest.param(SchedulerConfig.ID.getPreferredName()));
StopDatafeedAction.Request jobDatafeedRequest = new StopDatafeedAction.Request(
restRequest.param(DatafeedConfig.ID.getPreferredName()));
if (restRequest.hasParam("stop_timeout")) {
jobSchedulerRequest.setStopTimeout(TimeValue.parseTimeValue(restRequest.param("stop_timeout"), "stop_timeout"));
jobDatafeedRequest.setStopTimeout(TimeValue.parseTimeValue(restRequest.param("stop_timeout"), "stop_timeout"));
}
return channel -> client.execute(StopSchedulerAction.INSTANCE, jobSchedulerRequest, new AcknowledgedRestListener<>(channel));
return channel -> client.execute(StopDatafeedAction.INSTANCE, jobDatafeedRequest, new AcknowledgedRestListener<>(channel));
}
}

View File

@ -13,25 +13,25 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.scheduler.Scheduler;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
import org.elasticsearch.xpack.ml.datafeed.Datafeed;
import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import java.util.function.Consumer;
import java.util.function.Predicate;
public class SchedulerStatusObserver {
public class DatafeedStatusObserver {
private static final Logger LOGGER = Loggers.getLogger(SchedulerStatusObserver.class);
private static final Logger LOGGER = Loggers.getLogger(DatafeedStatusObserver.class);
private final ThreadPool threadPool;
private final ClusterService clusterService;
public SchedulerStatusObserver(ThreadPool threadPool, ClusterService clusterService) {
public DatafeedStatusObserver(ThreadPool threadPool, ClusterService clusterService) {
this.threadPool = threadPool;
this.clusterService = clusterService;
}
public void waitForStatus(String schedulerId, TimeValue waitTimeout, SchedulerStatus expectedStatus, Consumer<Exception> handler) {
public void waitForStatus(String datafeedId, TimeValue waitTimeout, DatafeedStatus expectedStatus, Consumer<Exception> handler) {
ClusterStateObserver observer =
new ClusterStateObserver(clusterService, LOGGER, threadPool.getThreadContext());
observer.waitForNextChange(new ClusterStateObserver.Listener() {
@ -42,27 +42,27 @@ public class SchedulerStatusObserver {
@Override
public void onClusterServiceClose() {
Exception e = new IllegalArgumentException("Cluster service closed while waiting for scheduler status to change to ["
Exception e = new IllegalArgumentException("Cluster service closed while waiting for datafeed status to change to ["
+ expectedStatus + "]");
handler.accept(new IllegalStateException(e));
}
@Override
public void onTimeout(TimeValue timeout) {
Exception e = new IllegalArgumentException("Timeout expired while waiting for scheduler status to change to ["
Exception e = new IllegalArgumentException("Timeout expired while waiting for datafeed status to change to ["
+ expectedStatus + "]");
handler.accept(e);
}
}, new SchedulerStoppedPredicate(schedulerId, expectedStatus), waitTimeout);
}, new DatafeedStoppedPredicate(datafeedId, expectedStatus), waitTimeout);
}
private static class SchedulerStoppedPredicate implements Predicate<ClusterState> {
private static class DatafeedStoppedPredicate implements Predicate<ClusterState> {
private final String schedulerId;
private final SchedulerStatus expectedStatus;
private final String datafeedId;
private final DatafeedStatus expectedStatus;
SchedulerStoppedPredicate(String schedulerId, SchedulerStatus expectedStatus) {
this.schedulerId = schedulerId;
DatafeedStoppedPredicate(String datafeedId, DatafeedStatus expectedStatus) {
this.datafeedId = datafeedId;
this.expectedStatus = expectedStatus;
}
@ -70,9 +70,9 @@ public class SchedulerStatusObserver {
public boolean test(ClusterState newState) {
MlMetadata metadata = newState.getMetaData().custom(MlMetadata.TYPE);
if (metadata != null) {
Scheduler scheduler = metadata.getScheduler(schedulerId);
if (scheduler != null) {
return scheduler.getStatus() == expectedStatus;
Datafeed datafeed = metadata.getDatafeed(datafeedId);
if (datafeed != null) {
return datafeed.getStatus() == expectedStatus;
}
}
return false;

View File

@ -25,8 +25,8 @@ public class ExceptionsHelper {
throw new ResourceAlreadyExistsException(Messages.getMessage(Messages.JOB_CONFIG_ID_ALREADY_TAKEN, jobId));
}
public static ResourceNotFoundException missingSchedulerException(String schedulerId) {
throw new ResourceNotFoundException(Messages.getMessage(Messages.SCHEDULER_NOT_FOUND, schedulerId));
public static ResourceNotFoundException missingDatafeedException(String datafeedId) {
throw new ResourceNotFoundException(Messages.getMessage(Messages.DATAFEED_NOT_FOUND, datafeedId));
}
public static ElasticsearchException serverError(String msg) {

View File

@ -27,16 +27,16 @@ job.audit.updated = Job updated: {0}
job.audit.reverted = Job model snapshot reverted to ''{0}''
job.audit.old.results.deleted = Deleted results prior to {0}
job.audit.snapshot.deleted = Job model snapshot ''{0}'' deleted
job.audit.scheduler.started.from.to = Scheduler started (from: {0} to: {1})
job.audit.scheduler.started.realtime = Scheduler started in real-time
job.audit.scheduler.continued.realtime = Scheduler continued in real-time
job.audit.scheduler.lookback.completed = Scheduler lookback completed
job.audit.scheduler.stopped = Scheduler stopped
job.audit.scheduler.no.data = Scheduler has been retrieving no data for a while
job.audit.scheduler.data.seen.again = Scheduler has started retrieving data again
job.audit.scheduler.data.analysis.error = Scheduler is encountering errors submitting data for analysis: {0}
job.audit.scheduler.data.extraction.error = Scheduler is encountering errors extracting data: {0}
job.audit.scheduler.recovered = Scheduler has recovered data extraction and analysis
job.audit.datafeed.started.from.to = Datafeed started (from: {0} to: {1})
job.audit.datafeed.started.realtime = Datafeed started in real-time
job.audit.datafeed.continued.realtime = Datafeed continued in real-time
job.audit.datafeed.lookback.completed = Datafeed lookback completed
job.audit.datafeed.stopped = Datafeed stopped
job.audit.datafeed.no.data = Datafeed has been retrieving no data for a while
job.audit.datafeed.data.seen.again = Datafeed has started retrieving data again
job.audit.datafeed.data.analysis.error = Datafeed is encountering errors submitting data for analysis: {0}
job.audit.datafeed.data.extraction.error = Datafeed is encountering errors extracting data: {0}
job.audit.datafeed.recovered = Datafeed has recovered data extraction and analysis
system.audit.started = System started
system.audit.shutdown = System shut down
@ -115,8 +115,8 @@ job.config.update.background.persist.interval.invalid = Invalid update value for
job.config.update.renormalization.window.days.invalid = Invalid update value for renormalization_window_days: value must be an exact number of days
job.config.update.model.snapshot.retention.days.invalid = Invalid update value for model_snapshot_retention_days: value must be an exact number of days
job.config.update.results.retention.days.invalid = Invalid update value for results_retention_days: value must be an exact number of days
job.config.update.scheduler.config.parse.error = JSON parse error reading the update value for scheduler_config
job.config.update.scheduler.config.cannot.be.null = Invalid update value for scheduler_config: null
job.config.update.datafeed.config.parse.error = JSON parse error reading the update value for datafeed_config
job.config.update.datafeed.config.cannot.be.null = Invalid update value for datafeed_config: null
job.config.transform.circular.dependency = Transform type {0} with inputs {1} has a circular dependency
job.config.transform.condition.required = A condition must be defined for transform ''{0}''
@ -135,10 +135,10 @@ job.config.unknown.function = Unknown function ''{0}''
job.index.already.exists = Cannot create index ''{0}'' as it already exists
scheduler.config.invalid.option.value = Invalid {0} value ''{1}'' in scheduler configuration
datafeed.config.invalid.option.value = Invalid {0} value ''{1}'' in datafeed configuration
scheduler.does.not.support.job.with.latency = A job configured with scheduler cannot support latency
scheduler.aggregations.requires.job.with.summary.count.field = A job configured with a scheduler with aggregations must have summary_count_field_name ''{0}''
datafeed.does.not.support.job.with.latency = A job configured with datafeed cannot support latency
datafeed.aggregations.requires.job.with.summary.count.field = A job configured with a datafeed with aggregations must have summary_count_field_name ''{0}''
job.data.concurrent.use.close = Cannot close job {0} while another connection {2}is {1} the job
job.data.concurrent.use.flush = Cannot flush job {0} while another connection {2}is {1} the job
@ -151,12 +151,12 @@ job.data.concurrent.use.upload = Cannot write to job {0} while another connectio
job.missing.quantiles = Cannot read persisted quantiles for job ''{0}''
job.unknown.id = No known job with id ''{0}''
scheduler.cannot.start = Cannot start scheduler [{0}] while its status is {1}
scheduler.cannot.stop.in.current.state = Cannot stop scheduler [{0}] while its status is {1}
scheduler.cannot.update.in.current.state = Cannot update scheduler [{0}] while its status is {1}
scheduler.cannot.delete.in.current.state = Cannot delete scheduler [{0}] while its status is {1}
scheduler.failed.to.stop = Failed to stop scheduler
scheduler.not.found = No scheduler with id [{0}] exists
datafeed.cannot.start = Cannot start datafeed [{0}] while its status is {1}
datafeed.cannot.stop.in.current.state = Cannot stop datafeed [{0}] while its status is {1}
datafeed.cannot.update.in.current.state = Cannot update datafeed [{0}] while its status is {1}
datafeed.cannot.delete.in.current.state = Cannot delete datafeed [{0}] while its status is {1}
datafeed.failed.to.stop = Failed to stop datafeed
datafeed.not.found = No datafeed with id [{0}] exists
json.job.config.mapping.error = JSON mapping error reading the job configuration
json.job.config.parse.error = JSON parse error reading the job configuration
@ -170,7 +170,7 @@ json.list.document.parse.error = JSON parse error reading the list
json.transform.config.mapping.error = JSON mapping error reading the transform configuration
json.transform.config.parse.error = JSON parse error reading the transform configuration
rest.action.not.allowed.for.scheduled.job = This action is not allowed for a scheduled job
rest.action.not.allowed.for.datafeed.job = This action is not allowed for a datafeed job
rest.invalid.datetime.params = Query param ''{0}'' with value ''{1}'' cannot be parsed as a date or converted to a number (epoch).
rest.invalid.flush.params.missing.argument = Invalid flush parameters: ''{0}'' has not been specified.

View File

@ -27,9 +27,9 @@ import org.elasticsearch.xpack.ml.job.Detector;
import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.ml.scheduler.Scheduler;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
import org.elasticsearch.xpack.ml.datafeed.Datafeed;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.junit.After;
import java.io.IOException;
@ -45,7 +45,7 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
@ESIntegTestCase.ClusterScope(numDataNodes = 1)
public class ScheduledJobsIT extends ESIntegTestCase {
public class DatafeedJobsIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
@ -85,16 +85,16 @@ public class ScheduledJobsIT extends ESIntegTestCase {
OpenJobAction.Response openJobResponse = client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(job.getId())).get();
assertTrue(openJobResponse.isAcknowledged());
SchedulerConfig schedulerConfig = createScheduler(job.getId() + "-scheduler", job.getId(), Collections.singletonList("data-*"));
PutSchedulerAction.Request putSchedulerRequest = new PutSchedulerAction.Request(schedulerConfig);
PutSchedulerAction.Response putSchedulerResponse = client().execute(PutSchedulerAction.INSTANCE, putSchedulerRequest).get();
assertTrue(putSchedulerResponse.isAcknowledged());
DatafeedConfig datafeedConfig = createDatafeed(job.getId() + "-datafeed", job.getId(), Collections.singletonList("data-*"));
PutDatafeedAction.Request putDatafeedRequest = new PutDatafeedAction.Request(datafeedConfig);
PutDatafeedAction.Response putDatafeedResponse = client().execute(PutDatafeedAction.INSTANCE, putDatafeedRequest).get();
assertTrue(putDatafeedResponse.isAcknowledged());
StartSchedulerAction.Request startSchedulerRequest = new StartSchedulerAction.Request(schedulerConfig.getId(), 0L);
startSchedulerRequest.setEndTime(now);
StartSchedulerAction.Response startSchedulerResponse =
client().execute(StartSchedulerAction.INSTANCE, startSchedulerRequest).get();
assertTrue(startSchedulerResponse.isStarted());
StartDatafeedAction.Request startDatafeedRequest = new StartDatafeedAction.Request(datafeedConfig.getId(), 0L);
startDatafeedRequest.setEndTime(now);
StartDatafeedAction.Response startDatafeedResponse =
client().execute(StartDatafeedAction.INSTANCE, startDatafeedRequest).get();
assertTrue(startDatafeedResponse.isStarted());
assertBusy(() -> {
DataCounts dataCounts = getDataCounts(job.getId());
assertThat(dataCounts.getProcessedRecordCount(), equalTo(numDocs + numDocs2));
@ -102,7 +102,7 @@ public class ScheduledJobsIT extends ESIntegTestCase {
MlMetadata mlMetadata = client().admin().cluster().prepareState().all().get()
.getState().metaData().custom(MlMetadata.TYPE);
assertThat(mlMetadata.getScheduler(schedulerConfig.getId()).get().getStatus(), equalTo(SchedulerStatus.STOPPED));
assertThat(mlMetadata.getDatafeed(datafeedConfig.getId()).get().getStatus(), equalTo(DatafeedStatus.STOPPED));
});
}
@ -122,15 +122,15 @@ public class ScheduledJobsIT extends ESIntegTestCase {
OpenJobAction.Response openJobResponse = client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(job.getId())).get();
assertTrue(openJobResponse.isAcknowledged());
SchedulerConfig schedulerConfig = createScheduler(job.getId() + "-scheduler", job.getId(), Collections.singletonList("data"));
PutSchedulerAction.Request putSchedulerRequest = new PutSchedulerAction.Request(schedulerConfig);
PutSchedulerAction.Response putSchedulerResponse = client().execute(PutSchedulerAction.INSTANCE, putSchedulerRequest).get();
assertTrue(putSchedulerResponse.isAcknowledged());
DatafeedConfig datafeedConfig = createDatafeed(job.getId() + "-datafeed", job.getId(), Collections.singletonList("data"));
PutDatafeedAction.Request putDatafeedRequest = new PutDatafeedAction.Request(datafeedConfig);
PutDatafeedAction.Response putDatafeedResponse = client().execute(PutDatafeedAction.INSTANCE, putDatafeedRequest).get();
assertTrue(putDatafeedResponse.isAcknowledged());
StartSchedulerAction.Request startSchedulerRequest = new StartSchedulerAction.Request(schedulerConfig.getId(), 0L);
StartSchedulerAction.Response startSchedulerResponse =
client().execute(StartSchedulerAction.INSTANCE, startSchedulerRequest).get();
assertTrue(startSchedulerResponse.isStarted());
StartDatafeedAction.Request startDatafeedRequest = new StartDatafeedAction.Request(datafeedConfig.getId(), 0L);
StartDatafeedAction.Response startDatafeedResponse =
client().execute(StartDatafeedAction.INSTANCE, startDatafeedRequest).get();
assertTrue(startDatafeedResponse.isStarted());
assertBusy(() -> {
DataCounts dataCounts = getDataCounts(job.getId());
assertThat(dataCounts.getProcessedRecordCount(), equalTo(numDocs1));
@ -146,13 +146,13 @@ public class ScheduledJobsIT extends ESIntegTestCase {
assertThat(dataCounts.getOutOfOrderTimeStampCount(), equalTo(0L));
}, 30, TimeUnit.SECONDS);
StopSchedulerAction.Request stopSchedulerRequest = new StopSchedulerAction.Request(schedulerConfig.getId());
StopSchedulerAction.Response stopJobResponse = client().execute(StopSchedulerAction.INSTANCE, stopSchedulerRequest).get();
StopDatafeedAction.Request stopDatafeedRequest = new StopDatafeedAction.Request(datafeedConfig.getId());
StopDatafeedAction.Response stopJobResponse = client().execute(StopDatafeedAction.INSTANCE, stopDatafeedRequest).get();
assertTrue(stopJobResponse.isAcknowledged());
assertBusy(() -> {
MlMetadata mlMetadata = client().admin().cluster().prepareState().all().get()
.getState().metaData().custom(MlMetadata.TYPE);
assertThat(mlMetadata.getScheduler(schedulerConfig.getId()).get().getStatus(), equalTo(SchedulerStatus.STOPPED));
assertThat(mlMetadata.getDatafeed(datafeedConfig.getId()).get().getStatus(), equalTo(DatafeedStatus.STOPPED));
});
}
@ -189,8 +189,8 @@ public class ScheduledJobsIT extends ESIntegTestCase {
return builder;
}
private SchedulerConfig createScheduler(String schedulerId, String jobId, List<String> indexes) {
SchedulerConfig.Builder builder = new SchedulerConfig.Builder(schedulerId, jobId);
private DatafeedConfig createDatafeed(String datafeedId, String jobId, List<String> indexes) {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder(datafeedId, jobId);
builder.setQueryDelay(1);
builder.setFrequency(2);
builder.setIndexes(indexes);
@ -213,36 +213,36 @@ public class ScheduledJobsIT extends ESIntegTestCase {
}
public static void clearMlMetadata(Client client) throws Exception {
deleteAllSchedulers(client);
deleteAllDatafeeds(client);
deleteAllJobs(client);
}
private static void deleteAllSchedulers(Client client) throws Exception {
private static void deleteAllDatafeeds(Client client) throws Exception {
MetaData metaData = client.admin().cluster().prepareState().get().getState().getMetaData();
MlMetadata mlMetadata = metaData.custom(MlMetadata.TYPE);
for (Scheduler scheduler : mlMetadata.getSchedulers().values()) {
String schedulerId = scheduler.getId();
for (Datafeed datafeed : mlMetadata.getDatafeeds().values()) {
String datafeedId = datafeed.getId();
try {
StopSchedulerAction.Response stopResponse =
client.execute(StopSchedulerAction.INSTANCE, new StopSchedulerAction.Request(schedulerId)).get();
StopDatafeedAction.Response stopResponse =
client.execute(StopDatafeedAction.INSTANCE, new StopDatafeedAction.Request(datafeedId)).get();
assertTrue(stopResponse.isAcknowledged());
} catch (ExecutionException e) {
// CONFLICT is ok, as it means the scheduler has already stopped, which isn't an issue at all.
// CONFLICT is ok, as it means the datafeed has already stopped, which isn't an issue at all.
if (RestStatus.CONFLICT != ExceptionsHelper.status(e.getCause())) {
throw new RuntimeException(e);
}
}
assertBusy(() -> {
try {
GetSchedulersStatsAction.Request request = new GetSchedulersStatsAction.Request(schedulerId);
GetSchedulersStatsAction.Response r = client.execute(GetSchedulersStatsAction.INSTANCE, request).get();
assertThat(r.getResponse().results().get(0).getSchedulerStatus(), equalTo(SchedulerStatus.STOPPED));
GetDatafeedsStatsAction.Request request = new GetDatafeedsStatsAction.Request(datafeedId);
GetDatafeedsStatsAction.Response r = client.execute(GetDatafeedsStatsAction.INSTANCE, request).get();
assertThat(r.getResponse().results().get(0).getDatafeedStatus(), equalTo(DatafeedStatus.STOPPED));
} catch (InterruptedException | ExecutionException e) {
throw new RuntimeException(e);
}
});
DeleteSchedulerAction.Response deleteResponse =
client.execute(DeleteSchedulerAction.INSTANCE, new DeleteSchedulerAction.Request(schedulerId)).get();
DeleteDatafeedAction.Response deleteResponse =
client.execute(DeleteDatafeedAction.INSTANCE, new DeleteDatafeedAction.Request(datafeedId)).get();
assertTrue(deleteResponse.isAcknowledged());
}
}

View File

@ -5,10 +5,10 @@
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.xpack.ml.action.DeleteSchedulerAction.Request;
import org.elasticsearch.xpack.ml.action.DeleteDatafeedAction.Request;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
public class DeleteSchedulerRequestTests extends AbstractStreamableTestCase<Request> {
public class DeleteDatafeedRequestTests extends AbstractStreamableTestCase<Request> {
@Override
protected Request createTestInstance() {
@ -19,4 +19,4 @@ public class DeleteSchedulerRequestTests extends AbstractStreamableTestCase<Requ
protected Request createBlankInstance() {
return new Request();
}
}
}

View File

@ -5,11 +5,11 @@
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.xpack.ml.action.GetSchedulersAction.Request;
import org.elasticsearch.xpack.ml.action.GetDatafeedsAction.Request;
import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
public class GetSchedulersActionRequestTests extends AbstractStreamableTestCase<Request> {
public class GetDatafeedsActionRequestTests extends AbstractStreamableTestCase<Request> {
@Override
protected Request createTestInstance() {

View File

@ -10,35 +10,35 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.xpack.ml.action.GetSchedulersAction.Response;
import org.elasticsearch.xpack.ml.action.GetDatafeedsAction.Response;
import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
import org.elasticsearch.xpack.ml.scheduler.Scheduler;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfigTests;
import org.elasticsearch.xpack.ml.datafeed.Datafeed;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class GetSchedulersActionResponseTests extends AbstractStreamableTestCase<Response> {
public class GetDatafeedsActionResponseTests extends AbstractStreamableTestCase<Response> {
@Override
protected Response createTestInstance() {
final Response result;
int listSize = randomInt(10);
List<SchedulerConfig> schedulerList = new ArrayList<>(listSize);
List<DatafeedConfig> datafeedList = new ArrayList<>(listSize);
for (int j = 0; j < listSize; j++) {
String schedulerId = SchedulerConfigTests.randomValidSchedulerId();
String datafeedId = DatafeedConfigTests.randomValidDatafeedId();
String jobId = randomAsciiOfLength(10);
SchedulerConfig.Builder schedulerConfig = new SchedulerConfig.Builder(schedulerId, jobId);
schedulerConfig.setIndexes(randomSubsetOf(2, Arrays.asList("index-1", "index-2", "index-3")));
schedulerConfig.setTypes(randomSubsetOf(2, Arrays.asList("type-1", "type-2", "type-3")));
schedulerConfig.setFrequency(randomNonNegativeLong());
schedulerConfig.setQueryDelay(randomNonNegativeLong());
DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, jobId);
datafeedConfig.setIndexes(randomSubsetOf(2, Arrays.asList("index-1", "index-2", "index-3")));
datafeedConfig.setTypes(randomSubsetOf(2, Arrays.asList("type-1", "type-2", "type-3")));
datafeedConfig.setFrequency(randomNonNegativeLong());
datafeedConfig.setQueryDelay(randomNonNegativeLong());
if (randomBoolean()) {
schedulerConfig.setQuery(QueryBuilders.termQuery(randomAsciiOfLength(10), randomAsciiOfLength(10)));
datafeedConfig.setQuery(QueryBuilders.termQuery(randomAsciiOfLength(10), randomAsciiOfLength(10)));
}
if (randomBoolean()) {
int scriptsSize = randomInt(3);
@ -47,21 +47,21 @@ public class GetSchedulersActionResponseTests extends AbstractStreamableTestCase
scriptFields.add(new SearchSourceBuilder.ScriptField(randomAsciiOfLength(10), new Script(randomAsciiOfLength(10)),
randomBoolean()));
}
schedulerConfig.setScriptFields(scriptFields);
datafeedConfig.setScriptFields(scriptFields);
}
if (randomBoolean()) {
schedulerConfig.setScrollSize(randomIntBetween(0, Integer.MAX_VALUE));
datafeedConfig.setScrollSize(randomIntBetween(0, Integer.MAX_VALUE));
}
if (randomBoolean()) {
AggregatorFactories.Builder aggsBuilder = new AggregatorFactories.Builder();
aggsBuilder.addAggregator(AggregationBuilders.avg(randomAsciiOfLength(10)));
schedulerConfig.setAggregations(aggsBuilder);
datafeedConfig.setAggregations(aggsBuilder);
}
schedulerList.add(schedulerConfig.build());
datafeedList.add(datafeedConfig.build());
}
result = new Response(new QueryPage<>(schedulerList, schedulerList.size(), Scheduler.RESULTS_FIELD));
result = new Response(new QueryPage<>(datafeedList, datafeedList.size(), Datafeed.RESULTS_FIELD));
return result;
}

View File

@ -5,11 +5,11 @@
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.xpack.ml.action.GetSchedulersStatsAction.Request;
import org.elasticsearch.xpack.ml.action.GetDatafeedsStatsAction.Request;
import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
public class GetSchedulersStatsActionRequestTests extends AbstractStreamableTestCase<Request> {
public class GetDatafeedsStatsActionRequestTests extends AbstractStreamableTestCase<Request> {
@Override
protected Request createTestInstance() {

View File

@ -0,0 +1,43 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.xpack.ml.action.GetDatafeedsStatsAction.Response;
import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
import org.elasticsearch.xpack.ml.datafeed.Datafeed;
import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
import java.util.ArrayList;
import java.util.List;
public class GetDatafeedsStatsActionResponseTests extends AbstractStreamableTestCase<Response> {
@Override
protected Response createTestInstance() {
final Response result;
int listSize = randomInt(10);
List<Response.DatafeedStats> datafeedStatsList = new ArrayList<>(listSize);
for (int j = 0; j < listSize; j++) {
String datafeedId = randomAsciiOfLength(10);
DatafeedStatus datafeedStatus = randomFrom(DatafeedStatus.values());
Response.DatafeedStats datafeedStats = new Response.DatafeedStats(datafeedId, datafeedStatus);
datafeedStatsList.add(datafeedStats);
}
result = new Response(new QueryPage<>(datafeedStatsList, datafeedStatsList.size(), Datafeed.RESULTS_FIELD));
return result;
}
@Override
protected Response createBlankInstance() {
return new Response();
}
}

View File

@ -11,7 +11,7 @@ import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.job.JobStatus;
import org.elasticsearch.xpack.ml.job.ModelSizeStats;
import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
import org.joda.time.DateTime;

View File

@ -1,43 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.xpack.ml.action.GetSchedulersStatsAction.Response;
import org.elasticsearch.xpack.ml.job.persistence.QueryPage;
import org.elasticsearch.xpack.ml.scheduler.Scheduler;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
import java.util.ArrayList;
import java.util.List;
public class GetSchedulersStatsActionResponseTests extends AbstractStreamableTestCase<Response> {
@Override
protected Response createTestInstance() {
final Response result;
int listSize = randomInt(10);
List<Response.SchedulerStats> schedulerStatsList = new ArrayList<>(listSize);
for (int j = 0; j < listSize; j++) {
String schedulerId = randomAsciiOfLength(10);
SchedulerStatus schedulerStatus = randomFrom(SchedulerStatus.values());
Response.SchedulerStats schedulerStats = new Response.SchedulerStats(schedulerId, schedulerStatus);
schedulerStatsList.add(schedulerStats);
}
result = new Response(new QueryPage<>(schedulerStatsList, schedulerStatsList.size(), Scheduler.RESULTS_FIELD));
return result;
}
@Override
protected Response createBlankInstance() {
return new Response();
}
}

View File

@ -0,0 +1,44 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.action.PutDatafeedAction.Request;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.xpack.ml.support.AbstractStreamableXContentTestCase;
import org.junit.Before;
import java.util.Arrays;
public class PutDatafeedActionRequestTests extends AbstractStreamableXContentTestCase<Request> {
private String datafeedId;
@Before
public void setUpDatafeedId() {
datafeedId = DatafeedConfigTests.randomValidDatafeedId();
}
@Override
protected Request createTestInstance() {
DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, randomAsciiOfLength(10));
datafeedConfig.setIndexes(Arrays.asList(randomAsciiOfLength(10)));
datafeedConfig.setTypes(Arrays.asList(randomAsciiOfLength(10)));
return new Request(datafeedConfig.build());
}
@Override
protected Request createBlankInstance() {
return new Request();
}
@Override
protected Request parseInstance(XContentParser parser) {
return Request.parseRequest(datafeedId, parser);
}
}

View File

@ -0,0 +1,31 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.xpack.ml.action.PutDatafeedAction.Response;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
import java.util.Arrays;
public class PutDatafeedActionResponseTests extends AbstractStreamableTestCase<Response> {
@Override
protected Response createTestInstance() {
DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(
DatafeedConfigTests.randomValidDatafeedId(), randomAsciiOfLength(10));
datafeedConfig.setIndexes(Arrays.asList(randomAsciiOfLength(10)));
datafeedConfig.setTypes(Arrays.asList(randomAsciiOfLength(10)));
return new Response(randomBoolean(), datafeedConfig.build());
}
@Override
protected Response createBlankInstance() {
return new Response();
}
}

View File

@ -1,44 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.action.PutSchedulerAction.Request;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfigTests;
import org.elasticsearch.xpack.ml.support.AbstractStreamableXContentTestCase;
import org.junit.Before;
import java.util.Arrays;
public class PutSchedulerActionRequestTests extends AbstractStreamableXContentTestCase<Request> {
private String schedulerId;
@Before
public void setUpSchedulerId() {
schedulerId = SchedulerConfigTests.randomValidSchedulerId();
}
@Override
protected Request createTestInstance() {
SchedulerConfig.Builder schedulerConfig = new SchedulerConfig.Builder(schedulerId, randomAsciiOfLength(10));
schedulerConfig.setIndexes(Arrays.asList(randomAsciiOfLength(10)));
schedulerConfig.setTypes(Arrays.asList(randomAsciiOfLength(10)));
return new Request(schedulerConfig.build());
}
@Override
protected Request createBlankInstance() {
return new Request();
}
@Override
protected Request parseInstance(XContentParser parser) {
return Request.parseRequest(schedulerId, parser);
}
}

View File

@ -1,31 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.xpack.ml.action.PutSchedulerAction.Response;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfigTests;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
import java.util.Arrays;
public class PutSchedulerActionResponseTests extends AbstractStreamableTestCase<Response> {
@Override
protected Response createTestInstance() {
SchedulerConfig.Builder schedulerConfig = new SchedulerConfig.Builder(
SchedulerConfigTests.randomValidSchedulerId(), randomAsciiOfLength(10));
schedulerConfig.setIndexes(Arrays.asList(randomAsciiOfLength(10)));
schedulerConfig.setTypes(Arrays.asList(randomAsciiOfLength(10)));
return new Response(randomBoolean(), schedulerConfig.build());
}
@Override
protected Response createBlankInstance() {
return new Response();
}
}

View File

@ -6,10 +6,10 @@
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.action.StartSchedulerAction.Request;
import org.elasticsearch.xpack.ml.action.StartDatafeedAction.Request;
import org.elasticsearch.xpack.ml.support.AbstractStreamableXContentTestCase;
public class StartSchedulerActionRequestTests extends AbstractStreamableXContentTestCase<StartSchedulerAction.Request> {
public class StartDatafeedActionRequestTests extends AbstractStreamableXContentTestCase<StartDatafeedAction.Request> {
@Override
protected Request createTestInstance() {

View File

@ -8,18 +8,18 @@ package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.ml.action.StopSchedulerAction.Request;
import org.elasticsearch.xpack.ml.action.StopDatafeedAction.Request;
import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
import static org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunnerTests.createScheduledJob;
import static org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunnerTests.createSchedulerConfig;
import static org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunnerTests.createDatafeedJob;
import static org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunnerTests.createDatafeedConfig;
import static org.hamcrest.Matchers.equalTo;
public class StopSchedulerActionRequestTests extends AbstractStreamableTestCase<StopSchedulerAction.Request> {
public class StopDatafeedActionRequestTests extends AbstractStreamableTestCase<StopDatafeedAction.Request> {
@Override
protected Request createTestInstance() {
@ -34,23 +34,23 @@ public class StopSchedulerActionRequestTests extends AbstractStreamableTestCase<
}
public void testValidate() {
Job job = createScheduledJob().build();
Job job = createDatafeedJob().build();
MlMetadata mlMetadata1 = new MlMetadata.Builder().putJob(job, false).build();
Exception e = expectThrows(ResourceNotFoundException.class, () -> StopSchedulerAction.validate("foo", mlMetadata1));
assertThat(e.getMessage(), equalTo("No scheduler with id [foo] exists"));
Exception e = expectThrows(ResourceNotFoundException.class, () -> StopDatafeedAction.validate("foo", mlMetadata1));
assertThat(e.getMessage(), equalTo("No datafeed with id [foo] exists"));
SchedulerConfig schedulerConfig = createSchedulerConfig("foo", "foo").build();
DatafeedConfig datafeedConfig = createDatafeedConfig("foo", "foo").build();
MlMetadata mlMetadata2 = new MlMetadata.Builder().putJob(job, false)
.putScheduler(schedulerConfig)
.putDatafeed(datafeedConfig)
.build();
e = expectThrows(ElasticsearchStatusException.class, () -> StopSchedulerAction.validate("foo", mlMetadata2));
assertThat(e.getMessage(), equalTo("scheduler already stopped, expected scheduler status [STARTED], but got [STOPPED]"));
e = expectThrows(ElasticsearchStatusException.class, () -> StopDatafeedAction.validate("foo", mlMetadata2));
assertThat(e.getMessage(), equalTo("datafeed already stopped, expected datafeed status [STARTED], but got [STOPPED]"));
MlMetadata mlMetadata3 = new MlMetadata.Builder().putJob(job, false)
.putScheduler(schedulerConfig)
.updateSchedulerStatus("foo", SchedulerStatus.STARTED)
.putDatafeed(datafeedConfig)
.updateDatafeedStatus("foo", DatafeedStatus.STARTED)
.build();
StopSchedulerAction.validate("foo", mlMetadata3);
StopDatafeedAction.validate("foo", mlMetadata3);
}
}

View File

@ -5,19 +5,19 @@
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.xpack.ml.action.UpdateSchedulerStatusAction.Request;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
import org.elasticsearch.xpack.ml.action.UpdateDatafeedStatusAction.Request;
import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.support.AbstractStreamableTestCase;
public class UpdateSchedulerStatusRequestTests extends AbstractStreamableTestCase<Request> {
public class UpdateDatafeedStatusRequestTests extends AbstractStreamableTestCase<Request> {
@Override
protected Request createTestInstance() {
return new Request(randomAsciiOfLengthBetween(1, 20), randomFrom(SchedulerStatus.values()));
return new Request(randomAsciiOfLengthBetween(1, 20), randomFrom(DatafeedStatus.values()));
}
@Override
protected Request createBlankInstance() {
return new Request();
}
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
package org.elasticsearch.xpack.ml.datafeed;
import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
import org.elasticsearch.common.io.stream.Writeable;
@ -23,15 +23,15 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerConfig> {
public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedConfig> {
@Override
protected SchedulerConfig createTestInstance() {
return createRandomizedSchedulerConfig(randomAsciiOfLength(10));
protected DatafeedConfig createTestInstance() {
return createRandomizedDatafeedConfig(randomAsciiOfLength(10));
}
public static SchedulerConfig createRandomizedSchedulerConfig(String jobId) {
SchedulerConfig.Builder builder = new SchedulerConfig.Builder(randomValidSchedulerId(), jobId);
public static DatafeedConfig createRandomizedDatafeedConfig(String jobId) {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder(randomValidDatafeedId(), jobId);
builder.setIndexes(randomStringList(1, 10));
builder.setTypes(randomStringList(1, 10));
if (randomBoolean()) {
@ -77,64 +77,64 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
}
@Override
protected Writeable.Reader<SchedulerConfig> instanceReader() {
return SchedulerConfig::new;
protected Writeable.Reader<DatafeedConfig> instanceReader() {
return DatafeedConfig::new;
}
@Override
protected SchedulerConfig parseInstance(XContentParser parser) {
return SchedulerConfig.PARSER.apply(parser, null).build();
protected DatafeedConfig parseInstance(XContentParser parser) {
return DatafeedConfig.PARSER.apply(parser, null).build();
}
public void testFillDefaults() {
SchedulerConfig.Builder expectedSchedulerConfig = new SchedulerConfig.Builder("scheduler1", "job1");
expectedSchedulerConfig.setIndexes(Arrays.asList("index"));
expectedSchedulerConfig.setTypes(Arrays.asList("type"));
expectedSchedulerConfig.setQueryDelay(60L);
expectedSchedulerConfig.setScrollSize(1000);
SchedulerConfig.Builder defaultedSchedulerConfig = new SchedulerConfig.Builder("scheduler1", "job1");
defaultedSchedulerConfig.setIndexes(Arrays.asList("index"));
defaultedSchedulerConfig.setTypes(Arrays.asList("type"));
DatafeedConfig.Builder expectedDatafeedConfig = new DatafeedConfig.Builder("datafeed1", "job1");
expectedDatafeedConfig.setIndexes(Arrays.asList("index"));
expectedDatafeedConfig.setTypes(Arrays.asList("type"));
expectedDatafeedConfig.setQueryDelay(60L);
expectedDatafeedConfig.setScrollSize(1000);
DatafeedConfig.Builder defaultedDatafeedConfig = new DatafeedConfig.Builder("datafeed1", "job1");
defaultedDatafeedConfig.setIndexes(Arrays.asList("index"));
defaultedDatafeedConfig.setTypes(Arrays.asList("type"));
assertEquals(expectedSchedulerConfig.build(), defaultedSchedulerConfig.build());
assertEquals(expectedDatafeedConfig.build(), defaultedDatafeedConfig.build());
}
public void testEquals_GivenDifferentQueryDelay() {
SchedulerConfig.Builder b1 = createFullyPopulated();
SchedulerConfig.Builder b2 = createFullyPopulated();
DatafeedConfig.Builder b1 = createFullyPopulated();
DatafeedConfig.Builder b2 = createFullyPopulated();
b2.setQueryDelay(120L);
SchedulerConfig sc1 = b1.build();
SchedulerConfig sc2 = b2.build();
DatafeedConfig sc1 = b1.build();
DatafeedConfig sc2 = b2.build();
assertFalse(sc1.equals(sc2));
assertFalse(sc2.equals(sc1));
}
public void testEquals_GivenDifferentScrollSize() {
SchedulerConfig.Builder b1 = createFullyPopulated();
SchedulerConfig.Builder b2 = createFullyPopulated();
DatafeedConfig.Builder b1 = createFullyPopulated();
DatafeedConfig.Builder b2 = createFullyPopulated();
b2.setScrollSize(1);
SchedulerConfig sc1 = b1.build();
SchedulerConfig sc2 = b2.build();
DatafeedConfig sc1 = b1.build();
DatafeedConfig sc2 = b2.build();
assertFalse(sc1.equals(sc2));
assertFalse(sc2.equals(sc1));
}
public void testEquals_GivenDifferentFrequency() {
SchedulerConfig.Builder b1 = createFullyPopulated();
SchedulerConfig.Builder b2 = createFullyPopulated();
DatafeedConfig.Builder b1 = createFullyPopulated();
DatafeedConfig.Builder b2 = createFullyPopulated();
b2.setFrequency(120L);
SchedulerConfig sc1 = b1.build();
SchedulerConfig sc2 = b2.build();
DatafeedConfig sc1 = b1.build();
DatafeedConfig sc2 = b2.build();
assertFalse(sc1.equals(sc2));
assertFalse(sc2.equals(sc1));
}
public void testEquals_GivenDifferentIndexes() {
SchedulerConfig.Builder sc1 = createFullyPopulated();
SchedulerConfig.Builder sc2 = createFullyPopulated();
DatafeedConfig.Builder sc1 = createFullyPopulated();
DatafeedConfig.Builder sc2 = createFullyPopulated();
sc2.setIndexes(Arrays.asList("blah", "di", "blah"));
assertFalse(sc1.build().equals(sc2.build()));
@ -142,8 +142,8 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
}
public void testEquals_GivenDifferentTypes() {
SchedulerConfig.Builder sc1 = createFullyPopulated();
SchedulerConfig.Builder sc2 = createFullyPopulated();
DatafeedConfig.Builder sc1 = createFullyPopulated();
DatafeedConfig.Builder sc2 = createFullyPopulated();
sc2.setTypes(Arrays.asList("blah", "di", "blah"));
assertFalse(sc1.build().equals(sc2.build()));
@ -151,27 +151,27 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
}
public void testEquals_GivenDifferentQuery() {
SchedulerConfig.Builder b1 = createFullyPopulated();
SchedulerConfig.Builder b2 = createFullyPopulated();
DatafeedConfig.Builder b1 = createFullyPopulated();
DatafeedConfig.Builder b2 = createFullyPopulated();
b2.setQuery(QueryBuilders.termQuery("foo", "bar"));
SchedulerConfig sc1 = b1.build();
SchedulerConfig sc2 = b2.build();
DatafeedConfig sc1 = b1.build();
DatafeedConfig sc2 = b2.build();
assertFalse(sc1.equals(sc2));
assertFalse(sc2.equals(sc1));
}
public void testEquals_GivenDifferentAggregations() {
SchedulerConfig.Builder sc1 = createFullyPopulated();
SchedulerConfig.Builder sc2 = createFullyPopulated();
DatafeedConfig.Builder sc1 = createFullyPopulated();
DatafeedConfig.Builder sc2 = createFullyPopulated();
sc2.setAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.count("foo")));
assertFalse(sc1.build().equals(sc2.build()));
assertFalse(sc2.build().equals(sc1.build()));
}
private static SchedulerConfig.Builder createFullyPopulated() {
SchedulerConfig.Builder sc = new SchedulerConfig.Builder("scheduler1", "job1");
private static DatafeedConfig.Builder createFullyPopulated() {
DatafeedConfig.Builder sc = new DatafeedConfig.Builder("datafeed1", "job1");
sc.setIndexes(Arrays.asList("myIndex"));
sc.setTypes(Arrays.asList("myType1", "myType2"));
sc.setFrequency(60L);
@ -183,62 +183,62 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
}
public void testCheckValid_GivenNullIndexes() throws IOException {
SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
expectThrows(IllegalArgumentException.class, () -> conf.setIndexes(null));
}
public void testCheckValid_GivenEmptyIndexes() throws IOException {
SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
conf.setIndexes(Collections.emptyList());
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
assertEquals(Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, "indexes", "[]"), e.getMessage());
assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "indexes", "[]"), e.getMessage());
}
public void testCheckValid_GivenIndexesContainsOnlyNulls() throws IOException {
List<String> indexes = new ArrayList<>();
indexes.add(null);
indexes.add(null);
SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
conf.setIndexes(indexes);
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
assertEquals(Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, "indexes", "[null, null]"), e.getMessage());
assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "indexes", "[null, null]"), e.getMessage());
}
public void testCheckValid_GivenIndexesContainsOnlyEmptyStrings() throws IOException {
List<String> indexes = new ArrayList<>();
indexes.add("");
indexes.add("");
SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
conf.setIndexes(indexes);
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
assertEquals(Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, "indexes", "[, ]"), e.getMessage());
assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "indexes", "[, ]"), e.getMessage());
}
public void testCheckValid_GivenNegativeQueryDelay() throws IOException {
SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> conf.setQueryDelay(-10L));
assertEquals(Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, "query_delay", -10L), e.getMessage());
assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "query_delay", -10L), e.getMessage());
}
public void testCheckValid_GivenZeroFrequency() throws IOException {
SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> conf.setFrequency(0L));
assertEquals(Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, "frequency", 0L), e.getMessage());
assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "frequency", 0L), e.getMessage());
}
public void testCheckValid_GivenNegativeFrequency() throws IOException {
SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> conf.setFrequency(-600L));
assertEquals(Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, "frequency", -600L), e.getMessage());
assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "frequency", -600L), e.getMessage());
}
public void testCheckValid_GivenNegativeScrollSize() throws IOException {
SchedulerConfig.Builder conf = new SchedulerConfig.Builder("scheduler1", "job1");
DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> conf.setScrollSize(-1000));
assertEquals(Messages.getMessage(Messages.SCHEDULER_CONFIG_INVALID_OPTION_VALUE, "scroll_size", -1000L), e.getMessage());
assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "scroll_size", -1000L), e.getMessage());
}
public static String randomValidSchedulerId() {
public static String randomValidDatafeedId() {
CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray());
return generator.ofCodePointsLength(random(), 10, 10);
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceNotFoundException;
@ -20,9 +20,9 @@ import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.FlushJobAction;
import org.elasticsearch.xpack.ml.action.InternalStartSchedulerAction;
import org.elasticsearch.xpack.ml.action.InternalStartDatafeedAction;
import org.elasticsearch.xpack.ml.action.PostDataAction;
import org.elasticsearch.xpack.ml.action.UpdateSchedulerStatusAction;
import org.elasticsearch.xpack.ml.action.UpdateDatafeedStatusAction;
import org.elasticsearch.xpack.ml.job.AnalysisConfig;
import org.elasticsearch.xpack.ml.job.DataCounts;
import org.elasticsearch.xpack.ml.job.DataDescription;
@ -32,8 +32,8 @@ import org.elasticsearch.xpack.ml.job.JobStatus;
import org.elasticsearch.xpack.ml.job.audit.Auditor;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractorFactory;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
import org.junit.Before;
import java.io.ByteArrayInputStream;
@ -45,8 +45,8 @@ import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.function.Consumer;
import static org.elasticsearch.xpack.ml.action.UpdateSchedulerStatusAction.INSTANCE;
import static org.elasticsearch.xpack.ml.action.UpdateSchedulerStatusAction.Request;
import static org.elasticsearch.xpack.ml.action.UpdateDatafeedStatusAction.INSTANCE;
import static org.elasticsearch.xpack.ml.action.UpdateDatafeedStatusAction.Request;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
@ -59,7 +59,7 @@ import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class ScheduledJobRunnerTests extends ESTestCase {
public class DatafeedJobRunnerTests extends ESTestCase {
private Client client;
private ActionFuture<PostDataAction.Response> jobDataFuture;
@ -67,7 +67,7 @@ public class ScheduledJobRunnerTests extends ESTestCase {
private ClusterService clusterService;
private ThreadPool threadPool;
private DataExtractorFactory dataExtractorFactory;
private ScheduledJobRunner scheduledJobRunner;
private DatafeedJobRunner datafeedJobRunner;
private long currentTime = 120000;
@Before
@ -80,9 +80,9 @@ public class ScheduledJobRunnerTests extends ESTestCase {
doAnswer(invocation -> {
@SuppressWarnings("rawtypes")
ActionListener<Object> actionListener = (ActionListener) invocation.getArguments()[2];
actionListener.onResponse(new UpdateSchedulerStatusAction.Response());
actionListener.onResponse(new UpdateDatafeedStatusAction.Response());
return null;
}).when(client).execute(same(UpdateSchedulerStatusAction.INSTANCE), any(), any());
}).when(client).execute(same(UpdateDatafeedStatusAction.INSTANCE), any(), any());
JobProvider jobProvider = mock(JobProvider.class);
Mockito.doAnswer(invocationOnMock -> {
@ -100,13 +100,13 @@ public class ScheduledJobRunnerTests extends ESTestCase {
((Runnable) invocation.getArguments()[0]).run();
return null;
}).when(executorService).submit(any(Runnable.class));
when(threadPool.executor(MlPlugin.SCHEDULED_RUNNER_THREAD_POOL_NAME)).thenReturn(executorService);
when(threadPool.executor(MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME)).thenReturn(executorService);
when(client.execute(same(PostDataAction.INSTANCE), any())).thenReturn(jobDataFuture);
when(client.execute(same(FlushJobAction.INSTANCE), any())).thenReturn(flushJobFuture);
scheduledJobRunner = new ScheduledJobRunner(threadPool, client, clusterService, jobProvider, () -> currentTime) {
datafeedJobRunner = new DatafeedJobRunner(threadPool, client, clusterService, jobProvider, () -> currentTime) {
@Override
DataExtractorFactory createDataExtractorFactory(SchedulerConfig schedulerConfig, Job job) {
DataExtractorFactory createDataExtractorFactory(DatafeedConfig datafeedConfig, Job job) {
return dataExtractorFactory;
}
};
@ -121,13 +121,13 @@ public class ScheduledJobRunnerTests extends ESTestCase {
}
public void testStart_GivenNewlyCreatedJobLoopBack() throws Exception {
Job.Builder jobBuilder = createScheduledJob();
SchedulerConfig schedulerConfig = createSchedulerConfig("scheduler1", "foo").build();
Job.Builder jobBuilder = createDatafeedJob();
DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed1", "foo").build();
DataCounts dataCounts = new DataCounts("foo", 1, 0, 0, 0, 0, 0, 0, new Date(0), new Date(0));
Job job = jobBuilder.build();
MlMetadata mlMetadata = new MlMetadata.Builder()
.putJob(job, false)
.putScheduler(schedulerConfig)
.putDatafeed(datafeedConfig)
.updateStatus("foo", JobStatus.OPENED, null)
.build();
when(clusterService.state()).thenReturn(ClusterState.builder(new ClusterName("_name"))
@ -141,15 +141,15 @@ public class ScheduledJobRunnerTests extends ESTestCase {
when(dataExtractor.next()).thenReturn(Optional.of(in));
when(jobDataFuture.get()).thenReturn(new PostDataAction.Response(dataCounts));
Consumer<Exception> handler = mockConsumer();
InternalStartSchedulerAction.SchedulerTask task = mock(InternalStartSchedulerAction.SchedulerTask.class);
scheduledJobRunner.run("scheduler1", 0L, 60000L, task, handler);
InternalStartDatafeedAction.DatafeedTask task = mock(InternalStartDatafeedAction.DatafeedTask.class);
datafeedJobRunner.run("datafeed1", 0L, 60000L, task, handler);
verify(threadPool, times(1)).executor(MlPlugin.SCHEDULED_RUNNER_THREAD_POOL_NAME);
verify(threadPool, times(1)).executor(MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME);
verify(threadPool, never()).schedule(any(), any(), any());
verify(client).execute(same(PostDataAction.INSTANCE), eq(createExpectedPostDataRequest(job)));
verify(client).execute(same(FlushJobAction.INSTANCE), any());
verify(client).execute(same(INSTANCE), eq(new Request("scheduler1", SchedulerStatus.STARTED)), any());
verify(client).execute(same(INSTANCE), eq(new Request("scheduler1", SchedulerStatus.STOPPED)), any());
verify(client).execute(same(INSTANCE), eq(new Request("datafeed1", DatafeedStatus.STARTED)), any());
verify(client).execute(same(INSTANCE), eq(new Request("datafeed1", DatafeedStatus.STOPPED)), any());
}
private static PostDataAction.Request createExpectedPostDataRequest(Job job) {
@ -162,13 +162,13 @@ public class ScheduledJobRunnerTests extends ESTestCase {
}
public void testStart_extractionProblem() throws Exception {
Job.Builder jobBuilder = createScheduledJob();
SchedulerConfig schedulerConfig = createSchedulerConfig("scheduler1", "foo").build();
Job.Builder jobBuilder = createDatafeedJob();
DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed1", "foo").build();
DataCounts dataCounts = new DataCounts("foo", 1, 0, 0, 0, 0, 0, 0, new Date(0), new Date(0));
Job job = jobBuilder.build();
MlMetadata mlMetadata = new MlMetadata.Builder()
.putJob(job, false)
.putScheduler(schedulerConfig)
.putDatafeed(datafeedConfig)
.updateStatus("foo", JobStatus.OPENED, null)
.build();
when(clusterService.state()).thenReturn(ClusterState.builder(new ClusterName("_name"))
@ -181,25 +181,25 @@ public class ScheduledJobRunnerTests extends ESTestCase {
when(dataExtractor.next()).thenThrow(new RuntimeException("dummy"));
when(jobDataFuture.get()).thenReturn(new PostDataAction.Response(dataCounts));
Consumer<Exception> handler = mockConsumer();
InternalStartSchedulerAction.SchedulerTask task = mock(InternalStartSchedulerAction.SchedulerTask.class);
scheduledJobRunner.run("scheduler1", 0L, 60000L, task, handler);
InternalStartDatafeedAction.DatafeedTask task = mock(InternalStartDatafeedAction.DatafeedTask.class);
datafeedJobRunner.run("datafeed1", 0L, 60000L, task, handler);
verify(threadPool, times(1)).executor(MlPlugin.SCHEDULED_RUNNER_THREAD_POOL_NAME);
verify(threadPool, times(1)).executor(MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME);
verify(threadPool, never()).schedule(any(), any(), any());
verify(client, never()).execute(same(PostDataAction.INSTANCE), eq(new PostDataAction.Request("foo")));
verify(client, never()).execute(same(FlushJobAction.INSTANCE), any());
verify(client).execute(same(INSTANCE), eq(new Request("scheduler1", SchedulerStatus.STARTED)), any());
verify(client).execute(same(INSTANCE), eq(new Request("scheduler1", SchedulerStatus.STOPPED)), any());
verify(client).execute(same(INSTANCE), eq(new Request("datafeed1", DatafeedStatus.STARTED)), any());
verify(client).execute(same(INSTANCE), eq(new Request("datafeed1", DatafeedStatus.STOPPED)), any());
}
public void testStart_GivenNewlyCreatedJobLoopBackAndRealtime() throws Exception {
Job.Builder jobBuilder = createScheduledJob();
SchedulerConfig schedulerConfig = createSchedulerConfig("scheduler1", "foo").build();
Job.Builder jobBuilder = createDatafeedJob();
DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed1", "foo").build();
DataCounts dataCounts = new DataCounts("foo", 1, 0, 0, 0, 0, 0, 0, new Date(0), new Date(0));
Job job = jobBuilder.build();
MlMetadata mlMetadata = new MlMetadata.Builder()
.putJob(job, false)
.putScheduler(schedulerConfig)
.putDatafeed(datafeedConfig)
.updateStatus("foo", JobStatus.OPENED, null)
.build();
when(clusterService.state()).thenReturn(ClusterState.builder(new ClusterName("_name"))
@ -214,29 +214,29 @@ public class ScheduledJobRunnerTests extends ESTestCase {
when(jobDataFuture.get()).thenReturn(new PostDataAction.Response(dataCounts));
Consumer<Exception> handler = mockConsumer();
boolean cancelled = randomBoolean();
InternalStartSchedulerAction.SchedulerTask task =
new InternalStartSchedulerAction.SchedulerTask(1, "type", "action", null, "scheduler1");
scheduledJobRunner.run("scheduler1", 0L, null, task, handler);
InternalStartDatafeedAction.DatafeedTask task =
new InternalStartDatafeedAction.DatafeedTask(1, "type", "action", null, "datafeed1");
datafeedJobRunner.run("datafeed1", 0L, null, task, handler);
verify(threadPool, times(1)).executor(MlPlugin.SCHEDULED_RUNNER_THREAD_POOL_NAME);
verify(threadPool, times(1)).executor(MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME);
if (cancelled) {
task.stop();
verify(client).execute(same(INSTANCE), eq(new Request("scheduler1", SchedulerStatus.STOPPED)), any());
verify(client).execute(same(INSTANCE), eq(new Request("datafeed1", DatafeedStatus.STOPPED)), any());
} else {
verify(client).execute(same(PostDataAction.INSTANCE), eq(createExpectedPostDataRequest(job)));
verify(client).execute(same(FlushJobAction.INSTANCE), any());
verify(threadPool, times(1)).schedule(eq(new TimeValue(480100)), eq(MlPlugin.SCHEDULED_RUNNER_THREAD_POOL_NAME), any());
verify(threadPool, times(1)).schedule(eq(new TimeValue(480100)), eq(MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME), any());
}
}
public static SchedulerConfig.Builder createSchedulerConfig(String schedulerId, String jobId) {
SchedulerConfig.Builder schedulerConfig = new SchedulerConfig.Builder(schedulerId, jobId);
schedulerConfig.setIndexes(Arrays.asList("myIndex"));
schedulerConfig.setTypes(Arrays.asList("myType"));
return schedulerConfig;
public static DatafeedConfig.Builder createDatafeedConfig(String datafeedId, String jobId) {
DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, jobId);
datafeedConfig.setIndexes(Arrays.asList("myIndex"));
datafeedConfig.setTypes(Arrays.asList("myType"));
return datafeedConfig;
}
public static Job.Builder createScheduledJob() {
public static Job.Builder createDatafeedJob() {
AnalysisConfig.Builder acBuilder = new AnalysisConfig.Builder(Arrays.asList(new Detector.Builder("metric", "field").build()));
acBuilder.setBucketSpan(3600L);
acBuilder.setDetectors(Arrays.asList(new Detector.Builder("metric", "field").build()));
@ -247,33 +247,33 @@ public class ScheduledJobRunnerTests extends ESTestCase {
}
public void testValidate() {
Job job1 = createScheduledJob().build();
Job job1 = createDatafeedJob().build();
MlMetadata mlMetadata1 = new MlMetadata.Builder()
.putJob(job1, false)
.build();
Exception e = expectThrows(ResourceNotFoundException.class,
() -> ScheduledJobRunner.validate("some-scheduler", mlMetadata1));
assertThat(e.getMessage(), equalTo("No scheduler with id [some-scheduler] exists"));
() -> DatafeedJobRunner.validate("some-datafeed", mlMetadata1));
assertThat(e.getMessage(), equalTo("No datafeed with id [some-datafeed] exists"));
SchedulerConfig schedulerConfig1 = createSchedulerConfig("foo-scheduler", "foo").build();
DatafeedConfig datafeedConfig1 = createDatafeedConfig("foo-datafeed", "foo").build();
MlMetadata mlMetadata2 = new MlMetadata.Builder(mlMetadata1)
.putScheduler(schedulerConfig1)
.putDatafeed(datafeedConfig1)
.build();
e = expectThrows(ElasticsearchStatusException.class,
() -> ScheduledJobRunner.validate("foo-scheduler", mlMetadata2));
assertThat(e.getMessage(), equalTo("cannot start scheduler, expected job status [OPENED], but got [CLOSED]"));
() -> DatafeedJobRunner.validate("foo-datafeed", mlMetadata2));
assertThat(e.getMessage(), equalTo("cannot start datafeed, expected job status [OPENED], but got [CLOSED]"));
MlMetadata mlMetadata3 = new MlMetadata.Builder(mlMetadata2)
.updateStatus("foo", JobStatus.OPENED, null)
.updateSchedulerStatus("foo-scheduler", SchedulerStatus.STARTED)
.updateDatafeedStatus("foo-datafeed", DatafeedStatus.STARTED)
.build();
e = expectThrows(ElasticsearchStatusException.class,
() -> ScheduledJobRunner.validate("foo-scheduler", mlMetadata3));
assertThat(e.getMessage(), equalTo("scheduler already started, expected scheduler status [STOPPED], but got [STARTED]"));
() -> DatafeedJobRunner.validate("foo-datafeed", mlMetadata3));
assertThat(e.getMessage(), equalTo("datafeed already started, expected datafeed status [STOPPED], but got [STARTED]"));
}
@SuppressWarnings("unchecked")
private Consumer<Exception> mockConsumer() {
return mock(Consumer.class);
}
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.client.Client;
@ -13,8 +13,8 @@ import org.elasticsearch.xpack.ml.action.PostDataAction;
import org.elasticsearch.xpack.ml.job.DataCounts;
import org.elasticsearch.xpack.ml.job.DataDescription;
import org.elasticsearch.xpack.ml.job.audit.Auditor;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.scheduler.extractor.DataExtractorFactory;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
import org.junit.Before;
import org.mockito.ArgumentCaptor;
@ -35,7 +35,7 @@ import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class ScheduledJobTests extends ESTestCase {
public class DatafeedJobTests extends ESTestCase {
private Auditor auditor;
private DataExtractorFactory dataExtractorFactory;
@ -73,8 +73,8 @@ public class ScheduledJobTests extends ESTestCase {
}
public void testLookBackRunWithEndTime() throws Exception {
ScheduledJob scheduledJob = createScheduledJob(1000, 500, -1, -1);
assertNull(scheduledJob.runLookBack(0L, 1000L));
DatafeedJob datafeedJob = createDatafeedJob(1000, 500, -1, -1);
assertNull(datafeedJob.runLookBack(0L, 1000L));
verify(dataExtractorFactory).newExtractor(0L, 1000L);
FlushJobAction.Request flushRequest = new FlushJobAction.Request("_job_id");
@ -86,8 +86,8 @@ public class ScheduledJobTests extends ESTestCase {
currentTime = 2000L;
long frequencyMs = 1000;
long queryDelayMs = 500;
ScheduledJob scheduledJob = createScheduledJob(frequencyMs, queryDelayMs, -1, -1);
long next = scheduledJob.runLookBack(0L, null);
DatafeedJob datafeedJob = createDatafeedJob(frequencyMs, queryDelayMs, -1, -1);
long next = datafeedJob.runLookBack(0L, null);
assertEquals(2000 + frequencyMs + 100, next);
verify(dataExtractorFactory).newExtractor(0L, 1500L);
@ -108,8 +108,8 @@ public class ScheduledJobTests extends ESTestCase {
long frequencyMs = 1000;
long queryDelayMs = 500;
ScheduledJob scheduledJob = createScheduledJob(frequencyMs, queryDelayMs, latestFinalBucketEndTimeMs, latestRecordTimeMs);
long next = scheduledJob.runLookBack(0L, null);
DatafeedJob datafeedJob = createDatafeedJob(frequencyMs, queryDelayMs, latestFinalBucketEndTimeMs, latestRecordTimeMs);
long next = datafeedJob.runLookBack(0L, null);
assertEquals(10000 + frequencyMs + 100, next);
verify(dataExtractorFactory).newExtractor(5000 + 1L, currentTime - queryDelayMs);
@ -122,8 +122,8 @@ public class ScheduledJobTests extends ESTestCase {
currentTime = 60000L;
long frequencyMs = 100;
long queryDelayMs = 1000;
ScheduledJob scheduledJob = createScheduledJob(frequencyMs, queryDelayMs, 1000, -1);
long next = scheduledJob.runRealtime();
DatafeedJob datafeedJob = createDatafeedJob(frequencyMs, queryDelayMs, 1000, -1);
long next = datafeedJob.runRealtime();
assertEquals(currentTime + frequencyMs + 100, next);
verify(dataExtractorFactory).newExtractor(1000L + 1L, currentTime - queryDelayMs);
@ -136,19 +136,19 @@ public class ScheduledJobTests extends ESTestCase {
public void testEmptyDataCount() throws Exception {
when(dataExtractor.hasNext()).thenReturn(false);
ScheduledJob scheduledJob = createScheduledJob(1000, 500, -1, -1);
expectThrows(ScheduledJob.EmptyDataCountException.class, () -> scheduledJob.runLookBack(0L, 1000L));
DatafeedJob datafeedJob = createDatafeedJob(1000, 500, -1, -1);
expectThrows(DatafeedJob.EmptyDataCountException.class, () -> datafeedJob.runLookBack(0L, 1000L));
}
public void testExtractionProblem() throws Exception {
when(dataExtractor.hasNext()).thenReturn(true);
when(dataExtractor.next()).thenThrow(new IOException());
ScheduledJob scheduledJob = createScheduledJob(1000, 500, -1, -1);
expectThrows(ScheduledJob.ExtractionProblemException.class, () -> scheduledJob.runLookBack(0L, 1000L));
DatafeedJob datafeedJob = createDatafeedJob(1000, 500, -1, -1);
expectThrows(DatafeedJob.ExtractionProblemException.class, () -> datafeedJob.runLookBack(0L, 1000L));
currentTime = 3001;
expectThrows(ScheduledJob.ExtractionProblemException.class, scheduledJob::runRealtime);
expectThrows(DatafeedJob.ExtractionProblemException.class, datafeedJob::runRealtime);
ArgumentCaptor<Long> startTimeCaptor = ArgumentCaptor.forClass(Long.class);
ArgumentCaptor<Long> endTimeCaptor = ArgumentCaptor.forClass(Long.class);
@ -164,11 +164,11 @@ public class ScheduledJobTests extends ESTestCase {
when(client.execute(same(FlushJobAction.INSTANCE), any())).thenReturn(flushJobFuture);
when(client.execute(same(PostDataAction.INSTANCE), eq(new PostDataAction.Request("_job_id")))).thenThrow(new RuntimeException());
ScheduledJob scheduledJob = createScheduledJob(1000, 500, -1, -1);
expectThrows(ScheduledJob.AnalysisProblemException.class, () -> scheduledJob.runLookBack(0L, 1000L));
DatafeedJob datafeedJob = createDatafeedJob(1000, 500, -1, -1);
expectThrows(DatafeedJob.AnalysisProblemException.class, () -> datafeedJob.runLookBack(0L, 1000L));
currentTime = 3001;
expectThrows(ScheduledJob.EmptyDataCountException.class, scheduledJob::runRealtime);
expectThrows(DatafeedJob.EmptyDataCountException.class, datafeedJob::runRealtime);
ArgumentCaptor<Long> startTimeCaptor = ArgumentCaptor.forClass(Long.class);
ArgumentCaptor<Long> endTimeCaptor = ArgumentCaptor.forClass(Long.class);
@ -180,10 +180,10 @@ public class ScheduledJobTests extends ESTestCase {
verify(client, times(0)).execute(same(FlushJobAction.INSTANCE), any());
}
private ScheduledJob createScheduledJob(long frequencyMs, long queryDelayMs, long latestFinalBucketEndTimeMs,
private DatafeedJob createDatafeedJob(long frequencyMs, long queryDelayMs, long latestFinalBucketEndTimeMs,
long latestRecordTimeMs) {
Supplier<Long> currentTimeSupplier = () -> currentTime;
return new ScheduledJob("_job_id", dataDescription.build(), frequencyMs, queryDelayMs, dataExtractorFactory, client, auditor,
return new DatafeedJob("_job_id", dataDescription.build(), frequencyMs, queryDelayMs, dataExtractorFactory, client, auditor,
currentTimeSupplier, latestFinalBucketEndTimeMs, latestRecordTimeMs);
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
@ -18,20 +18,20 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
public class ScheduledJobValidatorTests extends ESTestCase {
public class DatafeedJobValidatorTests extends ESTestCase {
public void testValidate_GivenNonZeroLatency() {
String errorMessage = Messages.getMessage(Messages.SCHEDULER_DOES_NOT_SUPPORT_JOB_WITH_LATENCY);
String errorMessage = Messages.getMessage(Messages.DATAFEED_DOES_NOT_SUPPORT_JOB_WITH_LATENCY);
Job.Builder builder = buildJobBuilder("foo");
AnalysisConfig.Builder ac = createAnalysisConfig();
ac.setBucketSpan(1800L);
ac.setLatency(3600L);
builder.setAnalysisConfig(ac);
Job job = builder.build();
SchedulerConfig schedulerConfig = createValidSchedulerConfig().build();
DatafeedConfig datafeedConfig = createValidDatafeedConfig().build();
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class,
() -> ScheduledJobValidator.validate(schedulerConfig, job));
() -> DatafeedJobValidator.validate(datafeedConfig, job));
assertEquals(errorMessage, e.getMessage());
}
@ -43,9 +43,9 @@ public class ScheduledJobValidatorTests extends ESTestCase {
ac.setLatency(0L);
builder.setAnalysisConfig(ac);
Job job = builder.build();
SchedulerConfig schedulerConfig = createValidSchedulerConfig().build();
DatafeedConfig datafeedConfig = createValidDatafeedConfig().build();
ScheduledJobValidator.validate(schedulerConfig, job);
DatafeedJobValidator.validate(datafeedConfig, job);
}
public void testVerify_GivenNoLatency() {
@ -55,9 +55,9 @@ public class ScheduledJobValidatorTests extends ESTestCase {
ac.setBucketSpan(100L);
builder.setAnalysisConfig(ac);
Job job = builder.build();
SchedulerConfig schedulerConfig = createValidSchedulerConfig().build();
DatafeedConfig datafeedConfig = createValidDatafeedConfig().build();
ScheduledJobValidator.validate(schedulerConfig, job);
DatafeedJobValidator.validate(datafeedConfig, job);
}
public void testVerify_GivenAggsAndCorrectSummaryCountField() throws IOException {
@ -67,40 +67,40 @@ public class ScheduledJobValidatorTests extends ESTestCase {
ac.setSummaryCountFieldName("doc_count");
builder.setAnalysisConfig(ac);
Job job = builder.build();
SchedulerConfig schedulerConfig = createValidSchedulerConfigWithAggs().build();
DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs().build();
ScheduledJobValidator.validate(schedulerConfig, job);
DatafeedJobValidator.validate(datafeedConfig, job);
}
public void testVerify_GivenAggsAndNoSummaryCountField() throws IOException {
String errorMessage = Messages.getMessage(Messages.SCHEDULER_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD,
SchedulerConfig.DOC_COUNT);
String errorMessage = Messages.getMessage(Messages.DATAFEED_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD,
DatafeedConfig.DOC_COUNT);
Job.Builder builder = buildJobBuilder("foo");
AnalysisConfig.Builder ac = createAnalysisConfig();
ac.setBucketSpan(1800L);
builder.setAnalysisConfig(ac);
Job job = builder.build();
SchedulerConfig schedulerConfig = createValidSchedulerConfigWithAggs().build();
DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs().build();
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class,
() -> ScheduledJobValidator.validate(schedulerConfig, job));
() -> DatafeedJobValidator.validate(datafeedConfig, job));
assertEquals(errorMessage, e.getMessage());
}
public void testVerify_GivenAggsAndWrongSummaryCountField() throws IOException {
String errorMessage = Messages.getMessage(
Messages.SCHEDULER_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD, SchedulerConfig.DOC_COUNT);
Messages.DATAFEED_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD, DatafeedConfig.DOC_COUNT);
Job.Builder builder = buildJobBuilder("foo");
AnalysisConfig.Builder ac = createAnalysisConfig();
ac.setBucketSpan(1800L);
ac.setSummaryCountFieldName("wrong");
builder.setAnalysisConfig(ac);
Job job = builder.build();
SchedulerConfig schedulerConfig = createValidSchedulerConfigWithAggs().build();
DatafeedConfig datafeedConfig = createValidDatafeedConfigWithAggs().build();
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class,
() -> ScheduledJobValidator.validate(schedulerConfig, job));
() -> DatafeedJobValidator.validate(datafeedConfig, job));
assertEquals(errorMessage, e.getMessage());
}
@ -121,16 +121,16 @@ public class ScheduledJobValidatorTests extends ESTestCase {
return ac;
}
private static SchedulerConfig.Builder createValidSchedulerConfigWithAggs() throws IOException {
SchedulerConfig.Builder schedulerConfig = createValidSchedulerConfig();
schedulerConfig.setAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.avg("foo")));
return schedulerConfig;
private static DatafeedConfig.Builder createValidDatafeedConfigWithAggs() throws IOException {
DatafeedConfig.Builder datafeedConfig = createValidDatafeedConfig();
datafeedConfig.setAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.avg("foo")));
return datafeedConfig;
}
private static SchedulerConfig.Builder createValidSchedulerConfig() {
SchedulerConfig.Builder builder = new SchedulerConfig.Builder("my-scheduler", "my-job");
private static DatafeedConfig.Builder createValidDatafeedConfig() {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder("my-datafeed", "my-job");
builder.setIndexes(Collections.singletonList("myIndex"));
builder.setTypes(Collections.singletonList("myType"));
return builder;
}
}
}

View File

@ -0,0 +1,22 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.test.ESTestCase;
public class DatafeedStatusTests extends ESTestCase {

    /** {@code fromString} must map the lowercase wire names onto the enum constants. */
    public void testForString() {
        DatafeedStatus started = DatafeedStatus.fromString("started");
        DatafeedStatus stopped = DatafeedStatus.fromString("stopped");
        assertEquals(started, DatafeedStatus.STARTED);
        assertEquals(stopped, DatafeedStatus.STOPPED);
    }

    /** Ordinals are serialized over the wire, so their order must never change. */
    public void testValidOrdinals() {
        assertEquals(0, DatafeedStatus.STARTED.ordinal());
        assertEquals(1, DatafeedStatus.STOPPED.ordinal());
    }
}

View File

@ -0,0 +1,29 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.support.AbstractSerializingTestCase;
public class DatafeedTests extends AbstractSerializingTestCase<Datafeed> {

    /** Builds a randomized Datafeed: random config plus a randomly chosen status. */
    @Override
    protected Datafeed createTestInstance() {
        DatafeedStatus status = randomFrom(DatafeedStatus.values());
        return new Datafeed(
                DatafeedConfigTests.createRandomizedDatafeedConfig(randomAsciiOfLength(10)), status);
    }

    /** Stream-input constructor reference used for round-trip serialization tests. */
    @Override
    protected Writeable.Reader<Datafeed> instanceReader() {
        return Datafeed::new;
    }

    /** Parses an instance back from XContent via the class's declarative PARSER. */
    @Override
    protected Datafeed parseInstance(XContentParser parser) {
        return Datafeed.PARSER.apply(parser, null);
    }
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
package org.elasticsearch.xpack.ml.datafeed;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ml.job.audit.Auditor;
@ -29,14 +29,14 @@ public class ProblemTrackerTests extends ESTestCase {
public void testReportExtractionProblem() {
problemTracker.reportExtractionProblem("foo");
verify(auditor).error("Scheduler is encountering errors extracting data: foo");
verify(auditor).error("Datafeed is encountering errors extracting data: foo");
assertTrue(problemTracker.hasProblems());
}
public void testReportAnalysisProblem() {
problemTracker.reportAnalysisProblem("foo");
verify(auditor).error("Scheduler is encountering errors submitting data for analysis: foo");
verify(auditor).error("Datafeed is encountering errors submitting data for analysis: foo");
assertTrue(problemTracker.hasProblems());
}
@ -44,7 +44,7 @@ public class ProblemTrackerTests extends ESTestCase {
problemTracker.reportExtractionProblem("foo");
problemTracker.reportAnalysisProblem("foo");
verify(auditor, times(1)).error("Scheduler is encountering errors extracting data: foo");
verify(auditor, times(1)).error("Datafeed is encountering errors extracting data: foo");
assertTrue(problemTracker.hasProblems());
}
@ -53,7 +53,7 @@ public class ProblemTrackerTests extends ESTestCase {
problemTracker.finishReport();
problemTracker.reportExtractionProblem("foo");
verify(auditor, times(1)).error("Scheduler is encountering errors extracting data: foo");
verify(auditor, times(1)).error("Datafeed is encountering errors extracting data: foo");
assertTrue(problemTracker.hasProblems());
}
@ -70,7 +70,7 @@ public class ProblemTrackerTests extends ESTestCase {
problemTracker.updateEmptyDataCount(true);
}
verify(auditor).warning("Scheduler has been retrieving no data for a while");
verify(auditor).warning("Datafeed has been retrieving no data for a while");
}
public void testUpdateEmptyDataCount_GivenEmptyElevenTimes() {
@ -78,7 +78,7 @@ public class ProblemTrackerTests extends ESTestCase {
problemTracker.updateEmptyDataCount(true);
}
verify(auditor, times(1)).warning("Scheduler has been retrieving no data for a while");
verify(auditor, times(1)).warning("Datafeed has been retrieving no data for a while");
}
public void testUpdateEmptyDataCount_GivenNonEmptyAfterNineEmpty() {
@ -96,8 +96,8 @@ public class ProblemTrackerTests extends ESTestCase {
}
problemTracker.updateEmptyDataCount(false);
verify(auditor).warning("Scheduler has been retrieving no data for a while");
verify(auditor).info("Scheduler has started retrieving data again");
verify(auditor).warning("Datafeed has been retrieving no data for a while");
verify(auditor).info("Datafeed has started retrieving data again");
}
public void testFinishReport_GivenNoProblems() {
@ -112,8 +112,8 @@ public class ProblemTrackerTests extends ESTestCase {
problemTracker.finishReport();
problemTracker.finishReport();
verify(auditor).error("Scheduler is encountering errors extracting data: bar");
verify(auditor).info("Scheduler has recovered data extraction and analysis");
verify(auditor).error("Datafeed is encountering errors extracting data: bar");
verify(auditor).info("Datafeed has recovered data extraction and analysis");
assertFalse(problemTracker.hasProblems());
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll;
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.search.SearchHit;
@ -127,4 +127,4 @@ public class ExtractedFieldTests extends ESTestCase {
return hit;
}
}
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll;
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESTestCase;
@ -77,4 +77,4 @@ public class ExtractedFieldsTests extends ESTestCase {
expectThrows(RuntimeException.class, () -> extractedFields.timeFieldValue(hit));
}
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll;
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler.extractor.scroll;
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESTestCase;
@ -69,4 +69,4 @@ public class SearchHitToJsonProcessorTests extends ESTestCase {
}
return outputStream.toString(StandardCharsets.UTF_8.name());
}
}
}

View File

@ -24,7 +24,7 @@ import java.util.stream.Collectors;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class ScheduledJobIT extends ESRestTestCase {
public class DatafeedJobIT extends ESRestTestCase {
@Before
public void setUpData() throws Exception {
@ -111,17 +111,17 @@ public class ScheduledJobIT extends ESRestTestCase {
new LookbackOnlyTestHelper("lookback-1", "airline-data").setShouldSucceedProcessing(true).execute();
}
public void testLookbackOnlyWithSchedulerSourceEnabled() throws Exception {
new LookbackOnlyTestHelper("lookback-2", "airline-data").setEnableSchedulerSource(true).execute();
public void testLookbackOnlyWithDatafeedSourceEnabled() throws Exception {
new LookbackOnlyTestHelper("lookback-2", "airline-data").setEnableDatafeedSource(true).execute();
}
public void testLookbackOnlyWithDocValuesDisabledAndSchedulerSourceDisabled() throws Exception {
public void testLookbackOnlyWithDocValuesDisabledAndDatafeedSourceDisabled() throws Exception {
new LookbackOnlyTestHelper("lookback-3", "airline-data-disabled-doc-values").setShouldSucceedInput(false)
.setShouldSucceedProcessing(false).execute();
}
public void testLookbackOnlyWithDocValuesDisabledAndSchedulerSourceEnabled() throws Exception {
new LookbackOnlyTestHelper("lookback-4", "airline-data-disabled-doc-values").setEnableSchedulerSource(true).execute();
public void testLookbackOnlyWithDocValuesDisabledAndDatafeedSourceEnabled() throws Exception {
new LookbackOnlyTestHelper("lookback-4", "airline-data-disabled-doc-values").setEnableDatafeedSource(true).execute();
}
public void testLookbackOnlyWithSourceDisabled() throws Exception {
@ -132,23 +132,23 @@ public class ScheduledJobIT extends ESRestTestCase {
new LookbackOnlyTestHelper("lookback-6", "airline-data-disabled-source").setAddScriptedFields(true).execute();
}
public void testLookbackOnlyWithNestedFieldsAndSchedulerSourceDisabled() throws Exception {
public void testLookbackOnlyWithNestedFieldsAndDatafeedSourceDisabled() throws Exception {
executeTestLookbackOnlyWithNestedFields("lookback-7", false);
}
public void testLookbackOnlyWithNestedFieldsAndSchedulerSourceEnabled() throws Exception {
public void testLookbackOnlyWithNestedFieldsAndDatafeedSourceEnabled() throws Exception {
executeTestLookbackOnlyWithNestedFields("lookback-8", true);
}
public void testRealtime() throws Exception {
String jobId = "job-realtime-1";
createJob(jobId);
String schedulerId = jobId + "-scheduler";
createScheduler(schedulerId, jobId, "airline-data", false, false);
String datafeedId = jobId + "-datafeed";
createDatafeed(datafeedId, jobId, "airline-data", false, false);
openJob(client(), jobId);
Response response = client().performRequest("post",
MlPlugin.BASE_PATH + "schedulers/" + schedulerId + "/_start?start=2016-06-01T00:00:00Z");
MlPlugin.BASE_PATH + "datafeeds/" + datafeedId + "/_start?start=2016-06-01T00:00:00Z");
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
assertThat(responseEntityToString(response), equalTo("{\"started\":true}"));
assertBusy(() -> {
@ -166,16 +166,16 @@ public class ScheduledJobIT extends ESRestTestCase {
() -> client().performRequest("delete", MlPlugin.BASE_PATH + "anomaly_detectors/" + jobId));
response = e.getResponse();
assertThat(response.getStatusLine().getStatusCode(), equalTo(409));
assertThat(responseEntityToString(response), containsString("Cannot delete job [" + jobId + "] while scheduler [" + schedulerId
assertThat(responseEntityToString(response), containsString("Cannot delete job [" + jobId + "] while datafeed [" + datafeedId
+ "] refers to it"));
response = client().performRequest("post", MlPlugin.BASE_PATH + "schedulers/" + schedulerId + "/_stop");
response = client().performRequest("post", MlPlugin.BASE_PATH + "datafeeds/" + datafeedId + "/_stop");
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
assertThat(responseEntityToString(response), equalTo("{\"acknowledged\":true}"));
client().performRequest("POST", "/_xpack/ml/anomaly_detectors/" + jobId + "/_close");
response = client().performRequest("delete", MlPlugin.BASE_PATH + "schedulers/" + schedulerId);
response = client().performRequest("delete", MlPlugin.BASE_PATH + "datafeeds/" + datafeedId);
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
assertThat(responseEntityToString(response), equalTo("{\"acknowledged\":true}"));
@ -188,7 +188,7 @@ public class ScheduledJobIT extends ESRestTestCase {
private String jobId;
private String dataIndex;
private boolean addScriptedFields;
private boolean enableSchedulerSource;
private boolean enableDatafeedSource;
private boolean shouldSucceedInput;
private boolean shouldSucceedProcessing;
@ -204,8 +204,8 @@ public class ScheduledJobIT extends ESRestTestCase {
return this;
}
public LookbackOnlyTestHelper setEnableSchedulerSource(boolean value) {
enableSchedulerSource = value;
public LookbackOnlyTestHelper setEnableDatafeedSource(boolean value) {
enableDatafeedSource = value;
return this;
}
@ -221,11 +221,11 @@ public class ScheduledJobIT extends ESRestTestCase {
public void execute() throws Exception {
createJob(jobId);
String schedulerId = "scheduler-" + jobId;
createScheduler(schedulerId, jobId, dataIndex, enableSchedulerSource, addScriptedFields);
String datafeedId = "datafeed-" + jobId;
createDatafeed(datafeedId, jobId, dataIndex, enableDatafeedSource, addScriptedFields);
openJob(client(), jobId);
startSchedulerAndWaitUntilStopped(schedulerId);
startDatafeedAndWaitUntilStopped(datafeedId);
Response jobStatsResponse = client().performRequest("get", MlPlugin.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
if (shouldSucceedInput) {
@ -242,16 +242,16 @@ public class ScheduledJobIT extends ESRestTestCase {
}
}
private void startSchedulerAndWaitUntilStopped(String schedulerId) throws Exception {
Response startSchedulerRequest = client().performRequest("post",
MlPlugin.BASE_PATH + "schedulers/" + schedulerId + "/_start?start=2016-06-01T00:00:00Z&end=2016-06-02T00:00:00Z");
assertThat(startSchedulerRequest.getStatusLine().getStatusCode(), equalTo(200));
assertThat(responseEntityToString(startSchedulerRequest), equalTo("{\"started\":true}"));
private void startDatafeedAndWaitUntilStopped(String datafeedId) throws Exception {
Response startDatafeedRequest = client().performRequest("post",
MlPlugin.BASE_PATH + "datafeeds/" + datafeedId + "/_start?start=2016-06-01T00:00:00Z&end=2016-06-02T00:00:00Z");
assertThat(startDatafeedRequest.getStatusLine().getStatusCode(), equalTo(200));
assertThat(responseEntityToString(startDatafeedRequest), equalTo("{\"started\":true}"));
assertBusy(() -> {
try {
Response schedulerStatsResponse = client().performRequest("get",
MlPlugin.BASE_PATH + "schedulers/" + schedulerId + "/_stats");
assertThat(responseEntityToString(schedulerStatsResponse), containsString("\"status\":\"STOPPED\""));
Response datafeedStatsResponse = client().performRequest("get",
MlPlugin.BASE_PATH + "datafeeds/" + datafeedId + "/_stats");
assertThat(responseEntityToString(datafeedStatsResponse), containsString("\"status\":\"STOPPED\""));
} catch (Exception e) {
throw new RuntimeException(e);
}
@ -270,14 +270,14 @@ public class ScheduledJobIT extends ESRestTestCase {
Collections.emptyMap(), new StringEntity(job));
}
private Response createScheduler(String schedulerId, String jobId, String dataIndex, boolean source, boolean addScriptedFields)
private Response createDatafeed(String datafeedId, String jobId, String dataIndex, boolean source, boolean addScriptedFields)
throws IOException {
String schedulerConfig = "{" + "\"job_id\": \"" + jobId + "\",\n" + "\"indexes\":[\"" + dataIndex + "\"],\n"
String datafeedConfig = "{" + "\"job_id\": \"" + jobId + "\",\n" + "\"indexes\":[\"" + dataIndex + "\"],\n"
+ "\"types\":[\"response\"]" + (source ? ",\"_source\":true" : "") + (addScriptedFields ?
",\"script_fields\":{\"airline\":{\"script\":{\"lang\":\"painless\",\"inline\":\"doc['airline'].value\"}}}" : "")
+"}";
return client().performRequest("put", MlPlugin.BASE_PATH + "schedulers/" + schedulerId, Collections.emptyMap(),
new StringEntity(schedulerConfig));
return client().performRequest("put", MlPlugin.BASE_PATH + "datafeeds/" + datafeedId, Collections.emptyMap(),
new StringEntity(datafeedConfig));
}
private static String responseEntityToString(Response response) throws Exception {
@ -297,11 +297,11 @@ public class ScheduledJobIT extends ESRestTestCase {
+ "}";
client().performRequest("put", MlPlugin.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(), new StringEntity(job));
String schedulerId = jobId + "-scheduler";
createScheduler(schedulerId, jobId, "nested-data", source, false);
String datafeedId = jobId + "-datafeed";
createDatafeed(datafeedId, jobId, "nested-data", source, false);
openJob(client(), jobId);
startSchedulerAndWaitUntilStopped(schedulerId);
startDatafeedAndWaitUntilStopped(datafeedId);
Response jobStatsResponse = client().performRequest("get", MlPlugin.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":2"));

View File

@ -25,29 +25,29 @@ public class MlRestTestStateCleaner {
}
public void clearMlMetadata() throws IOException {
deleteAllSchedulers();
deleteAllDatafeeds();
deleteAllJobs();
}
@SuppressWarnings("unchecked")
private void deleteAllSchedulers() throws IOException {
private void deleteAllDatafeeds() throws IOException {
Map<String, Object> clusterStateAsMap = testCase.entityAsMap(client.performRequest("GET", "/_cluster/state",
Collections.singletonMap("filter_path", "metadata.ml.schedulers")));
List<Map<String, Object>> schedulers =
(List<Map<String, Object>>) XContentMapValues.extractValue("metadata.ml.schedulers", clusterStateAsMap);
if (schedulers == null) {
Collections.singletonMap("filter_path", "metadata.ml.datafeeds")));
List<Map<String, Object>> datafeeds =
(List<Map<String, Object>>) XContentMapValues.extractValue("metadata.ml.datafeeds", clusterStateAsMap);
if (datafeeds == null) {
return;
}
for (Map<String, Object> scheduler : schedulers) {
Map<String, Object> schedulerMap = (Map<String, Object>) scheduler.get("config");
String schedulerId = (String) schedulerMap.get("scheduler_id");
for (Map<String, Object> datafeed : datafeeds) {
Map<String, Object> datafeedMap = (Map<String, Object>) datafeed.get("config");
String datafeedId = (String) datafeedMap.get("datafeed_id");
try {
client.performRequest("POST", "/_xpack/ml/schedulers/" + schedulerId + "/_stop");
client.performRequest("POST", "/_xpack/ml/datafeeds/" + datafeedId + "/_stop");
} catch (Exception e) {
// ignore
}
client.performRequest("DELETE", "/_xpack/ml/schedulers/" + schedulerId);
client.performRequest("DELETE", "/_xpack/ml/datafeeds/" + datafeedId);
}
}

View File

@ -20,7 +20,7 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.OpenJobAction;
import org.elasticsearch.xpack.ml.action.PutJobAction;
import org.elasticsearch.xpack.ml.action.ScheduledJobsIT;
import org.elasticsearch.xpack.ml.action.DatafeedJobsIT;
import org.elasticsearch.xpack.ml.job.AnalysisConfig;
import org.elasticsearch.xpack.ml.job.DataDescription;
import org.elasticsearch.xpack.ml.job.Detector;
@ -54,7 +54,7 @@ public class TooManyJobsIT extends ESIntegTestCase {
@After
public void clearMlMetadata() throws Exception {
ScheduledJobsIT.clearMlMetadata(client());
DatafeedJobsIT.clearMlMetadata(client());
}
public void testCannotStartTooManyAnalyticalProcesses() throws Exception {

View File

@ -19,16 +19,16 @@ import org.elasticsearch.xpack.ml.job.AnalysisConfig;
import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.job.JobStatus;
import org.elasticsearch.xpack.ml.job.JobTests;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfigTests;
import org.elasticsearch.xpack.ml.scheduler.SchedulerStatus;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.xpack.ml.datafeed.DatafeedStatus;
import org.elasticsearch.xpack.ml.support.AbstractSerializingTestCase;
import java.io.IOException;
import static org.elasticsearch.xpack.ml.job.JobTests.buildJobBuilder;
import static org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunnerTests.createScheduledJob;
import static org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunnerTests.createSchedulerConfig;
import static org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunnerTests.createDatafeedJob;
import static org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunnerTests.createDatafeedConfig;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@ -43,8 +43,8 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
for (int i = 0; i < numJobs; i++) {
Job job = JobTests.createRandomizedJob();
if (randomBoolean()) {
SchedulerConfig schedulerConfig = SchedulerConfigTests.createRandomizedSchedulerConfig(job.getId());
if (schedulerConfig.getAggregations() != null) {
DatafeedConfig datafeedConfig = DatafeedConfigTests.createRandomizedDatafeedConfig(job.getId());
if (datafeedConfig.getAggregations() != null) {
AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(job.getAnalysisConfig().getDetectors());
analysisConfig.setSummaryCountFieldName("doc_count");
Job.Builder jobBuilder = new Job.Builder(job);
@ -52,9 +52,9 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
job = jobBuilder.build();
}
builder.putJob(job, false);
builder.putScheduler(schedulerConfig);
builder.putDatafeed(datafeedConfig);
if (randomBoolean()) {
builder.updateSchedulerStatus(schedulerConfig.getId(), SchedulerStatus.STARTED);
builder.updateDatafeedStatus(datafeedConfig.getId(), DatafeedStatus.STARTED);
}
} else {
builder.putJob(job, false);
@ -107,10 +107,10 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
MlMetadata result = builder.build();
assertThat(result.getJobs().get("1"), sameInstance(job1));
assertThat(result.getAllocations().get("1").getStatus(), equalTo(JobStatus.CLOSED));
assertThat(result.getSchedulers().get("1"), nullValue());
assertThat(result.getDatafeeds().get("1"), nullValue());
assertThat(result.getJobs().get("2"), sameInstance(job2));
assertThat(result.getAllocations().get("2").getStatus(), equalTo(JobStatus.CLOSED));
assertThat(result.getSchedulers().get("2"), nullValue());
assertThat(result.getDatafeeds().get("2"), nullValue());
builder = new MlMetadata.Builder(result);
@ -134,19 +134,19 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
MlMetadata result = builder.build();
assertThat(result.getJobs().get("1"), sameInstance(job1));
assertThat(result.getAllocations().get("1").getStatus(), equalTo(JobStatus.CLOSED));
assertThat(result.getSchedulers().get("1"), nullValue());
assertThat(result.getDatafeeds().get("1"), nullValue());
builder = new MlMetadata.Builder(result);
builder.updateStatus("1", JobStatus.DELETING, null);
assertThat(result.getJobs().get("1"), sameInstance(job1));
assertThat(result.getAllocations().get("1").getStatus(), equalTo(JobStatus.CLOSED));
assertThat(result.getSchedulers().get("1"), nullValue());
assertThat(result.getDatafeeds().get("1"), nullValue());
builder.deleteJob("1");
result = builder.build();
assertThat(result.getJobs().get("1"), nullValue());
assertThat(result.getAllocations().get("1"), nullValue());
assertThat(result.getSchedulers().get("1"), nullValue());
assertThat(result.getDatafeeds().get("1"), nullValue());
}
public void testRemoveJob_failBecauseJobIsOpen() {
@ -159,23 +159,23 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
MlMetadata result = builder1.build();
assertThat(result.getJobs().get("1"), sameInstance(job1));
assertThat(result.getAllocations().get("1").getStatus(), equalTo(JobStatus.OPENED));
assertThat(result.getSchedulers().get("1"), nullValue());
assertThat(result.getDatafeeds().get("1"), nullValue());
MlMetadata.Builder builder2 = new MlMetadata.Builder(result);
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder2.deleteJob("1"));
assertThat(e.status(), equalTo(RestStatus.CONFLICT));
}
public void testRemoveJob_failSchedulerRefersToJob() {
Job job1 = createScheduledJob().build();
SchedulerConfig schedulerConfig1 = createSchedulerConfig("scheduler1", job1.getId()).build();
public void testRemoveJob_failDatafeedRefersToJob() {
Job job1 = createDatafeedJob().build();
DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
MlMetadata.Builder builder = new MlMetadata.Builder();
builder.putJob(job1, false);
builder.putScheduler(schedulerConfig1);
builder.putDatafeed(datafeedConfig1);
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder.deleteJob(job1.getId()));
assertThat(e.status(), equalTo(RestStatus.CONFLICT));
String expectedMsg = "Cannot delete job [" + job1.getId() + "] while scheduler [" + schedulerConfig1.getId() + "] refers to it";
String expectedMsg = "Cannot delete job [" + job1.getId() + "] while datafeed [" + datafeedConfig1.getId() + "] refers to it";
assertThat(e.getMessage(), equalTo(expectedMsg));
}
@ -184,87 +184,87 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
expectThrows(ResourceNotFoundException.class, () -> builder1.deleteJob("1"));
}
public void testCrudScheduler() {
Job job1 = createScheduledJob().build();
SchedulerConfig schedulerConfig1 = createSchedulerConfig("scheduler1", job1.getId()).build();
public void testCrudDatafeed() {
Job job1 = createDatafeedJob().build();
DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
MlMetadata.Builder builder = new MlMetadata.Builder();
builder.putJob(job1, false);
builder.putScheduler(schedulerConfig1);
builder.putDatafeed(datafeedConfig1);
MlMetadata result = builder.build();
assertThat(result.getJobs().get("foo"), sameInstance(job1));
assertThat(result.getAllocations().get("foo").getStatus(), equalTo(JobStatus.CLOSED));
assertThat(result.getSchedulers().get("scheduler1").getConfig(), sameInstance(schedulerConfig1));
assertThat(result.getSchedulers().get("scheduler1").getStatus(), equalTo(SchedulerStatus.STOPPED));
assertThat(result.getDatafeeds().get("datafeed1").getConfig(), sameInstance(datafeedConfig1));
assertThat(result.getDatafeeds().get("datafeed1").getStatus(), equalTo(DatafeedStatus.STOPPED));
builder = new MlMetadata.Builder(result);
builder.removeScheduler("scheduler1");
builder.removeDatafeed("datafeed1");
result = builder.build();
assertThat(result.getJobs().get("foo"), sameInstance(job1));
assertThat(result.getAllocations().get("foo").getStatus(), equalTo(JobStatus.CLOSED));
assertThat(result.getSchedulers().get("scheduler1"), nullValue());
assertThat(result.getDatafeeds().get("datafeed1"), nullValue());
}
public void testPutScheduler_failBecauseJobDoesNotExist() {
SchedulerConfig schedulerConfig1 = createSchedulerConfig("scheduler1", "missing-job").build();
public void testPutDatafeed_failBecauseJobDoesNotExist() {
DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", "missing-job").build();
MlMetadata.Builder builder = new MlMetadata.Builder();
expectThrows(ResourceNotFoundException.class, () -> builder.putScheduler(schedulerConfig1));
expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1));
}
public void testPutScheduler_failBecauseSchedulerIdIsAlreadyTaken() {
Job job1 = createScheduledJob().build();
SchedulerConfig schedulerConfig1 = createSchedulerConfig("scheduler1", job1.getId()).build();
public void testPutDatafeed_failBecauseDatafeedIdIsAlreadyTaken() {
Job job1 = createDatafeedJob().build();
DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
MlMetadata.Builder builder = new MlMetadata.Builder();
builder.putJob(job1, false);
builder.putScheduler(schedulerConfig1);
builder.putDatafeed(datafeedConfig1);
expectThrows(ResourceAlreadyExistsException.class, () -> builder.putScheduler(schedulerConfig1));
expectThrows(ResourceAlreadyExistsException.class, () -> builder.putDatafeed(datafeedConfig1));
}
public void testPutScheduler_failBecauseJobAlreadyHasScheduler() {
Job job1 = createScheduledJob().build();
SchedulerConfig schedulerConfig1 = createSchedulerConfig("scheduler1", job1.getId()).build();
SchedulerConfig schedulerConfig2 = createSchedulerConfig("scheduler2", job1.getId()).build();
public void testPutDatafeed_failBecauseJobAlreadyHasDatafeed() {
Job job1 = createDatafeedJob().build();
DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
DatafeedConfig datafeedConfig2 = createDatafeedConfig("datafeed2", job1.getId()).build();
MlMetadata.Builder builder = new MlMetadata.Builder();
builder.putJob(job1, false);
builder.putScheduler(schedulerConfig1);
builder.putDatafeed(datafeedConfig1);
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
() -> builder.putScheduler(schedulerConfig2));
() -> builder.putDatafeed(datafeedConfig2));
assertThat(e.status(), equalTo(RestStatus.CONFLICT));
}
public void testPutScheduler_failBecauseJobIsNotCompatibleForScheduler() {
Job.Builder job1 = createScheduledJob();
public void testPutDatafeed_failBecauseJobIsNotCompatibleForDatafeed() {
Job.Builder job1 = createDatafeedJob();
AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(job1.build().getAnalysisConfig());
analysisConfig.setLatency(3600L);
job1.setAnalysisConfig(analysisConfig);
SchedulerConfig schedulerConfig1 = createSchedulerConfig("scheduler1", job1.getId()).build();
DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
MlMetadata.Builder builder = new MlMetadata.Builder();
builder.putJob(job1.build(), false);
expectThrows(IllegalArgumentException.class, () -> builder.putScheduler(schedulerConfig1));
expectThrows(IllegalArgumentException.class, () -> builder.putDatafeed(datafeedConfig1));
}
public void testRemoveScheduler_failBecauseSchedulerStarted() {
Job job1 = createScheduledJob().build();
SchedulerConfig schedulerConfig1 = createSchedulerConfig("scheduler1", job1.getId()).build();
public void testRemoveDatafeed_failBecauseDatafeedStarted() {
Job job1 = createDatafeedJob().build();
DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build();
MlMetadata.Builder builder = new MlMetadata.Builder();
builder.putJob(job1, false);
builder.putScheduler(schedulerConfig1);
builder.putDatafeed(datafeedConfig1);
builder.updateStatus("foo", JobStatus.OPENING, null);
builder.updateStatus("foo", JobStatus.OPENED, null);
builder.updateSchedulerStatus("scheduler1", SchedulerStatus.STARTED);
builder.updateDatafeedStatus("datafeed1", DatafeedStatus.STARTED);
MlMetadata result = builder.build();
assertThat(result.getJobs().get("foo"), sameInstance(job1));
assertThat(result.getAllocations().get("foo").getStatus(), equalTo(JobStatus.OPENED));
assertThat(result.getSchedulers().get("scheduler1").getConfig(), sameInstance(schedulerConfig1));
assertThat(result.getSchedulers().get("scheduler1").getStatus(), equalTo(SchedulerStatus.STARTED));
assertThat(result.getDatafeeds().get("datafeed1").getConfig(), sameInstance(datafeedConfig1));
assertThat(result.getDatafeeds().get("datafeed1").getStatus(), equalTo(DatafeedStatus.STARTED));
MlMetadata.Builder builder2 = new MlMetadata.Builder(result);
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder2.removeScheduler("scheduler1"));
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder2.removeDatafeed("datafeed1"));
assertThat(e.status(), equalTo(RestStatus.CONFLICT));
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.rest.schedulers;
package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.node.NodeClient;
@ -14,24 +14,24 @@ import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.FakeRestRequest;
import org.elasticsearch.xpack.ml.job.Job;
import org.elasticsearch.xpack.ml.scheduler.ScheduledJobRunnerTests;
import org.elasticsearch.xpack.ml.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunnerTests;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import java.util.HashMap;
import java.util.Map;
import static org.mockito.Mockito.mock;
public class RestStartJobSchedulerActionTests extends ESTestCase {
public class RestStartJobDatafeedActionTests extends ESTestCase {
public void testPrepareRequest() throws Exception {
Job.Builder job = ScheduledJobRunnerTests.createScheduledJob();
SchedulerConfig schedulerConfig = ScheduledJobRunnerTests.createSchedulerConfig("foo-scheduler", "foo").build();
RestStartSchedulerAction action = new RestStartSchedulerAction(Settings.EMPTY, mock(RestController.class));
Job.Builder job = DatafeedJobRunnerTests.createDatafeedJob();
DatafeedConfig datafeedConfig = DatafeedJobRunnerTests.createDatafeedConfig("foo-datafeed", "foo").build();
RestStartDatafeedAction action = new RestStartDatafeedAction(Settings.EMPTY, mock(RestController.class));
Map<String, String> params = new HashMap<>();
params.put("start", "not-a-date");
params.put("scheduler_id", "foo-scheduler");
params.put("datafeed_id", "foo-datafeed");
RestRequest restRequest1 = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(params).build();
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class,
() -> action.prepareRequest(restRequest1, mock(NodeClient.class)));
@ -40,7 +40,7 @@ public class RestStartJobSchedulerActionTests extends ESTestCase {
params = new HashMap<>();
params.put("end", "not-a-date");
params.put("scheduler_id", "foo-scheduler");
params.put("datafeed_id", "foo-datafeed");
RestRequest restRequest2 = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(params).build();
e = expectThrows(ElasticsearchParseException.class, () -> action.prepareRequest(restRequest2, mock(NodeClient.class)));
assertEquals("Query param 'end' with value 'not-a-date' cannot be parsed as a date or converted to a number (epoch).",
@ -48,11 +48,11 @@ public class RestStartJobSchedulerActionTests extends ESTestCase {
}
public void testParseDateOrThrow() {
assertEquals(0L, RestStartSchedulerAction.parseDateOrThrow("0", "start"));
assertEquals(0L, RestStartSchedulerAction.parseDateOrThrow("1970-01-01T00:00:00Z", "start"));
assertEquals(0L, RestStartDatafeedAction.parseDateOrThrow("0", "start"));
assertEquals(0L, RestStartDatafeedAction.parseDateOrThrow("1970-01-01T00:00:00Z", "start"));
Exception e = expectThrows(ElasticsearchParseException.class,
() -> RestStartSchedulerAction.parseDateOrThrow("not-a-date", "start"));
() -> RestStartDatafeedAction.parseDateOrThrow("not-a-date", "start"));
assertEquals("Query param 'start' with value 'not-a-date' cannot be parsed as a date or converted to a number (epoch).",
e.getMessage());
}

View File

@ -1,22 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
import org.elasticsearch.test.ESTestCase;
public class SchedulerStatusTests extends ESTestCase {
public void testForString() {
assertEquals(SchedulerStatus.fromString("started"), SchedulerStatus.STARTED);
assertEquals(SchedulerStatus.fromString("stopped"), SchedulerStatus.STOPPED);
}
public void testValidOrdinals() {
assertEquals(0, SchedulerStatus.STARTED.ordinal());
assertEquals(1, SchedulerStatus.STOPPED.ordinal());
}
}

View File

@ -1,29 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.scheduler;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.support.AbstractSerializingTestCase;
public class SchedulerTests extends AbstractSerializingTestCase<Scheduler> {
@Override
protected Scheduler createTestInstance() {
return new Scheduler(SchedulerConfigTests.createRandomizedSchedulerConfig(randomAsciiOfLength(10)),
randomFrom(SchedulerStatus.values()));
}
@Override
protected Writeable.Reader<Scheduler> instanceReader() {
return Scheduler::new;
}
@Override
protected Scheduler parseInstance(XContentParser parser) {
return Scheduler.PARSER.apply(parser, null);
}
}

View File

@ -0,0 +1,17 @@
{
"xpack.ml.delete_datafeed": {
"methods": [ "DELETE" ],
"url": {
"path": "/_xpack/ml/datafeeds/{datafeed_id}",
"paths": [ "/_xpack/ml/datafeeds/{datafeed_id}" ],
"parts": {
"datafeed_id": {
"type": "string",
"required": true,
"description": "The ID of the datafeed to delete"
}
}
},
"body": null
}
}

View File

@ -1,17 +0,0 @@
{
"xpack.ml.delete_scheduler": {
"methods": [ "DELETE" ],
"url": {
"path": "/_xpack/ml/schedulers/{scheduler_id}",
"paths": [ "/_xpack/ml/schedulers/{scheduler_id}" ],
"parts": {
"scheduler_id": {
"type": "string",
"required": true,
"description": "The ID of the scheduler to delete"
}
}
},
"body": null
}
}

View File

@ -0,0 +1,19 @@
{
"xpack.ml.get_datafeeds": {
"methods": [ "GET"],
"url": {
"path": "/_xpack/ml/datafeeds/{datafeed_id}",
"paths": [
"/_xpack/ml/datafeeds/{datafeed_id}"
],
"parts": {
"datafeed_id": {
"type": "string",
"required": true,
"description": "The ID of the datafeeds to fetch"
}
}
},
"body": null
}
}

View File

@ -0,0 +1,19 @@
{
"xpack.ml.get_datafeeds_stats": {
"methods": [ "GET"],
"url": {
"path": "/_xpack/ml/datafeeds/{datafeed_id}/_stats",
"paths": [
"/_xpack/ml/datafeeds/{datafeed_id}/_stats"
],
"parts": {
"datafeed_id": {
"type": "string",
"required": true,
"description": "The ID of the datafeeds stats to fetch"
}
}
},
"body": null
}
}

View File

@ -1,19 +0,0 @@
{
"xpack.ml.get_schedulers": {
"methods": [ "GET"],
"url": {
"path": "/_xpack/ml/schedulers/{scheduler_id}",
"paths": [
"/_xpack/ml/schedulers/{scheduler_id}"
],
"parts": {
"scheduler_id": {
"type": "string",
"required": true,
"description": "The ID of the schedulers to fetch"
}
}
},
"body": null
}
}

View File

@ -1,19 +0,0 @@
{
"xpack.ml.get_schedulers_stats": {
"methods": [ "GET"],
"url": {
"path": "/_xpack/ml/schedulers/{scheduler_id}/_stats",
"paths": [
"/_xpack/ml/schedulers/{scheduler_id}/_stats"
],
"parts": {
"scheduler_id": {
"type": "string",
"required": true,
"description": "The ID of the schedulers stats to fetch"
}
}
},
"body": null
}
}

View File

@ -0,0 +1,20 @@
{
"xpack.ml.put_datafeed": {
"methods": [ "PUT" ],
"url": {
"path": "/_xpack/ml/datafeeds/{datafeed_id}",
"paths": [ "/_xpack/ml/datafeeds/{datafeed_id}" ],
"parts": {
"datafeed_id": {
"type": "string",
"required": true,
"description": "The ID of the datafeed to create"
}
}
},
"body": {
"description" : "The datafeed config",
"required" : true
}
}
}

View File

@ -1,20 +0,0 @@
{
"xpack.ml.put_scheduler": {
"methods": [ "PUT" ],
"url": {
"path": "/_xpack/ml/schedulers/{scheduler_id}",
"paths": [ "/_xpack/ml/schedulers/{scheduler_id}" ],
"parts": {
"scheduler_id": {
"type": "string",
"required": true,
"description": "The ID of the scheduler to create"
}
}
},
"body": {
"description" : "The scheduler config",
"required" : true
}
}
}

View File

@ -0,0 +1,35 @@
{
"xpack.ml.start_datafeed": {
"methods": [ "POST" ],
"url": {
"path": "/_xpack/ml/datafeeds/{datafeed_id}/_start",
"paths": [ "/_xpack/ml/datafeeds/{datafeed_id}/_start" ],
"parts": {
"datafeed_id": {
"type": "string",
"required": true,
"description": "The ID of the datafeed to start"
}
},
"params": {
"start": {
"type": "string",
"required": false,
"description": "The start time from where the datafeed should begin"
},
"end": {
"type": "string",
"required": false,
"description": "The end time when the datafeed should stop. When not set, the datafeed continues in real time"
},
"start_timeout": {
"type": "time",
          "description": "Controls the time to wait until a datafeed has started. Defaults to 30 seconds"
}
}
},
"body": {
"description": "The start datafeed parameters"
}
}
}

View File

@ -1,35 +0,0 @@
{
"xpack.ml.start_scheduler": {
"methods": [ "POST" ],
"url": {
"path": "/_xpack/ml/schedulers/{scheduler_id}/_start",
"paths": [ "/_xpack/ml/schedulers/{scheduler_id}/_start" ],
"parts": {
"scheduler_id": {
"type": "string",
"required": true,
"description": "The ID of the scheduler to start"
}
},
"params": {
"start": {
"type": "string",
"required": false,
"description": "The start time from where the scheduler should begin"
},
"end": {
"type": "string",
"required": false,
"description": "The end time when the scheduler should stop. When not set, the scheduler continues in real time"
},
"start_timeout": {
"type": "time",
"description": "Controls the time to wait until a scheduler has started. Default to 30 seconds"
}
}
},
"body": {
"description": "The start scheduler parameters"
}
}
}

View File

@ -0,0 +1,25 @@
{
"xpack.ml.stop_datafeed": {
"methods": [
"POST"
],
"url": {
"path": "/_xpack/ml/datafeeds/{datafeed_id}/_stop",
"paths": [
"/_xpack/ml/datafeeds/{datafeed_id}/_stop"
],
"parts": {
"datafeed_id": {
"type": "string",
"required": true,
"description": "The ID of the datafeed to stop"
},
"stop_timeout": {
"type": "time",
          "description": "Controls the time to wait until a datafeed has stopped. Defaults to 30 seconds"
}
},
"body": null
}
}
}

View File

@ -1,25 +0,0 @@
{
"xpack.ml.stop_scheduler": {
"methods": [
"POST"
],
"url": {
"path": "/_xpack/ml/schedulers/{scheduler_id}/_stop",
"paths": [
"/_xpack/ml/schedulers/{scheduler_id}/_stop"
],
"parts": {
"scheduler_id": {
"type": "string",
"required": true,
"description": "The ID of the scheduler to stop"
},
"stop_timeout": {
"type": "time",
"description": "Controls the time to wait until a scheduler has stopped. Default to 30 seconds"
}
},
"body": null
}
}
}

View File

@ -31,26 +31,26 @@ setup:
}
---
"Test get all schedulers and stats given no scheduler exists":
"Test get all datafeeds and stats given no datafeed exists":
- do:
xpack.ml.get_schedulers:
scheduler_id: "_all"
xpack.ml.get_datafeeds:
datafeed_id: "_all"
- match: { count: 0 }
- match: { schedulers: [] }
- match: { datafeeds: [] }
- do:
xpack.ml.get_schedulers_stats:
scheduler_id: "_all"
xpack.ml.get_datafeeds_stats:
datafeed_id: "_all"
- match: { count: 0 }
- match: { schedulers: [] }
- match: { datafeeds: [] }
---
"Test put scheduler referring to missing job_id":
"Test put datafeed referring to missing job_id":
- do:
catch: /resource_not_found_exception/
xpack.ml.put_scheduler:
scheduler_id: test-scheduler-1
xpack.ml.put_datafeed:
datafeed_id: test-datafeed-1
body: >
{
"job_id":"a-missing-job",
@ -59,35 +59,35 @@ setup:
}
---
"Test put scheduler referring to existing job_id":
"Test put datafeed referring to existing job_id":
- do:
xpack.ml.put_scheduler:
scheduler_id: test-scheduler-1
xpack.ml.put_datafeed:
datafeed_id: test-datafeed-1
body: >
{
"job_id":"job-1",
"indexes":["index-foo"],
"types":["type-bar"]
}
- match: { scheduler_id: "test-scheduler-1" }
- match: { datafeed_id: "test-datafeed-1" }
---
"Test put scheduler whose id is already taken":
"Test put datafeed whose id is already taken":
- do:
xpack.ml.put_scheduler:
scheduler_id: test-scheduler-1
xpack.ml.put_datafeed:
datafeed_id: test-datafeed-1
body: >
{
"job_id":"job-1",
"indexes":["index-foo"],
"types":["type-bar"]
}
- match: { scheduler_id: "test-scheduler-1" }
- match: { datafeed_id: "test-datafeed-1" }
- do:
catch: /resource_already_exists_exception/
xpack.ml.put_scheduler:
scheduler_id: test-scheduler-1
xpack.ml.put_datafeed:
datafeed_id: test-datafeed-1
body: >
{
"job_id":"job-2",
@ -96,22 +96,22 @@ setup:
}
---
"Test put scheduler with job_id that is already used by another scheduler":
"Test put datafeed with job_id that is already used by another datafeed":
- do:
xpack.ml.put_scheduler:
scheduler_id: test-scheduler-1
xpack.ml.put_datafeed:
datafeed_id: test-datafeed-1
body: >
{
"job_id":"job-1",
"indexes":["index-foo"],
"types":["type-bar"]
}
- match: { scheduler_id: "test-scheduler-1" }
- match: { datafeed_id: "test-datafeed-1" }
- do:
catch: /A scheduler \[test-scheduler-1\] already exists for job \[job-1\]/
xpack.ml.put_scheduler:
scheduler_id: test-scheduler-2
catch: /A datafeed \[test-datafeed-1\] already exists for job \[job-1\]/
xpack.ml.put_datafeed:
datafeed_id: test-datafeed-2
body: >
{
"job_id":"job-1",
@ -120,11 +120,11 @@ setup:
}
---
"Test put scheduler with invalid query":
"Test put datafeed with invalid query":
- do:
catch: /parsing_exception/
xpack.ml.put_scheduler:
scheduler_id: test-scheduler-1
xpack.ml.put_datafeed:
datafeed_id: test-datafeed-1
body: >
{
"job_id":"job-1",
@ -134,26 +134,26 @@ setup:
}
---
"Test delete scheduler with missing id":
"Test delete datafeed with missing id":
- do:
catch: /resource_not_found_exception/
xpack.ml.delete_scheduler:
scheduler_id: a-missing-scheduler
xpack.ml.delete_datafeed:
datafeed_id: a-missing-datafeed
---
"Test delete scheduler":
"Test delete datafeed":
- do:
xpack.ml.put_scheduler:
scheduler_id: test-scheduler-1
xpack.ml.put_datafeed:
datafeed_id: test-datafeed-1
body: >
{
"job_id":"job-1",
"indexes":["index-foo"],
"types":["type-bar"]
}
- match: { scheduler_id: "test-scheduler-1" }
- match: { datafeed_id: "test-datafeed-1" }
- do:
xpack.ml.delete_scheduler:
scheduler_id: test-scheduler-1
xpack.ml.delete_datafeed:
datafeed_id: test-datafeed-1
- match: { acknowledged: true }

View File

@ -32,8 +32,8 @@ setup:
}
- do:
xpack.ml.put_scheduler:
scheduler_id: scheduler-1
xpack.ml.put_datafeed:
datafeed_id: datafeed-1
body: >
{
"job_id":"job-1",
@ -42,8 +42,8 @@ setup:
}
- do:
xpack.ml.put_scheduler:
scheduler_id: scheduler-2
xpack.ml.put_datafeed:
datafeed_id: datafeed-2
body: >
{
"job_id":"job-2",
@ -52,36 +52,36 @@ setup:
}
---
"Test get scheduler given missing scheduler_id":
"Test get datafeed given missing datafeed_id":
- do:
catch: missing
xpack.ml.get_schedulers:
scheduler_id: missing-scheduler
xpack.ml.get_datafeeds:
datafeed_id: missing-datafeed
---
"Test get single scheduler":
"Test get single datafeed":
- do:
xpack.ml.get_schedulers:
scheduler_id: scheduler-1
- match: { schedulers.0.scheduler_id: "scheduler-1"}
- match: { schedulers.0.job_id: "job-1"}
xpack.ml.get_datafeeds:
datafeed_id: datafeed-1
- match: { datafeeds.0.datafeed_id: "datafeed-1"}
- match: { datafeeds.0.job_id: "job-1"}
- do:
xpack.ml.get_schedulers:
scheduler_id: scheduler-2
- match: { schedulers.0.scheduler_id: "scheduler-2"}
- match: { schedulers.0.job_id: "job-2"}
xpack.ml.get_datafeeds:
datafeed_id: datafeed-2
- match: { datafeeds.0.datafeed_id: "datafeed-2"}
- match: { datafeeds.0.job_id: "job-2"}
---
"Test get all schedulers":
"Test get all datafeeds":
- do:
xpack.ml.get_schedulers:
scheduler_id: _all
xpack.ml.get_datafeeds:
datafeed_id: _all
- match: { count: 2 }
- match: { schedulers.0.scheduler_id: "scheduler-1"}
- match: { schedulers.0.job_id: "job-1"}
- match: { schedulers.1.scheduler_id: "scheduler-2"}
- match: { schedulers.1.job_id: "job-2"}
- match: { datafeeds.0.datafeed_id: "datafeed-1"}
- match: { datafeeds.0.job_id: "job-1"}
- match: { datafeeds.1.datafeed_id: "datafeed-2"}
- match: { datafeeds.1.job_id: "job-2"}

View File

@ -32,8 +32,8 @@ setup:
}
- do:
xpack.ml.put_scheduler:
scheduler_id: scheduler-1
xpack.ml.put_datafeed:
datafeed_id: datafeed-1
body: >
{
"job_id":"job-1",
@ -42,8 +42,8 @@ setup:
}
- do:
xpack.ml.put_scheduler:
scheduler_id: scheduler-2
xpack.ml.put_datafeed:
datafeed_id: datafeed-2
body: >
{
"job_id":"job-2",
@ -52,36 +52,36 @@ setup:
}
---
"Test get scheduler stats given missing scheduler_id":
"Test get datafeed stats given missing datafeed_id":
- do:
catch: missing
xpack.ml.get_schedulers_stats:
scheduler_id: missing-scheduler
xpack.ml.get_datafeeds_stats:
datafeed_id: missing-datafeed
---
"Test get single scheduler stats":
"Test get single datafeed stats":
- do:
xpack.ml.get_schedulers_stats:
scheduler_id: scheduler-1
- match: { schedulers.0.scheduler_id: "scheduler-1"}
- match: { schedulers.0.status: "STOPPED"}
xpack.ml.get_datafeeds_stats:
datafeed_id: datafeed-1
- match: { datafeeds.0.datafeed_id: "datafeed-1"}
- match: { datafeeds.0.status: "STOPPED"}
- do:
xpack.ml.get_schedulers_stats:
scheduler_id: scheduler-2
- match: { schedulers.0.scheduler_id: "scheduler-2"}
- match: { schedulers.0.status: "STOPPED"}
xpack.ml.get_datafeeds_stats:
datafeed_id: datafeed-2
- match: { datafeeds.0.datafeed_id: "datafeed-2"}
- match: { datafeeds.0.status: "STOPPED"}
---
"Test get all schedulers stats":
"Test get all datafeeds stats":
- do:
xpack.ml.get_schedulers_stats:
scheduler_id: _all
xpack.ml.get_datafeeds_stats:
datafeed_id: _all
- match: { count: 2 }
- match: { schedulers.0.scheduler_id: "scheduler-1"}
- match: { schedulers.0.status: "STOPPED"}
- match: { schedulers.1.scheduler_id: "scheduler-2"}
- match: { schedulers.1.status: "STOPPED"}
- match: { datafeeds.0.datafeed_id: "datafeed-1"}
- match: { datafeeds.0.status: "STOPPED"}
- match: { datafeeds.1.datafeed_id: "datafeed-2"}
- match: { datafeeds.1.status: "STOPPED"}

View File

@ -182,13 +182,13 @@
}
---
"Test delete job that is referred by a scheduler":
"Test delete job that is referred by a datafeed":
- do:
xpack.ml.put_job:
job_id: scheduler-job
job_id: datafeed-job
body: >
{
"job_id":"scheduler-job",
"job_id":"datafeed-job",
"description":"Analysis of response time by airline",
"analysis_config" : {
"bucket_span":3600,
@ -200,20 +200,20 @@
"time_format":"yyyy-MM-dd HH:mm:ssX"
}
}
- match: { job_id: "scheduler-job" }
- match: { job_id: "datafeed-job" }
- do:
xpack.ml.put_scheduler:
scheduler_id: test-scheduler-1
xpack.ml.put_datafeed:
datafeed_id: test-datafeed-1
body: >
{
"job_id":"scheduler-job",
"job_id":"datafeed-job",
"indexes":["index-foo"],
"types":["type-bar"]
}
- match: { scheduler_id: "test-scheduler-1" }
- match: { datafeed_id: "test-datafeed-1" }
- do:
catch: /Cannot delete job \[scheduler-job\] while scheduler \[test-scheduler-1\] refers to it/
catch: /Cannot delete job \[datafeed-job\] while datafeed \[test-datafeed-1\] refers to it/
xpack.ml.delete_job:
job_id: scheduler-job
job_id: datafeed-job

View File

@ -23,11 +23,11 @@ setup:
- do:
xpack.ml.put_job:
job_id: scheduled-job
job_id: datafeed-job
body: >
{
"job_id":"scheduled-job",
"description":"A job with a scheduler",
"job_id":"datafeed-job",
"description":"A job with a datafeed",
"analysis_config" : {
"bucket_span":3600,
"detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
@ -40,14 +40,14 @@ setup:
}
- do:
xpack.ml.open_job:
job_id: scheduled-job
job_id: datafeed-job
- do:
xpack.ml.put_scheduler:
scheduler_id: scheduler-1
xpack.ml.put_datafeed:
datafeed_id: datafeed-1
body: >
{
"job_id":"scheduled-job",
"job_id":"datafeed-job",
"indexes":["farequote"],
"types":["response"]
}
@ -93,12 +93,12 @@ setup:
- match: { jobs.0.status: OPENED }
---
"Test get job stats of scheduled job that has not received and data":
"Test get job stats of datafeed job that has not received any data":
- do:
xpack.ml.get_job_stats:
job_id: scheduled-job
- match: { jobs.0.job_id : scheduled-job }
job_id: datafeed-job
- match: { jobs.0.job_id : datafeed-job }
- match: { jobs.0.data_counts.processed_record_count: 0 }
- is_false: jobs.0.model_size_stats
- match: { jobs.0.status: OPENED }

View File

@ -0,0 +1,116 @@
setup:
- do:
indices.create:
index: airline-data
body:
mappings:
response:
properties:
time:
type: date
- do:
xpack.ml.put_job:
job_id: datafeed-job
body: >
{
"job_id":"datafeed-job",
"description":"Analysis of response time by airline",
"analysis_config" : {
"bucket_span":3600,
"detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
},
"data_description" : {
"format":"JSON",
"time_field":"time",
"time_format":"epoch"
}
}
- do:
xpack.ml.put_datafeed:
datafeed_id: datafeed-1
body: >
{
"job_id":"datafeed-job",
"indexes":"airline-data",
"types":"response"
}
---
"Test start and stop datafeed happy path":
- do:
xpack.ml.open_job:
job_id: "datafeed-job"
- do:
xpack.ml.start_datafeed:
"datafeed_id": "datafeed-1"
"start": 0
- do:
xpack.ml.get_datafeeds_stats:
datafeed_id: "datafeed-1"
- match: { datafeeds.0.status: STARTED }
- do:
xpack.ml.stop_datafeed:
"datafeed_id": "datafeed-1"
- do:
xpack.ml.get_datafeeds_stats:
datafeed_id: "datafeed-1"
- match: { datafeeds.0.status: STOPPED }
---
"Test start non existing datafeed":
- do:
catch: missing
xpack.ml.start_datafeed:
"datafeed_id": "non-existing-datafeed"
"start": 0
---
"Test start datafeed job, but not open":
- do:
catch: conflict
xpack.ml.start_datafeed:
"datafeed_id": "datafeed-1"
"start": 0
- do:
catch: /cannot start datafeed, expected job status \[OPENED\], but got \[CLOSED\]/
xpack.ml.start_datafeed:
"datafeed_id": "datafeed-1"
"start": 0
---
"Test start already started datafeed job":
- do:
xpack.ml.open_job:
job_id: "datafeed-job"
- do:
xpack.ml.start_datafeed:
"datafeed_id": "datafeed-1"
"start": 0
- do:
catch: conflict
xpack.ml.start_datafeed:
"datafeed_id": "datafeed-1"
"start": 0
- do:
catch: /datafeed already started, expected datafeed status \[STOPPED\], but got \[STARTED\]/
xpack.ml.start_datafeed:
"datafeed_id": "datafeed-1"
"start": 0
---
"Test stop non existing datafeed":
- do:
catch: missing
xpack.ml.stop_datafeed:
"datafeed_id": "non-existing-datafeed"
---
"Test stop already stopped datafeed job":
- do:
catch: conflict
xpack.ml.stop_datafeed:
"datafeed_id": "datafeed-1"
- do:
catch: /datafeed already stopped, expected datafeed status \[STARTED\], but got \[STOPPED\]/
xpack.ml.stop_datafeed:
"datafeed_id": "datafeed-1"

View File

@ -1,116 +0,0 @@
setup:
- do:
indices.create:
index: airline-data
body:
mappings:
response:
properties:
time:
type: date
- do:
xpack.ml.put_job:
job_id: scheduled-job
body: >
{
"job_id":"scheduled-job",
"description":"Analysis of response time by airline",
"analysis_config" : {
"bucket_span":3600,
"detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
},
"data_description" : {
"format":"JSON",
"time_field":"time",
"time_format":"epoch"
}
}
- do:
xpack.ml.put_scheduler:
scheduler_id: scheduler-1
body: >
{
"job_id":"scheduled-job",
"indexes":"airline-data",
"types":"response"
}
---
"Test start and stop scheduler happy path":
- do:
xpack.ml.open_job:
job_id: "scheduled-job"
- do:
xpack.ml.start_scheduler:
"scheduler_id": "scheduler-1"
"start": 0
- do:
xpack.ml.get_schedulers_stats:
scheduler_id: "scheduler-1"
- match: { schedulers.0.status: STARTED }
- do:
xpack.ml.stop_scheduler:
"scheduler_id": "scheduler-1"
- do:
xpack.ml.get_schedulers_stats:
scheduler_id: "scheduler-1"
- match: { schedulers.0.status: STOPPED }
---
"Test start non existing scheduler":
- do:
catch: missing
xpack.ml.start_scheduler:
"scheduler_id": "non-existing-scheduler"
"start": 0
---
"Test start scheduled job, but not open":
- do:
catch: conflict
xpack.ml.start_scheduler:
"scheduler_id": "scheduler-1"
"start": 0
- do:
catch: /cannot start scheduler, expected job status \[OPENED\], but got \[CLOSED\]/
xpack.ml.start_scheduler:
"scheduler_id": "scheduler-1"
"start": 0
---
"Test start already started scheduled job":
- do:
xpack.ml.open_job:
job_id: "scheduled-job"
- do:
xpack.ml.start_scheduler:
"scheduler_id": "scheduler-1"
"start": 0
- do:
catch: conflict
xpack.ml.start_scheduler:
"scheduler_id": "scheduler-1"
"start": 0
- do:
catch: /scheduler already started, expected scheduler status \[STOPPED\], but got \[STARTED\]/
xpack.ml.start_scheduler:
"scheduler_id": "scheduler-1"
"start": 0
---
"Test stop non existing scheduler":
- do:
catch: missing
xpack.ml.stop_scheduler:
"scheduler_id": "non-existing-scheduler"
---
"Test stop already stopped scheduled job":
- do:
catch: conflict
xpack.ml.stop_scheduler:
"scheduler_id": "scheduler-1"
- do:
catch: /scheduler already stopped, expected scheduler status \[STARTED\], but got \[STOPPED\]/
xpack.ml.stop_scheduler:
"scheduler_id": "scheduler-1"