commit 1da752152a
Migrates machine learning into X-Pack
Original commit: elastic/x-pack-elasticsearch@9ad22980b8
@@ -0,0 +1,361 @@ MlPlugin.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.NamedDiff;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.env.Environment;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestHandler;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.ExecutorBuilder;
import org.elasticsearch.threadpool.FixedExecutorBuilder;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.ml.action.CloseJobAction;
import org.elasticsearch.xpack.ml.action.DeleteDatafeedAction;
import org.elasticsearch.xpack.ml.action.DeleteFilterAction;
import org.elasticsearch.xpack.ml.action.DeleteJobAction;
import org.elasticsearch.xpack.ml.action.DeleteModelSnapshotAction;
import org.elasticsearch.xpack.ml.action.FlushJobAction;
import org.elasticsearch.xpack.ml.action.GetBucketsAction;
import org.elasticsearch.xpack.ml.action.GetCategoriesAction;
import org.elasticsearch.xpack.ml.action.GetDatafeedsAction;
import org.elasticsearch.xpack.ml.action.GetDatafeedsStatsAction;
import org.elasticsearch.xpack.ml.action.GetFiltersAction;
import org.elasticsearch.xpack.ml.action.GetInfluencersAction;
import org.elasticsearch.xpack.ml.action.GetJobsAction;
import org.elasticsearch.xpack.ml.action.GetJobsStatsAction;
import org.elasticsearch.xpack.ml.action.GetModelSnapshotsAction;
import org.elasticsearch.xpack.ml.action.GetRecordsAction;
import org.elasticsearch.xpack.ml.action.InternalOpenJobAction;
import org.elasticsearch.xpack.ml.action.MlDeleteByQueryAction;
import org.elasticsearch.xpack.ml.action.OpenJobAction;
import org.elasticsearch.xpack.ml.action.PostDataAction;
import org.elasticsearch.xpack.ml.action.PutDatafeedAction;
import org.elasticsearch.xpack.ml.action.PutFilterAction;
import org.elasticsearch.xpack.ml.action.PutJobAction;
import org.elasticsearch.xpack.ml.action.RevertModelSnapshotAction;
import org.elasticsearch.xpack.ml.action.StartDatafeedAction;
import org.elasticsearch.xpack.ml.action.StopDatafeedAction;
import org.elasticsearch.xpack.ml.action.UpdateJobAction;
import org.elasticsearch.xpack.ml.action.UpdateJobStateAction;
import org.elasticsearch.xpack.ml.action.UpdateModelSnapshotAction;
import org.elasticsearch.xpack.ml.action.UpdateProcessAction;
import org.elasticsearch.xpack.ml.action.ValidateDetectorAction;
import org.elasticsearch.xpack.ml.action.ValidateJobConfigAction;
import org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunner;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.metadata.MlInitializationService;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister;
import org.elasticsearch.xpack.ml.job.process.DataCountsReporter;
import org.elasticsearch.xpack.ml.job.process.NativeController;
import org.elasticsearch.xpack.ml.job.process.ProcessCtrl;
import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessFactory;
import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;
import org.elasticsearch.xpack.ml.job.process.autodetect.BlackHoleAutodetectProcess;
import org.elasticsearch.xpack.ml.job.process.autodetect.NativeAutodetectProcessFactory;
import org.elasticsearch.xpack.ml.job.process.normalizer.MultiplyingNormalizerProcess;
import org.elasticsearch.xpack.ml.job.process.normalizer.NativeNormalizerProcessFactory;
import org.elasticsearch.xpack.ml.job.process.normalizer.NormalizerFactory;
import org.elasticsearch.xpack.ml.job.process.normalizer.NormalizerProcessFactory;
import org.elasticsearch.xpack.ml.rest.datafeeds.RestDeleteDatafeedAction;
import org.elasticsearch.xpack.ml.rest.datafeeds.RestGetDatafeedStatsAction;
import org.elasticsearch.xpack.ml.rest.datafeeds.RestGetDatafeedsAction;
import org.elasticsearch.xpack.ml.rest.datafeeds.RestPutDatafeedAction;
import org.elasticsearch.xpack.ml.rest.datafeeds.RestStartDatafeedAction;
import org.elasticsearch.xpack.ml.rest.datafeeds.RestStopDatafeedAction;
import org.elasticsearch.xpack.ml.rest.filter.RestDeleteFilterAction;
import org.elasticsearch.xpack.ml.rest.filter.RestGetFiltersAction;
import org.elasticsearch.xpack.ml.rest.filter.RestPutFilterAction;
import org.elasticsearch.xpack.ml.rest.job.RestCloseJobAction;
import org.elasticsearch.xpack.ml.rest.job.RestDeleteJobAction;
import org.elasticsearch.xpack.ml.rest.job.RestFlushJobAction;
import org.elasticsearch.xpack.ml.rest.job.RestGetJobStatsAction;
import org.elasticsearch.xpack.ml.rest.job.RestGetJobsAction;
import org.elasticsearch.xpack.ml.rest.job.RestOpenJobAction;
import org.elasticsearch.xpack.ml.rest.job.RestPostDataAction;
import org.elasticsearch.xpack.ml.rest.job.RestPostJobUpdateAction;
import org.elasticsearch.xpack.ml.rest.job.RestPutJobAction;
import org.elasticsearch.xpack.ml.rest.modelsnapshots.RestDeleteModelSnapshotAction;
import org.elasticsearch.xpack.ml.rest.modelsnapshots.RestGetModelSnapshotsAction;
import org.elasticsearch.xpack.ml.rest.modelsnapshots.RestRevertModelSnapshotAction;
import org.elasticsearch.xpack.ml.rest.modelsnapshots.RestUpdateModelSnapshotAction;
import org.elasticsearch.xpack.ml.rest.results.RestGetBucketsAction;
import org.elasticsearch.xpack.ml.rest.results.RestGetCategoriesAction;
import org.elasticsearch.xpack.ml.rest.results.RestGetInfluencersAction;
import org.elasticsearch.xpack.ml.rest.results.RestGetRecordsAction;
import org.elasticsearch.xpack.ml.rest.validate.RestValidateDetectorAction;
import org.elasticsearch.xpack.ml.rest.validate.RestValidateJobConfigAction;
import org.elasticsearch.xpack.ml.utils.NamedPipeHelper;
import org.elasticsearch.xpack.persistent.CompletionPersistentTaskAction;
import org.elasticsearch.xpack.persistent.PersistentActionCoordinator;
import org.elasticsearch.xpack.persistent.PersistentActionRegistry;
import org.elasticsearch.xpack.persistent.PersistentActionRequest;
import org.elasticsearch.xpack.persistent.PersistentActionService;
import org.elasticsearch.xpack.persistent.PersistentTaskClusterService;
import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;
import org.elasticsearch.xpack.persistent.RemovePersistentTaskAction;
import org.elasticsearch.xpack.persistent.StartPersistentTaskAction;
import org.elasticsearch.xpack.persistent.UpdatePersistentTaskStatusAction;

import java.io.IOException;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.function.Supplier;

import static java.util.Collections.emptyList;

public class MlPlugin extends Plugin implements ActionPlugin {
    public static final String NAME = "ml";
    public static final String BASE_PATH = "/_xpack/ml/";
    public static final String THREAD_POOL_NAME = NAME;
    public static final String DATAFEED_RUNNER_THREAD_POOL_NAME = NAME + "_datafeed_runner";
    public static final String AUTODETECT_PROCESS_THREAD_POOL_NAME = NAME + "_autodetect_process";

    // NORELEASE - temporary solution
    public static final Setting<Boolean> USE_NATIVE_PROCESS_OPTION = Setting.boolSetting("useNativeProcess", true, Property.NodeScope,
            Property.Deprecated);

    /** Setting for enabling or disabling machine learning. Defaults to false. */
    public static final Setting<Boolean> ML_ENABLED = Setting.boolSetting("xpack.ml.enabled", false, Setting.Property.NodeScope);

    private final Settings settings;
    private final Environment env;
    private boolean enabled;

    public MlPlugin(Settings settings) {
        this(settings, new Environment(settings));
    }

    public MlPlugin(Settings settings, Environment env) {
        this.enabled = ML_ENABLED.get(settings);
        this.settings = settings;
        this.env = env;
    }

    @Override
    public List<Setting<?>> getSettings() {
        return Collections.unmodifiableList(
                Arrays.asList(USE_NATIVE_PROCESS_OPTION,
                        ML_ENABLED,
                        ProcessCtrl.DONT_PERSIST_MODEL_STATE_SETTING,
                        ProcessCtrl.MAX_ANOMALY_RECORDS_SETTING,
                        DataCountsReporter.ACCEPTABLE_PERCENTAGE_DATE_PARSE_ERRORS_SETTING,
                        DataCountsReporter.ACCEPTABLE_PERCENTAGE_OUT_OF_ORDER_ERRORS_SETTING,
                        AutodetectProcessManager.MAX_RUNNING_JOBS_PER_NODE));
    }

    @Override
    public List<NamedWriteableRegistry.Entry> getNamedWriteables() {
        return Arrays.asList(
                new NamedWriteableRegistry.Entry(MetaData.Custom.class, "ml", MlMetadata::new),
                new NamedWriteableRegistry.Entry(NamedDiff.class, "ml", MlMetadata.MlMetadataDiff::new),
                new NamedWriteableRegistry.Entry(PersistentActionCoordinator.Status.class,
                        PersistentActionCoordinator.Status.NAME, PersistentActionCoordinator.Status::new),
                new NamedWriteableRegistry.Entry(ClusterState.Custom.class, PersistentTasksInProgress.TYPE, PersistentTasksInProgress::new),
                new NamedWriteableRegistry.Entry(NamedDiff.class, PersistentTasksInProgress.TYPE, PersistentTasksInProgress::readDiffFrom),
                new NamedWriteableRegistry.Entry(PersistentActionRequest.class, StartDatafeedAction.NAME, StartDatafeedAction.Request::new)
        );
    }

    @Override
    public List<NamedXContentRegistry.Entry> getNamedXContent() {
        NamedXContentRegistry.Entry entry = new NamedXContentRegistry.Entry(
                MetaData.Custom.class,
                new ParseField("ml"),
                parser -> MlMetadata.ML_METADATA_PARSER.parse(parser, null).build()
        );
        return Collections.singletonList(entry);
    }

    @Override
    public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
                                               ResourceWatcherService resourceWatcherService, ScriptService scriptService,
                                               NamedXContentRegistry xContentRegistry) {
        if (false == enabled) {
            return emptyList();
        }
        JobResultsPersister jobResultsPersister = new JobResultsPersister(settings, client);
        JobProvider jobProvider = new JobProvider(client, 0);
        JobDataCountsPersister jobDataCountsPersister = new JobDataCountsPersister(settings, client);

        JobManager jobManager = new JobManager(settings, jobProvider, jobResultsPersister, clusterService);
        AutodetectProcessFactory autodetectProcessFactory;
        NormalizerProcessFactory normalizerProcessFactory;
        if (USE_NATIVE_PROCESS_OPTION.get(settings)) {
            try {
                NativeController nativeController = new NativeController(env, new NamedPipeHelper());
                nativeController.tailLogsInThread();
                autodetectProcessFactory = new NativeAutodetectProcessFactory(jobProvider, env, settings, nativeController, client);
                normalizerProcessFactory = new NativeNormalizerProcessFactory(env, settings, nativeController);
            } catch (IOException e) {
                throw new ElasticsearchException("Failed to create native process factories", e);
            }
        } else {
            autodetectProcessFactory = (jobDetails, modelSnapshot, quantiles, filters, ignoreDowntime, executorService) ->
                    new BlackHoleAutodetectProcess();
            // factor of 1.0 makes renormalization a no-op
            normalizerProcessFactory = (jobId, quantilesState, bucketSpan, perPartitionNormalization,
                    executorService) -> new MultiplyingNormalizerProcess(settings, 1.0);
        }
        NormalizerFactory normalizerFactory = new NormalizerFactory(normalizerProcessFactory,
                threadPool.executor(MlPlugin.THREAD_POOL_NAME));
        AutodetectProcessManager dataProcessor = new AutodetectProcessManager(settings, client, threadPool, jobManager, jobProvider,
                jobResultsPersister, jobDataCountsPersister, autodetectProcessFactory, normalizerFactory);
        DatafeedJobRunner datafeedJobRunner = new DatafeedJobRunner(threadPool, client, clusterService, jobProvider,
                System::currentTimeMillis);
        PersistentActionService persistentActionService = new PersistentActionService(Settings.EMPTY, clusterService, client);
        PersistentActionRegistry persistentActionRegistry = new PersistentActionRegistry(Settings.EMPTY);

        return Arrays.asList(
                jobProvider,
                jobManager,
                dataProcessor,
                new MlInitializationService(settings, threadPool, clusterService, jobProvider),
                jobDataCountsPersister,
                datafeedJobRunner,
                persistentActionService,
                persistentActionRegistry,
                new PersistentTaskClusterService(Settings.EMPTY, persistentActionRegistry, clusterService)
        );
    }

    @Override
    public List<RestHandler> getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings,
                                             IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter,
                                             IndexNameExpressionResolver indexNameExpressionResolver,
                                             Supplier<DiscoveryNodes> nodesInCluster) {
        if (false == enabled) {
            return emptyList();
        }
        return Arrays.asList(
                new RestGetJobsAction(settings, restController),
                new RestGetJobStatsAction(settings, restController),
                new RestPutJobAction(settings, restController),
                new RestPostJobUpdateAction(settings, restController),
                new RestDeleteJobAction(settings, restController),
                new RestOpenJobAction(settings, restController),
                new RestGetFiltersAction(settings, restController),
                new RestPutFilterAction(settings, restController),
                new RestDeleteFilterAction(settings, restController),
                new RestGetInfluencersAction(settings, restController),
                new RestGetRecordsAction(settings, restController),
                new RestGetBucketsAction(settings, restController),
                new RestPostDataAction(settings, restController),
                new RestCloseJobAction(settings, restController),
                new RestFlushJobAction(settings, restController),
                new RestValidateDetectorAction(settings, restController),
                new RestValidateJobConfigAction(settings, restController),
                new RestGetCategoriesAction(settings, restController),
                new RestGetModelSnapshotsAction(settings, restController),
                new RestRevertModelSnapshotAction(settings, restController),
                new RestUpdateModelSnapshotAction(settings, restController),
                new RestGetDatafeedsAction(settings, restController),
                new RestGetDatafeedStatsAction(settings, restController),
                new RestPutDatafeedAction(settings, restController),
                new RestDeleteDatafeedAction(settings, restController),
                new RestStartDatafeedAction(settings, restController),
                new RestStopDatafeedAction(settings, restController),
                new RestDeleteModelSnapshotAction(settings, restController)
        );
    }

    @Override
    public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
        if (false == enabled) {
            return emptyList();
        }
        return Arrays.asList(
                new ActionHandler<>(GetJobsAction.INSTANCE, GetJobsAction.TransportAction.class),
                new ActionHandler<>(GetJobsStatsAction.INSTANCE, GetJobsStatsAction.TransportAction.class),
                new ActionHandler<>(PutJobAction.INSTANCE, PutJobAction.TransportAction.class),
                new ActionHandler<>(UpdateJobAction.INSTANCE, UpdateJobAction.TransportAction.class),
                new ActionHandler<>(DeleteJobAction.INSTANCE, DeleteJobAction.TransportAction.class),
                new ActionHandler<>(OpenJobAction.INSTANCE, OpenJobAction.TransportAction.class),
                new ActionHandler<>(InternalOpenJobAction.INSTANCE, InternalOpenJobAction.TransportAction.class),
                new ActionHandler<>(UpdateJobStateAction.INSTANCE, UpdateJobStateAction.TransportAction.class),
                new ActionHandler<>(GetFiltersAction.INSTANCE, GetFiltersAction.TransportAction.class),
                new ActionHandler<>(PutFilterAction.INSTANCE, PutFilterAction.TransportAction.class),
                new ActionHandler<>(DeleteFilterAction.INSTANCE, DeleteFilterAction.TransportAction.class),
                new ActionHandler<>(GetBucketsAction.INSTANCE, GetBucketsAction.TransportAction.class),
                new ActionHandler<>(GetInfluencersAction.INSTANCE, GetInfluencersAction.TransportAction.class),
                new ActionHandler<>(GetRecordsAction.INSTANCE, GetRecordsAction.TransportAction.class),
                new ActionHandler<>(PostDataAction.INSTANCE, PostDataAction.TransportAction.class),
                new ActionHandler<>(CloseJobAction.INSTANCE, CloseJobAction.TransportAction.class),
                new ActionHandler<>(FlushJobAction.INSTANCE, FlushJobAction.TransportAction.class),
                new ActionHandler<>(ValidateDetectorAction.INSTANCE, ValidateDetectorAction.TransportAction.class),
                new ActionHandler<>(ValidateJobConfigAction.INSTANCE, ValidateJobConfigAction.TransportAction.class),
                new ActionHandler<>(GetCategoriesAction.INSTANCE, GetCategoriesAction.TransportAction.class),
                new ActionHandler<>(GetModelSnapshotsAction.INSTANCE, GetModelSnapshotsAction.TransportAction.class),
                new ActionHandler<>(RevertModelSnapshotAction.INSTANCE, RevertModelSnapshotAction.TransportAction.class),
                new ActionHandler<>(UpdateModelSnapshotAction.INSTANCE, UpdateModelSnapshotAction.TransportAction.class),
                new ActionHandler<>(GetDatafeedsAction.INSTANCE, GetDatafeedsAction.TransportAction.class),
                new ActionHandler<>(GetDatafeedsStatsAction.INSTANCE, GetDatafeedsStatsAction.TransportAction.class),
                new ActionHandler<>(PutDatafeedAction.INSTANCE, PutDatafeedAction.TransportAction.class),
                new ActionHandler<>(DeleteDatafeedAction.INSTANCE, DeleteDatafeedAction.TransportAction.class),
                new ActionHandler<>(StartDatafeedAction.INSTANCE, StartDatafeedAction.TransportAction.class),
                new ActionHandler<>(StopDatafeedAction.INSTANCE, StopDatafeedAction.TransportAction.class),
                new ActionHandler<>(DeleteModelSnapshotAction.INSTANCE, DeleteModelSnapshotAction.TransportAction.class),
                new ActionHandler<>(StartPersistentTaskAction.INSTANCE, StartPersistentTaskAction.TransportAction.class),
                new ActionHandler<>(UpdatePersistentTaskStatusAction.INSTANCE, UpdatePersistentTaskStatusAction.TransportAction.class),
                new ActionHandler<>(CompletionPersistentTaskAction.INSTANCE, CompletionPersistentTaskAction.TransportAction.class),
                new ActionHandler<>(RemovePersistentTaskAction.INSTANCE, RemovePersistentTaskAction.TransportAction.class),
                new ActionHandler<>(MlDeleteByQueryAction.INSTANCE, MlDeleteByQueryAction.TransportAction.class),
                new ActionHandler<>(UpdateProcessAction.INSTANCE, UpdateProcessAction.TransportAction.class)
        );
    }

    public static Path resolveConfigFile(Environment env, String name) {
        return env.configFile().resolve(NAME).resolve(name);
    }

    @Override
    public List<ExecutorBuilder<?>> getExecutorBuilders(Settings settings) {
        if (false == enabled) {
            return emptyList();
        }
        int maxNumberOfJobs = AutodetectProcessManager.MAX_RUNNING_JOBS_PER_NODE.get(settings);
        FixedExecutorBuilder ml = new FixedExecutorBuilder(settings, THREAD_POOL_NAME,
                maxNumberOfJobs * 2, 1000, "xpack.ml.thread_pool");

        // fail quick to run autodetect process / datafeed, so no queues
        // 4 threads: for c++ logging, result processing, state processing and restore state
        FixedExecutorBuilder autoDetect = new FixedExecutorBuilder(settings, AUTODETECT_PROCESS_THREAD_POOL_NAME,
                maxNumberOfJobs * 4, 4, "xpack.ml.autodetect_process_thread_pool");

        // TODO: if datafeed and non datafeed jobs are considered more equal and the datafeed and
        // autodetect process are created at the same time then these two different TPs can merge.
        FixedExecutorBuilder datafeed = new FixedExecutorBuilder(settings, DATAFEED_RUNNER_THREAD_POOL_NAME,
                maxNumberOfJobs, 1, "xpack.ml.datafeed_thread_pool");
        return Arrays.asList(ml, autoDetect, datafeed);
    }
}
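Usage note: the snippet below is an illustrative sketch, not part of the commit. The wrapper class and main method are hypothetical; the setting key and the gating behaviour come from MlPlugin above.

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.ml.MlPlugin;

public class MlEnabledSketch {
    public static void main(String[] args) {
        // xpack.ml.enabled defaults to false in this commit.
        Settings defaults = Settings.EMPTY;
        Settings mlOn = Settings.builder().put("xpack.ml.enabled", true).build();
        System.out.println(MlPlugin.ML_ENABLED.get(defaults)); // false
        System.out.println(MlPlugin.ML_ENABLED.get(mlOn));     // true
        // With the flag off, createComponents(), getRestHandlers(), getActions() and
        // getExecutorBuilders() all return empty lists, so the node carries no ML machinery.
    }
}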
@@ -0,0 +1,249 @@ CloseJobAction.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.tasks.TaskInfo;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.metadata.Allocation;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.utils.JobStateObserver;

import java.io.IOException;
import java.util.Objects;

public class CloseJobAction extends Action<CloseJobAction.Request, CloseJobAction.Response, CloseJobAction.RequestBuilder> {

    public static final CloseJobAction INSTANCE = new CloseJobAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/close";

    private CloseJobAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends ActionRequest {

        private String jobId;
        private TimeValue closeTimeout = TimeValue.timeValueMinutes(20);

        Request() {}

        public Request(String jobId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
        }

        public String getJobId() {
            return jobId;
        }

        public TimeValue getCloseTimeout() {
            return closeTimeout;
        }

        public void setCloseTimeout(TimeValue closeTimeout) {
            this.closeTimeout = closeTimeout;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobId = in.readString();
            closeTimeout = new TimeValue(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
            closeTimeout.writeTo(out);
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId, closeTimeout);
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null || obj.getClass() != getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(jobId, other.jobId) &&
                    Objects.equals(closeTimeout, other.closeTimeout);
        }
    }

    static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {

        RequestBuilder(ElasticsearchClient client, CloseJobAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends ActionResponse implements ToXContentObject {

        private boolean closed;

        Response() {
        }

        Response(boolean closed) {
            this.closed = closed;
        }

        public boolean isClosed() {
            return closed;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            closed = in.readBoolean();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeBoolean(closed);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field("closed", closed);
            builder.endObject();
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Response response = (Response) o;
            return closed == response.closed;
        }

        @Override
        public int hashCode() {
            return Objects.hash(closed);
        }
    }

    public static class TransportAction extends HandledTransportAction<Request, Response> {

        private final ClusterService clusterService;
        private final JobStateObserver jobStateObserver;
        private final TransportListTasksAction listTasksAction;
        private final TransportCancelTasksAction cancelTasksAction;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool,
                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                               ClusterService clusterService, TransportCancelTasksAction cancelTasksAction,
                               TransportListTasksAction listTasksAction) {
            super(settings, CloseJobAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new);
            this.clusterService = clusterService;
            this.jobStateObserver = new JobStateObserver(threadPool, clusterService);
            this.cancelTasksAction = cancelTasksAction;
            this.listTasksAction = listTasksAction;
        }

        @Override
        protected void doExecute(Request request, ActionListener<Response> listener) {
            MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
            validate(request.jobId, mlMetadata);

            ListTasksRequest listTasksRequest = new ListTasksRequest();
            listTasksRequest.setActions(InternalOpenJobAction.NAME);
            listTasksRequest.setDetailed(true);
            listTasksAction.execute(listTasksRequest, ActionListener.wrap(listTasksResponse -> {
                String expectedJobDescription = "job-" + request.jobId;
                for (TaskInfo taskInfo : listTasksResponse.getTasks()) {
                    if (expectedJobDescription.equals(taskInfo.getDescription())) {
                        CancelTasksRequest cancelTasksRequest = new CancelTasksRequest();
                        cancelTasksRequest.setTaskId(taskInfo.getTaskId());
                        cancelTasksAction.execute(cancelTasksRequest, ActionListener.wrap(
                                cancelTasksResponse -> {
                                    jobStateObserver.waitForState(request.jobId, request.closeTimeout, JobState.CLOSED,
                                            e -> {
                                                if (e != null) {
                                                    listener.onFailure(e);
                                                } else {
                                                    listener.onResponse(new CloseJobAction.Response(true));
                                                }
                                            }
                                    );
                                },
                                listener::onFailure)
                        );
                        return;
                    }
                }
                listener.onFailure(new ResourceNotFoundException("No job [" + request.jobId + "] running"));
            }, listener::onFailure));
        }

        static void validate(String jobId, MlMetadata mlMetadata) {
            Allocation allocation = mlMetadata.getAllocations().get(jobId);
            if (allocation == null) {
                throw ExceptionsHelper.missingJobException(jobId);
            }

            if (allocation.getState() != JobState.OPENED) {
                throw new ElasticsearchStatusException("job not opened, expected job state [{}], but got [{}]",
                        RestStatus.CONFLICT, JobState.OPENED, allocation.getState());
            }
        }
    }
}

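Usage note: an illustrative sketch, not part of the commit; the job id, Client and Logger are placeholders. The transport action looks up the job's InternalOpenJobAction task by its "job-<id>" description, cancels it, then waits up to closeTimeout for the allocation to reach CLOSED.

import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.ml.action.CloseJobAction;

class CloseJobSketch {
    // `client` must be a node or transport client on a cluster with ML installed.
    static void close(Client client, Logger logger) {
        CloseJobAction.Request request = new CloseJobAction.Request("my-job"); // placeholder id
        request.setCloseTimeout(TimeValue.timeValueMinutes(5));                // default is 20 minutes
        client.execute(CloseJobAction.INSTANCE, request, ActionListener.wrap(
                response -> logger.info("closed: {}", response.isClosed()),
                // validate() failures land here: unknown job, or job not in the OPENED state (409)
                e -> logger.error("close failed", e)));
    }
}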
@@ -0,0 +1,189 @@ DeleteDatafeedAction.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;

import java.io.IOException;
import java.util.Objects;

public class DeleteDatafeedAction extends Action<DeleteDatafeedAction.Request, DeleteDatafeedAction.Response,
        DeleteDatafeedAction.RequestBuilder> {

    public static final DeleteDatafeedAction INSTANCE = new DeleteDatafeedAction();
    public static final String NAME = "cluster:admin/ml/datafeeds/delete";

    private DeleteDatafeedAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends AcknowledgedRequest<Request> implements ToXContent {

        private String datafeedId;

        public Request(String datafeedId) {
            this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName());
        }

        Request() {
        }

        public String getDatafeedId() {
            return datafeedId;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            datafeedId = in.readString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(datafeedId);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Request request = (Request) o;
            return Objects.equals(datafeedId, request.datafeedId);
        }

        @Override
        public int hashCode() {
            return Objects.hash(datafeedId);
        }
    }

    public static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> {

        public RequestBuilder(ElasticsearchClient client, DeleteDatafeedAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends AcknowledgedResponse {

        private Response() {
        }

        private Response(boolean acknowledged) {
            super(acknowledged);
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            readAcknowledged(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            writeAcknowledged(out);
        }
    }

    public static class TransportAction extends TransportMasterNodeAction<Request, Response> {

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
                               ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
            super(settings, DeleteDatafeedAction.NAME, transportService, clusterService, threadPool, actionFilters,
                    indexNameExpressionResolver, Request::new);
        }

        @Override
        protected String executor() {
            return ThreadPool.Names.SAME;
        }

        @Override
        protected Response newResponse() {
            return new Response();
        }

        @Override
        protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
            clusterService.submitStateUpdateTask("delete-datafeed-" + request.getDatafeedId(),
                    new AckedClusterStateUpdateTask<Response>(request, listener) {

                        @Override
                        protected Response newResponse(boolean acknowledged) {
                            return new Response(acknowledged);
                        }

                        @Override
                        public ClusterState execute(ClusterState currentState) throws Exception {
                            // Read from and build on currentState, not the state captured by
                            // masterOperation, so the update applies to the freshest cluster state.
                            MlMetadata currentMetadata = currentState.getMetaData().custom(MlMetadata.TYPE);
                            PersistentTasksInProgress persistentTasksInProgress = currentState.custom(PersistentTasksInProgress.TYPE);
                            MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata)
                                    .removeDatafeed(request.getDatafeedId(), persistentTasksInProgress).build();
                            return ClusterState.builder(currentState).metaData(
                                    MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, newMetadata).build())
                                    .build();
                        }
                    });
        }

        @Override
        protected ClusterBlockException checkBlock(Request request, ClusterState state) {
            return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
        }
    }
}

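Usage note: a hypothetical invocation sketch; `client`, `logger` and the datafeed id are placeholders. Deletion is an acked master-node cluster-state update, so the response's acknowledged flag reports whether the metadata change was acknowledged within the request's ack timeout.

import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.ml.action.DeleteDatafeedAction;

class DeleteDatafeedSketch {
    static void delete(Client client, Logger logger) {
        // removeDatafeed() above is handed PersistentTasksInProgress, presumably so a
        // still-running datafeed cannot be removed out from under its persistent task.
        DeleteDatafeedAction.Request request = new DeleteDatafeedAction.Request("feed-1"); // placeholder id
        client.execute(DeleteDatafeedAction.INSTANCE, request, ActionListener.wrap(
                response -> logger.info("acknowledged: {}", response.isAcknowledged()),
                e -> logger.error("datafeed delete failed", e)));
    }
}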
@@ -0,0 +1,216 @@ DeleteFilterAction.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.delete.TransportDeleteAction;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.config.Detector;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.MlFilter;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;

public class DeleteFilterAction extends Action<DeleteFilterAction.Request, DeleteFilterAction.Response, DeleteFilterAction.RequestBuilder> {

    public static final DeleteFilterAction INSTANCE = new DeleteFilterAction();
    public static final String NAME = "cluster:admin/ml/filters/delete";

    private DeleteFilterAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends AcknowledgedRequest<Request> {

        public static final ParseField FILTER_ID = new ParseField("filter_id");

        private String filterId;

        Request() {
        }

        public Request(String filterId) {
            this.filterId = ExceptionsHelper.requireNonNull(filterId, FILTER_ID.getPreferredName());
        }

        public String getFilterId() {
            return filterId;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            filterId = in.readString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(filterId);
        }

        @Override
        public int hashCode() {
            return Objects.hash(filterId);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null || getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(filterId, other.filterId);
        }
    }

    public static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> {

        public RequestBuilder(ElasticsearchClient client, DeleteFilterAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends AcknowledgedResponse {

        public Response(boolean acknowledged) {
            super(acknowledged);
        }

        private Response() {}

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            readAcknowledged(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            writeAcknowledged(out);
        }
    }

    public static class TransportAction extends TransportMasterNodeAction<Request, Response> {

        private final TransportDeleteAction transportAction;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
                               ThreadPool threadPool, ActionFilters actionFilters,
                               IndexNameExpressionResolver indexNameExpressionResolver,
                               TransportDeleteAction transportAction) {
            super(settings, DeleteFilterAction.NAME, transportService, clusterService, threadPool, actionFilters,
                    indexNameExpressionResolver, Request::new);
            this.transportAction = transportAction;
        }

        @Override
        protected String executor() {
            return ThreadPool.Names.SAME;
        }

        @Override
        protected Response newResponse() {
            return new Response();
        }

        @Override
        protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
            final String filterId = request.getFilterId();
            MlMetadata currentMlMetadata = state.metaData().custom(MlMetadata.TYPE);
            Map<String, Job> jobs = currentMlMetadata.getJobs();
            List<String> currentlyUsedBy = new ArrayList<>();
            for (Job job : jobs.values()) {
                List<Detector> detectors = job.getAnalysisConfig().getDetectors();
                for (Detector detector : detectors) {
                    if (detector.extractReferencedFilters().contains(filterId)) {
                        currentlyUsedBy.add(job.getId());
                        break;
                    }
                }
            }
            if (!currentlyUsedBy.isEmpty()) {
                throw ExceptionsHelper.conflictStatusException("Cannot delete filter, currently used by jobs: "
                        + currentlyUsedBy);
            }

            DeleteRequest deleteRequest = new DeleteRequest(JobProvider.ML_META_INDEX, MlFilter.TYPE.getPreferredName(), filterId);
            transportAction.execute(deleteRequest, new ActionListener<DeleteResponse>() {
                @Override
                public void onResponse(DeleteResponse deleteResponse) {
                    if (deleteResponse.status().equals(RestStatus.NOT_FOUND)) {
                        listener.onFailure(new ResourceNotFoundException("Could not delete filter with ID [" + filterId
                                + "] because it does not exist"));
                    } else {
                        listener.onResponse(new Response(true));
                    }
                }

                @Override
                public void onFailure(Exception e) {
                    logger.error("Could not delete filter with ID [" + filterId + "]", e);
                    listener.onFailure(new IllegalStateException("Could not delete filter with ID [" + filterId + "]", e));
                }
            });
        }

        @Override
        protected ClusterBlockException checkBlock(Request request, ClusterState state) {
            return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
        }
    }
}

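Usage note: a hypothetical sketch with the same placeholders as before. masterOperation first scans every job's detectors via extractReferencedFilters(), refusing with a 409 conflict while the filter is in use, and only then issues the document delete against the ML meta index.

import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.ml.action.DeleteFilterAction;

class DeleteFilterSketch {
    static void delete(Client client, Logger logger) {
        DeleteFilterAction.Request request = new DeleteFilterAction.Request("safe-ips"); // placeholder id
        client.execute(DeleteFilterAction.INSTANCE, request, ActionListener.wrap(
                response -> logger.info("filter deleted: {}", response.isAcknowledged()),
                // Both the in-use conflict (409) and a missing filter arrive as failures here.
                e -> logger.error("filter delete failed", e)));
    }
}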
@@ -0,0 +1,187 @@ DeleteJobAction.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.persistence.JobStorageDeletionTask;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.Objects;

public class DeleteJobAction extends Action<DeleteJobAction.Request, DeleteJobAction.Response, DeleteJobAction.RequestBuilder> {

    public static final DeleteJobAction INSTANCE = new DeleteJobAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/delete";

    private DeleteJobAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends AcknowledgedRequest<Request> {

        private String jobId;

        public Request(String jobId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
        }

        Request() {}

        public String getJobId() {
            return jobId;
        }

        public void setJobId(String jobId) {
            this.jobId = jobId;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public Task createTask(long id, String type, String action, TaskId parentTaskId) {
            return new JobStorageDeletionTask(id, type, action, "delete-job-" + jobId, parentTaskId);
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobId = in.readString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId);
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null || obj.getClass() != getClass()) {
                return false;
            }
            DeleteJobAction.Request other = (DeleteJobAction.Request) obj;
            return Objects.equals(jobId, other.jobId);
        }
    }

    static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> {

        RequestBuilder(ElasticsearchClient client, DeleteJobAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends AcknowledgedResponse {

        public Response(boolean acknowledged) {
            super(acknowledged);
        }

        private Response() {}

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            readAcknowledged(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            writeAcknowledged(out);
        }
    }

    public static class TransportAction extends TransportMasterNodeAction<Request, Response> {

        private final JobManager jobManager;
        private final Client client;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool,
                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, JobManager jobManager,
                               Client client) {
            super(settings, DeleteJobAction.NAME, transportService, clusterService, threadPool, actionFilters,
                    indexNameExpressionResolver, Request::new);
            this.jobManager = jobManager;
            this.client = client;
        }

        @Override
        protected String executor() {
            return ThreadPool.Names.SAME;
        }

        @Override
        protected Response newResponse() {
            return new Response();
        }

        @Override
        protected void masterOperation(Task task, Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
            jobManager.deleteJob(request, client, (JobStorageDeletionTask) task, listener);
        }

        @Override
        protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
            throw new UnsupportedOperationException("the Task parameter is required");
        }

        @Override
        protected ClusterBlockException checkBlock(Request request, ClusterState state) {
            return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
        }
    }
}

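Usage note: a hypothetical sketch. Unlike the other delete actions in this commit, the request overrides createTask() so deletion runs as a JobStorageDeletionTask, and the Task-less masterOperation overload is deliberately unsupported.

import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.ml.action.DeleteJobAction;

class DeleteJobSketch {
    static void delete(Client client, Logger logger) {
        DeleteJobAction.Request request = new DeleteJobAction.Request("my-job"); // placeholder id
        client.execute(DeleteJobAction.INSTANCE, request, ActionListener.wrap(
                response -> logger.info("job deleted: {}", response.isAcknowledged()),
                e -> logger.error("job delete failed", e)));
    }
}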
@@ -0,0 +1,201 @@ DeleteModelSnapshotAction.java
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.job.persistence.JobDataDeleter;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.List;

public class DeleteModelSnapshotAction extends Action<DeleteModelSnapshotAction.Request,
        DeleteModelSnapshotAction.Response, DeleteModelSnapshotAction.RequestBuilder> {

    public static final DeleteModelSnapshotAction INSTANCE = new DeleteModelSnapshotAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/model_snapshots/delete";

    private DeleteModelSnapshotAction() {
        super(NAME);
    }

    @Override
    public DeleteModelSnapshotAction.RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public DeleteModelSnapshotAction.Response newResponse() {
        return new Response();
    }

    public static class Request extends ActionRequest {

        private String jobId;
        private String snapshotId;

        private Request() {
        }

        public Request(String jobId, String snapshotId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
            this.snapshotId = ExceptionsHelper.requireNonNull(snapshotId, ModelSnapshot.SNAPSHOT_ID.getPreferredName());
        }

        public String getJobId() {
            return jobId;
        }

        public String getSnapshotId() {
            return snapshotId;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobId = in.readString();
            snapshotId = in.readString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
            out.writeString(snapshotId);
        }
    }

    public static class Response extends AcknowledgedResponse {

        public Response(boolean acknowledged) {
            super(acknowledged);
        }

        private Response() {}

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            readAcknowledged(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            writeAcknowledged(out);
        }
    }

    public static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {

        public RequestBuilder(ElasticsearchClient client, DeleteModelSnapshotAction action) {
            super(client, action, new Request());
        }
    }

    public static class TransportAction extends HandledTransportAction<Request, Response> {

        private final Client client;
        private final JobProvider jobProvider;
        private final JobManager jobManager;
        private final ClusterService clusterService;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool,
                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                               JobProvider jobProvider, JobManager jobManager, ClusterService clusterService,
                               Client client) {
            super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new);
            this.client = client;
            this.jobProvider = jobProvider;
            this.jobManager = jobManager;
            this.clusterService = clusterService;
        }

        @Override
        protected void doExecute(Request request, ActionListener<Response> listener) {
            // Verify the snapshot exists
            jobProvider.modelSnapshots(
                    request.getJobId(), 0, 1, null, null, null, true, request.getSnapshotId(), null,
                    page -> {
                        List<ModelSnapshot> deleteCandidates = page.results();
                        if (deleteCandidates.size() > 1) {
                            logger.warn("More than one model found for [job_id: " + request.getJobId()
                                    + ", snapshot_id: " + request.getSnapshotId() + "] tuple.");
                        }

                        if (deleteCandidates.isEmpty()) {
                            listener.onFailure(new ResourceNotFoundException(
                                    Messages.getMessage(Messages.REST_NO_SUCH_MODEL_SNAPSHOT, request.getJobId())));
                            // Stop here: without this return the empty-candidates case would
                            // fall through to deleteCandidates.get(0) and throw.
                            return;
                        }
                        ModelSnapshot deleteCandidate = deleteCandidates.get(0);

                        // Verify the snapshot is not being used
                        //
                        // NORELEASE: technically, this could be stale and refuse a delete, but I think that's acceptable
                        // since it is non-destructive
                        QueryPage<Job> job = jobManager.getJob(request.getJobId(), clusterService.state());
                        if (job.count() > 0) {
                            String currentModelInUse = job.results().get(0).getModelSnapshotId();
                            if (currentModelInUse != null && currentModelInUse.equals(request.getSnapshotId())) {
                                throw new IllegalArgumentException(Messages.getMessage(Messages.REST_CANNOT_DELETE_HIGHEST_PRIORITY,
                                        request.getSnapshotId(), request.getJobId()));
                            }
                        }

                        // Delete the snapshot and any associated state files
                        JobDataDeleter deleter = new JobDataDeleter(client, request.getJobId());
                        deleter.deleteModelSnapshot(deleteCandidate);
                        deleter.commit(new ActionListener<BulkResponse>() {
                            @Override
                            public void onResponse(BulkResponse bulkResponse) {
                                // We don't care about the bulk response, just that it succeeded
                                listener.onResponse(new DeleteModelSnapshotAction.Response(true));
                            }

                            @Override
                            public void onFailure(Exception e) {
                                listener.onFailure(e);
                            }
                        });

                        jobManager.audit(request.getJobId()).info(Messages.getMessage(Messages.JOB_AUDIT_SNAPSHOT_DELETED,
                                deleteCandidate.getDescription()));
                    }, listener::onFailure);
        }
    }
}
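A minimal usage sketch for the action above, using the Request(jobId, snapshotId) constructor it defines. The enclosing method, the injected Client, and the ids are illustrative only:

    // Hypothetical caller, not part of this commit.
    void deleteSnapshot(Client client, ActionListener<DeleteModelSnapshotAction.Response> listener) {
        DeleteModelSnapshotAction.Request request =
                new DeleteModelSnapshotAction.Request("my-job", "snapshot-1"); // example ids
        client.execute(DeleteModelSnapshotAction.INSTANCE, request, listener);
    }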
@ -0,0 +1,287 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.tasks.BaseTasksResponse;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.process.autodetect.params.InterimResultsParams;
import org.elasticsearch.xpack.ml.job.process.autodetect.params.TimeRange;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.Objects;

public class FlushJobAction extends Action<FlushJobAction.Request, FlushJobAction.Response, FlushJobAction.RequestBuilder> {

    public static final FlushJobAction INSTANCE = new FlushJobAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/flush";

    private FlushJobAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends TransportJobTaskAction.JobTaskRequest<Request> implements ToXContent {

        public static final ParseField CALC_INTERIM = new ParseField("calc_interim");
        public static final ParseField START = new ParseField("start");
        public static final ParseField END = new ParseField("end");
        public static final ParseField ADVANCE_TIME = new ParseField("advance_time");

        private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);

        static {
            PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
            PARSER.declareBoolean(Request::setCalcInterim, CALC_INTERIM);
            PARSER.declareString(Request::setStart, START);
            PARSER.declareString(Request::setEnd, END);
            PARSER.declareString(Request::setAdvanceTime, ADVANCE_TIME);
        }

        public static Request parseRequest(String jobId, XContentParser parser) {
            Request request = PARSER.apply(parser, null);
            if (jobId != null) {
                request.jobId = jobId;
            }
            return request;
        }

        private boolean calcInterim = false;
        private String start;
        private String end;
        private String advanceTime;

        Request() {
        }

        public Request(String jobId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
        }

        public String getJobId() {
            return jobId;
        }

        public boolean getCalcInterim() {
            return calcInterim;
        }

        public void setCalcInterim(boolean calcInterim) {
            this.calcInterim = calcInterim;
        }

        public String getStart() {
            return start;
        }

        public void setStart(String start) {
            this.start = start;
        }

        public String getEnd() {
            return end;
        }

        public void setEnd(String end) {
            this.end = end;
        }

        public String getAdvanceTime() {
            return advanceTime;
        }

        public void setAdvanceTime(String advanceTime) {
            this.advanceTime = advanceTime;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            calcInterim = in.readBoolean();
            start = in.readOptionalString();
            end = in.readOptionalString();
            advanceTime = in.readOptionalString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeBoolean(calcInterim);
            out.writeOptionalString(start);
            out.writeOptionalString(end);
            out.writeOptionalString(advanceTime);
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId, calcInterim, start, end, advanceTime);
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null || getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(jobId, other.jobId) &&
                    calcInterim == other.calcInterim &&
                    Objects.equals(start, other.start) &&
                    Objects.equals(end, other.end) &&
                    Objects.equals(advanceTime, other.advanceTime);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(Job.ID.getPreferredName(), jobId);
            builder.field(CALC_INTERIM.getPreferredName(), calcInterim);
            if (start != null) {
                builder.field(START.getPreferredName(), start);
            }
            if (end != null) {
                builder.field(END.getPreferredName(), end);
            }
            if (advanceTime != null) {
                builder.field(ADVANCE_TIME.getPreferredName(), advanceTime);
            }
            builder.endObject();
            return builder;
        }
    }

    static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {

        RequestBuilder(ElasticsearchClient client, FlushJobAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject {

        private boolean flushed;

        Response() {
        }

        Response(boolean flushed) {
            super(null, null);
            this.flushed = flushed;
        }

        public boolean isFlushed() {
            return flushed;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            flushed = in.readBoolean();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeBoolean(flushed);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field("flushed", flushed);
            builder.endObject();
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Response response = (Response) o;
            return flushed == response.flushed;
        }

        @Override
        public int hashCode() {
            return Objects.hash(flushed);
        }
    }

    public static class TransportAction extends TransportJobTaskAction<InternalOpenJobAction.JobTask, Request, Response> {

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ClusterService clusterService,
                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                               AutodetectProcessManager processManager, JobManager jobManager) {
            super(settings, FlushJobAction.NAME, threadPool, clusterService, transportService, actionFilters,
                    indexNameExpressionResolver, FlushJobAction.Request::new, FlushJobAction.Response::new, MlPlugin.THREAD_POOL_NAME,
                    jobManager, processManager, Request::getJobId);
        }

        @Override
        protected FlushJobAction.Response readTaskResponse(StreamInput in) throws IOException {
            Response response = new Response();
            response.readFrom(in);
            return response;
        }

        @Override
        protected void taskOperation(Request request, InternalOpenJobAction.JobTask task,
                                     ActionListener<FlushJobAction.Response> listener) {
            jobManager.getJobOrThrowIfUnknown(request.getJobId());

            InterimResultsParams.Builder paramsBuilder = InterimResultsParams.builder();
            paramsBuilder.calcInterim(request.getCalcInterim());
            if (request.getAdvanceTime() != null) {
                paramsBuilder.advanceTime(request.getAdvanceTime());
            }
            TimeRange.Builder timeRangeBuilder = TimeRange.builder();
            if (request.getStart() != null) {
                timeRangeBuilder.startTime(request.getStart());
            }
            if (request.getEnd() != null) {
                timeRangeBuilder.endTime(request.getEnd());
            }
            paramsBuilder.forTimeRange(timeRangeBuilder.build());
            processManager.flushJob(request.getJobId(), paramsBuilder.build());
            listener.onResponse(new Response(true));
        }
    }
}
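A minimal usage sketch for FlushJobAction, exercising the setters defined above. The enclosing method, the injected Client, and the values are illustrative; accepted time formats are whatever the server-side InterimResultsParams/TimeRange parsing allows:

    // Hypothetical caller, not part of this commit.
    void flushJob(Client client, ActionListener<FlushJobAction.Response> listener) {
        FlushJobAction.Request request = new FlushJobAction.Request("my-job"); // example id
        request.setCalcInterim(true);         // also calculate interim results for the flushed data
        request.setAdvanceTime("1484134861"); // example value; validated server-side
        client.execute(FlushJobAction.INSTANCE, request, listener);
    }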
@ -0,0 +1,421 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.action.util.PageParams;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.persistence.BucketsQueryBuilder;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.results.Bucket;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.Objects;

public class GetBucketsAction extends Action<GetBucketsAction.Request, GetBucketsAction.Response, GetBucketsAction.RequestBuilder> {

    public static final GetBucketsAction INSTANCE = new GetBucketsAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/results/buckets/get";

    private GetBucketsAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends ActionRequest implements ToXContent {

        public static final ParseField EXPAND = new ParseField("expand");
        public static final ParseField INCLUDE_INTERIM = new ParseField("include_interim");
        public static final ParseField PARTITION_VALUE = new ParseField("partition_value");
        public static final ParseField START = new ParseField("start");
        public static final ParseField END = new ParseField("end");
        public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score");
        public static final ParseField MAX_NORMALIZED_PROBABILITY = new ParseField("max_normalized_probability");
        public static final ParseField TIMESTAMP = new ParseField("timestamp");

        private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);

        // Each field is declared once; the original listing repeated the EXPAND,
        // INCLUDE_INTERIM, and PARTITION_VALUE declarations.
        static {
            PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
            PARSER.declareString(Request::setTimestamp, Bucket.TIMESTAMP);
            PARSER.declareString(Request::setPartitionValue, PARTITION_VALUE);
            PARSER.declareBoolean(Request::setExpand, EXPAND);
            PARSER.declareBoolean(Request::setIncludeInterim, INCLUDE_INTERIM);
            PARSER.declareStringOrNull(Request::setStart, START);
            PARSER.declareStringOrNull(Request::setEnd, END);
            PARSER.declareObject(Request::setPageParams, PageParams.PARSER, PageParams.PAGE);
            PARSER.declareDouble(Request::setAnomalyScore, ANOMALY_SCORE);
            PARSER.declareDouble(Request::setMaxNormalizedProbability, MAX_NORMALIZED_PROBABILITY);
        }

        public static Request parseRequest(String jobId, XContentParser parser) {
            Request request = PARSER.apply(parser, null);
            if (jobId != null) {
                request.jobId = jobId;
            }
            return request;
        }

        private String jobId;
        private String timestamp;
        private boolean expand = false;
        private boolean includeInterim = false;
        private String partitionValue;
        private String start;
        private String end;
        private PageParams pageParams;
        private Double anomalyScore;
        private Double maxNormalizedProbability;

        Request() {
        }

        public Request(String jobId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
        }

        public String getJobId() {
            return jobId;
        }

        public void setTimestamp(String timestamp) {
            if (pageParams != null || start != null || end != null || anomalyScore != null || maxNormalizedProbability != null) {
                throw new IllegalArgumentException("Param [" + TIMESTAMP.getPreferredName() + "] is incompatible with ["
                        + PageParams.FROM.getPreferredName() + ","
                        + PageParams.SIZE.getPreferredName() + ","
                        + START.getPreferredName() + ","
                        + END.getPreferredName() + ","
                        + ANOMALY_SCORE.getPreferredName() + ","
                        + MAX_NORMALIZED_PROBABILITY.getPreferredName() + "]");
            }
            this.timestamp = ExceptionsHelper.requireNonNull(timestamp, Bucket.TIMESTAMP.getPreferredName());
        }

        public String getTimestamp() {
            return timestamp;
        }

        public boolean isExpand() {
            return expand;
        }

        public void setExpand(boolean expand) {
            this.expand = expand;
        }

        public boolean isIncludeInterim() {
            return includeInterim;
        }

        public void setIncludeInterim(boolean includeInterim) {
            this.includeInterim = includeInterim;
        }

        public String getPartitionValue() {
            return partitionValue;
        }

        public void setPartitionValue(String partitionValue) {
            if (timestamp != null) {
                throw new IllegalArgumentException("Param [" + PARTITION_VALUE.getPreferredName() + "] is incompatible with ["
                        + TIMESTAMP.getPreferredName() + "].");
            }
            this.partitionValue = partitionValue;
        }

        public String getStart() {
            return start;
        }

        public void setStart(String start) {
            if (timestamp != null) {
                throw new IllegalArgumentException("Param [" + START.getPreferredName() + "] is incompatible with ["
                        + TIMESTAMP.getPreferredName() + "].");
            }
            this.start = start;
        }

        public String getEnd() {
            return end;
        }

        public void setEnd(String end) {
            if (timestamp != null) {
                throw new IllegalArgumentException("Param [" + END.getPreferredName() + "] is incompatible with ["
                        + TIMESTAMP.getPreferredName() + "].");
            }
            this.end = end;
        }

        public PageParams getPageParams() {
            return pageParams;
        }

        public void setPageParams(PageParams pageParams) {
            if (timestamp != null) {
                throw new IllegalArgumentException("Param [" + PageParams.FROM.getPreferredName()
                        + ", " + PageParams.SIZE.getPreferredName() + "] is incompatible with [" + TIMESTAMP.getPreferredName() + "].");
            }
            this.pageParams = ExceptionsHelper.requireNonNull(pageParams, PageParams.PAGE.getPreferredName());
        }

        public double getAnomalyScore() {
            return anomalyScore;
        }

        public void setAnomalyScore(double anomalyScore) {
            if (timestamp != null) {
                throw new IllegalArgumentException("Param [" + ANOMALY_SCORE.getPreferredName() + "] is incompatible with ["
                        + TIMESTAMP.getPreferredName() + "].");
            }
            this.anomalyScore = anomalyScore;
        }

        public double getMaxNormalizedProbability() {
            return maxNormalizedProbability;
        }

        public void setMaxNormalizedProbability(double maxNormalizedProbability) {
            if (timestamp != null) {
                throw new IllegalArgumentException("Param [" + MAX_NORMALIZED_PROBABILITY.getPreferredName() + "] is incompatible with ["
                        + TIMESTAMP.getPreferredName() + "].");
            }
            this.maxNormalizedProbability = maxNormalizedProbability;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobId = in.readString();
            timestamp = in.readOptionalString();
            expand = in.readBoolean();
            includeInterim = in.readBoolean();
            partitionValue = in.readOptionalString();
            start = in.readOptionalString();
            end = in.readOptionalString();
            anomalyScore = in.readOptionalDouble();
            maxNormalizedProbability = in.readOptionalDouble();
            pageParams = in.readOptionalWriteable(PageParams::new);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
            out.writeOptionalString(timestamp);
            out.writeBoolean(expand);
            out.writeBoolean(includeInterim);
            out.writeOptionalString(partitionValue);
            out.writeOptionalString(start);
            out.writeOptionalString(end);
            out.writeOptionalDouble(anomalyScore);
            out.writeOptionalDouble(maxNormalizedProbability);
            out.writeOptionalWriteable(pageParams);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(Job.ID.getPreferredName(), jobId);
            if (timestamp != null) {
                builder.field(Bucket.TIMESTAMP.getPreferredName(), timestamp);
            }
            builder.field(EXPAND.getPreferredName(), expand);
            builder.field(INCLUDE_INTERIM.getPreferredName(), includeInterim);
            if (partitionValue != null) {
                builder.field(PARTITION_VALUE.getPreferredName(), partitionValue);
            }
            if (start != null) {
                builder.field(START.getPreferredName(), start);
            }
            if (end != null) {
                builder.field(END.getPreferredName(), end);
            }
            if (pageParams != null) {
                builder.field(PageParams.PAGE.getPreferredName(), pageParams);
            }
            if (anomalyScore != null) {
                builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore);
            }
            if (maxNormalizedProbability != null) {
                builder.field(MAX_NORMALIZED_PROBABILITY.getPreferredName(), maxNormalizedProbability);
            }
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId, timestamp, partitionValue, expand, includeInterim,
                    anomalyScore, maxNormalizedProbability, pageParams, start, end);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(jobId, other.jobId) &&
                    Objects.equals(timestamp, other.timestamp) &&
                    Objects.equals(partitionValue, other.partitionValue) &&
                    Objects.equals(expand, other.expand) &&
                    Objects.equals(includeInterim, other.includeInterim) &&
                    Objects.equals(anomalyScore, other.anomalyScore) &&
                    Objects.equals(maxNormalizedProbability, other.maxNormalizedProbability) &&
                    Objects.equals(pageParams, other.pageParams) &&
                    Objects.equals(start, other.start) &&
                    Objects.equals(end, other.end);
        }
    }

    static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {

        RequestBuilder(ElasticsearchClient client) {
            super(client, INSTANCE, new Request());
        }
    }

    public static class Response extends ActionResponse implements ToXContentObject {

        private QueryPage<Bucket> buckets;

        Response() {
        }

        Response(QueryPage<Bucket> buckets) {
            this.buckets = buckets;
        }

        public QueryPage<Bucket> getBuckets() {
            return buckets;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            buckets = new QueryPage<>(in, Bucket::new);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            buckets.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            buckets.doXContentBody(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(buckets);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Response other = (Response) obj;
            return Objects.equals(buckets, other.buckets);
        }

        @Override
        public final String toString() {
            return Strings.toString(this);
        }
    }

    public static class TransportAction extends HandledTransportAction<Request, Response> {

        private final JobProvider jobProvider;

        @Inject
        public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService,
                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                               JobProvider jobProvider) {
            super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new);
            this.jobProvider = jobProvider;
        }

        @Override
        protected void doExecute(Request request, ActionListener<Response> listener) {
            BucketsQueryBuilder query =
                    new BucketsQueryBuilder().expand(request.expand)
                            .includeInterim(request.includeInterim)
                            .start(request.start)
                            .end(request.end)
                            .anomalyScoreThreshold(request.anomalyScore)
                            .normalizedProbabilityThreshold(request.maxNormalizedProbability)
                            .partitionValue(request.partitionValue);

            if (request.pageParams != null) {
                query.from(request.pageParams.getFrom())
                        .size(request.pageParams.getSize());
            }
            if (request.timestamp != null) {
                query.timestamp(request.timestamp);
            } else {
                query.start(request.start);
                query.end(request.end);
            }
            jobProvider.buckets(request.jobId, query.build(), q -> listener.onResponse(new Response(q)), listener::onFailure);
        }
    }

}
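A minimal usage sketch for GetBucketsAction; note that timestamp is mutually exclusive with the range, score, and paging parameters, as the setters above enforce. The enclosing method, the injected Client, and the values are illustrative:

    // Hypothetical caller, not part of this commit.
    void getBuckets(Client client, ActionListener<GetBucketsAction.Response> listener) {
        GetBucketsAction.Request request = new GetBucketsAction.Request("my-job"); // example id
        request.setStart("1454284800000"); // example time strings; parsing happens server-side
        request.setEnd("1455494400000");
        client.execute(GetBucketsAction.INSTANCE, request, listener);
    }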
@ -0,0 +1,255 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.results.CategoryDefinition;
import org.elasticsearch.xpack.ml.action.util.PageParams;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.Objects;

import static org.elasticsearch.action.ValidateActions.addValidationError;

public class GetCategoriesAction extends
        Action<GetCategoriesAction.Request, GetCategoriesAction.Response, GetCategoriesAction.RequestBuilder> {

    public static final GetCategoriesAction INSTANCE = new GetCategoriesAction();
    private static final String NAME = "cluster:admin/ml/anomaly_detectors/results/categories/get";

    private GetCategoriesAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends ActionRequest implements ToXContent {

        public static final ParseField CATEGORY_ID = new ParseField("category_id");
        public static final ParseField FROM = new ParseField("from");
        public static final ParseField SIZE = new ParseField("size");

        private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);

        static {
            PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
            PARSER.declareString(Request::setCategoryId, CATEGORY_ID);
            PARSER.declareObject(Request::setPageParams, PageParams.PARSER, PageParams.PAGE);
        }

        public static Request parseRequest(String jobId, XContentParser parser) {
            Request request = PARSER.apply(parser, null);
            if (jobId != null) {
                request.jobId = jobId;
            }
            return request;
        }

        private String jobId;
        private String categoryId;
        private PageParams pageParams;

        public Request(String jobId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
        }

        Request() {
        }

        public String getCategoryId() {
            return categoryId;
        }

        public void setCategoryId(String categoryId) {
            if (pageParams != null) {
                throw new IllegalArgumentException("Param [" + CATEGORY_ID.getPreferredName() + "] is incompatible with ["
                        + PageParams.FROM.getPreferredName() + ", " + PageParams.SIZE.getPreferredName() + "].");
            }
            this.categoryId = ExceptionsHelper.requireNonNull(categoryId, CATEGORY_ID.getPreferredName());
        }

        public PageParams getPageParams() {
            return pageParams;
        }

        public void setPageParams(PageParams pageParams) {
            if (categoryId != null) {
                throw new IllegalArgumentException("Param [" + PageParams.FROM.getPreferredName() + ", "
                        + PageParams.SIZE.getPreferredName() + "] is incompatible with [" + CATEGORY_ID.getPreferredName() + "].");
            }
            this.pageParams = pageParams;
        }

        @Override
        public ActionRequestValidationException validate() {
            ActionRequestValidationException validationException = null;
            if (pageParams == null && categoryId == null) {
                validationException = addValidationError("Both [" + CATEGORY_ID.getPreferredName() + "] and ["
                        + PageParams.FROM.getPreferredName() + ", " + PageParams.SIZE.getPreferredName() + "] "
                        + "cannot be null", validationException);
            }
            return validationException;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobId = in.readString();
            categoryId = in.readOptionalString();
            pageParams = in.readOptionalWriteable(PageParams::new);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
            out.writeOptionalString(categoryId);
            out.writeOptionalWriteable(pageParams);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(Job.ID.getPreferredName(), jobId);
            if (categoryId != null) {
                builder.field(CATEGORY_ID.getPreferredName(), categoryId);
            }
            if (pageParams != null) {
                builder.field(PageParams.PAGE.getPreferredName(), pageParams);
            }
            builder.endObject();
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o)
                return true;
            if (o == null || getClass() != o.getClass())
                return false;
            Request request = (Request) o;
            return Objects.equals(jobId, request.jobId)
                    && Objects.equals(categoryId, request.categoryId)
                    && Objects.equals(pageParams, request.pageParams);
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId, categoryId, pageParams);
        }
    }

    public static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {

        public RequestBuilder(ElasticsearchClient client, GetCategoriesAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends ActionResponse implements ToXContentObject {

        private QueryPage<CategoryDefinition> result;

        public Response(QueryPage<CategoryDefinition> result) {
            this.result = result;
        }

        Response() {
        }

        public QueryPage<CategoryDefinition> getResult() {
            return result;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            result = new QueryPage<>(in, CategoryDefinition::new);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            result.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            result.doXContentBody(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o)
                return true;
            if (o == null || getClass() != o.getClass())
                return false;
            Response response = (Response) o;
            return Objects.equals(result, response.result);
        }

        @Override
        public int hashCode() {
            return Objects.hash(result);
        }
    }

    public static class TransportAction extends HandledTransportAction<Request, Response> {

        private final JobProvider jobProvider;

        @Inject
        public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters,
                               IndexNameExpressionResolver indexNameExpressionResolver, JobProvider jobProvider) {
            super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new);
            this.jobProvider = jobProvider;
        }

        @Override
        protected void doExecute(Request request, ActionListener<Response> listener) {
            Integer from = request.pageParams != null ? request.pageParams.getFrom() : null;
            Integer size = request.pageParams != null ? request.pageParams.getSize() : null;
            jobProvider.categoryDefinitions(request.jobId, request.categoryId, from, size,
                    r -> listener.onResponse(new Response(r)), listener::onFailure);
        }
    }
}
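A minimal usage sketch for GetCategoriesAction; validate() above requires either a category id or page params to be set, and the setters reject combining the two. The enclosing method, the injected Client, and the values are illustrative:

    // Hypothetical caller, not part of this commit.
    void getCategory(Client client, ActionListener<GetCategoriesAction.Response> listener) {
        GetCategoriesAction.Request request = new GetCategoriesAction.Request("my-job"); // example id
        request.setCategoryId("1"); // or request.setPageParams(...), but never both
        client.execute(GetCategoriesAction.INSTANCE, request, listener);
    }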
@ -0,0 +1,221 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

public class GetDatafeedsAction extends Action<GetDatafeedsAction.Request, GetDatafeedsAction.Response,
        GetDatafeedsAction.RequestBuilder> {

    public static final GetDatafeedsAction INSTANCE = new GetDatafeedsAction();
    public static final String NAME = "cluster:admin/ml/datafeeds/get";

    public static final String ALL = "_all";

    private GetDatafeedsAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends MasterNodeReadRequest<Request> {

        private String datafeedId;

        public Request(String datafeedId) {
            this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName());
        }

        Request() {}

        public String getDatafeedId() {
            return datafeedId;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            datafeedId = in.readString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(datafeedId);
        }

        @Override
        public int hashCode() {
            return Objects.hash(datafeedId);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(datafeedId, other.datafeedId);
        }
    }

    public static class RequestBuilder extends MasterNodeReadOperationRequestBuilder<Request, Response, RequestBuilder> {

        public RequestBuilder(ElasticsearchClient client, GetDatafeedsAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends ActionResponse implements ToXContentObject {

        private QueryPage<DatafeedConfig> datafeeds;

        public Response(QueryPage<DatafeedConfig> datafeeds) {
            this.datafeeds = datafeeds;
        }

        public Response() {}

        public QueryPage<DatafeedConfig> getResponse() {
            return datafeeds;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            datafeeds = new QueryPage<>(in, DatafeedConfig::new);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            datafeeds.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            datafeeds.doXContentBody(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(datafeeds);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Response other = (Response) obj;
            return Objects.equals(datafeeds, other.datafeeds);
        }

        @Override
        public final String toString() {
            return Strings.toString(this);
        }
    }

    public static class TransportAction extends TransportMasterNodeReadAction<Request, Response> {

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
                               ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
            super(settings, GetDatafeedsAction.NAME, transportService, clusterService, threadPool, actionFilters,
                    indexNameExpressionResolver, Request::new);
        }

        @Override
        protected String executor() {
            return ThreadPool.Names.SAME;
        }

        @Override
        protected Response newResponse() {
            return new Response();
        }

        @Override
        protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
            logger.debug("Get datafeed '{}'", request.getDatafeedId());

            QueryPage<DatafeedConfig> response;
            MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE);
            if (ALL.equals(request.getDatafeedId())) {
                List<DatafeedConfig> datafeedConfigs = new ArrayList<>(mlMetadata.getDatafeeds().values());
                response = new QueryPage<>(datafeedConfigs, datafeedConfigs.size(), DatafeedConfig.RESULTS_FIELD);
            } else {
                DatafeedConfig datafeed = mlMetadata.getDatafeed(request.getDatafeedId());
                if (datafeed == null) {
                    throw ExceptionsHelper.missingDatafeedException(request.getDatafeedId());
                }
                response = new QueryPage<>(Collections.singletonList(datafeed), 1, DatafeedConfig.RESULTS_FIELD);
            }

            listener.onResponse(new Response(response));
        }

        @Override
        protected ClusterBlockException checkBlock(Request request, ClusterState state) {
            return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
        }
    }
}
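A minimal usage sketch for GetDatafeedsAction, using the _all constant defined above to fetch every datafeed. The enclosing method and the injected Client are illustrative:

    // Hypothetical caller, not part of this commit.
    void listDatafeeds(Client client, ActionListener<GetDatafeedsAction.Response> listener) {
        GetDatafeedsAction.Request request = new GetDatafeedsAction.Request(GetDatafeedsAction.ALL);
        client.execute(GetDatafeedsAction.INSTANCE, request, listener);
    }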
@ -0,0 +1,303 @@
|
||||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.ml.action;
|
||||
|
||||
import org.elasticsearch.action.Action;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
|
||||
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
|
||||
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.ml.action.util.QueryPage;
|
||||
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
|
||||
import org.elasticsearch.xpack.ml.datafeed.DatafeedState;
|
||||
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
|
||||
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
|
||||
import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;
|
||||
import org.elasticsearch.xpack.persistent.PersistentTasksInProgress.PersistentTaskInProgress;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
public class GetDatafeedsStatsAction extends Action<GetDatafeedsStatsAction.Request, GetDatafeedsStatsAction.Response,
|
||||
GetDatafeedsStatsAction.RequestBuilder> {
|
||||
|
||||
public static final GetDatafeedsStatsAction INSTANCE = new GetDatafeedsStatsAction();
|
||||
public static final String NAME = "cluster:admin/ml/datafeeds/stats/get";
|
||||
|
||||
public static final String ALL = "_all";
|
||||
private static final String STATE = "state";
|
||||
|
||||
private GetDatafeedsStatsAction() {
|
||||
super(NAME);
|
||||
}
|
||||
|
||||
@Override
|
||||
public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
|
||||
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends MasterNodeReadRequest<Request> {

        private String datafeedId;

        public Request(String datafeedId) {
            this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName());
        }

        Request() {}

        public String getDatafeedId() {
            return datafeedId;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            datafeedId = in.readString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(datafeedId);
        }

        @Override
        public int hashCode() {
            return Objects.hash(datafeedId);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(datafeedId, other.datafeedId);
        }
    }

    public static class RequestBuilder extends MasterNodeReadOperationRequestBuilder<Request, Response, RequestBuilder> {

        public RequestBuilder(ElasticsearchClient client, GetDatafeedsStatsAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends ActionResponse implements ToXContentObject {

        public static class DatafeedStats implements ToXContent, Writeable {

            private final String datafeedId;
            private final DatafeedState datafeedState;

            DatafeedStats(String datafeedId, DatafeedState datafeedState) {
                this.datafeedId = Objects.requireNonNull(datafeedId);
                this.datafeedState = Objects.requireNonNull(datafeedState);
            }

            DatafeedStats(StreamInput in) throws IOException {
                datafeedId = in.readString();
                datafeedState = DatafeedState.fromStream(in);
            }

            public String getDatafeedId() {
                return datafeedId;
            }

            public DatafeedState getDatafeedState() {
                return datafeedState;
            }

            @Override
            public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
                builder.startObject();
                builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
                builder.field(STATE, datafeedState);
                builder.endObject();

                return builder;
            }

            @Override
            public void writeTo(StreamOutput out) throws IOException {
                out.writeString(datafeedId);
                datafeedState.writeTo(out);
            }

            @Override
            public int hashCode() {
                return Objects.hash(datafeedId, datafeedState);
            }

            @Override
            public boolean equals(Object obj) {
                if (obj == null) {
                    return false;
                }
                if (getClass() != obj.getClass()) {
                    return false;
                }
                GetDatafeedsStatsAction.Response.DatafeedStats other = (GetDatafeedsStatsAction.Response.DatafeedStats) obj;
                return Objects.equals(datafeedId, other.datafeedId) && Objects.equals(this.datafeedState, other.datafeedState);
            }
        }

        private QueryPage<DatafeedStats> datafeedsStats;

        public Response(QueryPage<DatafeedStats> datafeedsStats) {
            this.datafeedsStats = datafeedsStats;
        }

        public Response() {}

        public QueryPage<DatafeedStats> getResponse() {
            return datafeedsStats;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            datafeedsStats = new QueryPage<>(in, DatafeedStats::new);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            datafeedsStats.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            datafeedsStats.doXContentBody(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(datafeedsStats);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Response other = (Response) obj;
            return Objects.equals(datafeedsStats, other.datafeedsStats);
        }

        @Override
        public final String toString() {
            return Strings.toString(this);
        }
    }

    public static class TransportAction extends TransportMasterNodeReadAction<Request, Response> {

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
                               ThreadPool threadPool, ActionFilters actionFilters,
                               IndexNameExpressionResolver indexNameExpressionResolver) {
            super(settings, GetDatafeedsStatsAction.NAME, transportService, clusterService, threadPool, actionFilters,
                    indexNameExpressionResolver, Request::new);
        }

        @Override
        protected String executor() {
            return ThreadPool.Names.SAME;
        }

        @Override
        protected Response newResponse() {
            return new Response();
        }

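        // The operation below derives each datafeed's state from the persistent tasks in the
        // cluster state: a datafeed with a running StartDatafeedAction task is reported as
        // STARTED, any other known datafeed as STOPPED.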
        @Override
        protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
            logger.debug("Get stats for datafeed '{}'", request.getDatafeedId());

            Map<String, DatafeedState> states = new HashMap<>();
            PersistentTasksInProgress tasksInProgress = state.custom(PersistentTasksInProgress.TYPE);
            if (tasksInProgress != null) {
                Predicate<PersistentTaskInProgress<?>> predicate = ALL.equals(request.getDatafeedId()) ? p -> true :
                        p -> request.getDatafeedId().equals(((StartDatafeedAction.Request) p.getRequest()).getDatafeedId());
                for (PersistentTaskInProgress<?> taskInProgress : tasksInProgress.findTasks(StartDatafeedAction.NAME, predicate)) {
                    StartDatafeedAction.Request storedRequest = (StartDatafeedAction.Request) taskInProgress.getRequest();
                    states.put(storedRequest.getDatafeedId(), DatafeedState.STARTED);
                }
            }

            List<Response.DatafeedStats> stats = new ArrayList<>();
            MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE);
            if (ALL.equals(request.getDatafeedId())) {
                Collection<DatafeedConfig> datafeeds = mlMetadata.getDatafeeds().values();
                for (DatafeedConfig datafeed : datafeeds) {
                    DatafeedState datafeedState = states.getOrDefault(datafeed.getId(), DatafeedState.STOPPED);
                    stats.add(new Response.DatafeedStats(datafeed.getId(), datafeedState));
                }
            } else {
                DatafeedConfig datafeed = mlMetadata.getDatafeed(request.getDatafeedId());
                if (datafeed == null) {
                    throw ExceptionsHelper.missingDatafeedException(request.getDatafeedId());
                }
                DatafeedState datafeedState = states.getOrDefault(datafeed.getId(), DatafeedState.STOPPED);
                stats.add(new Response.DatafeedStats(datafeed.getId(), datafeedState));
            }
            QueryPage<Response.DatafeedStats> statsPage = new QueryPage<>(stats, stats.size(), DatafeedConfig.RESULTS_FIELD);
            listener.onResponse(new Response(statsPage));
        }

        @Override
        protected ClusterBlockException checkBlock(Request request, ClusterState state) {
            return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
        }
    }
}
@ -0,0 +1,336 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.TransportGetAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.TransportSearchAction;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.StatusToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.action.util.PageParams;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.config.MlFilter;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

import static org.elasticsearch.action.ValidateActions.addValidationError;


public class GetFiltersAction extends Action<GetFiltersAction.Request, GetFiltersAction.Response, GetFiltersAction.RequestBuilder> {

    public static final GetFiltersAction INSTANCE = new GetFiltersAction();
    public static final String NAME = "cluster:admin/ml/filters/get";

    private GetFiltersAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends MasterNodeReadRequest<Request> {

        private String filterId;
        private PageParams pageParams;

        public Request() {
        }

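        // A request targets either a single filter by ID (served by a document GET) or a
        // page of filters (served by a search); the setters below enforce that the two
        // parameter styles are not mixed. A minimal usage sketch, assuming an
        // ElasticsearchClient "client" and a matching ActionListener "listener" are in scope:
        //
        //   GetFiltersAction.Request request = new GetFiltersAction.Request();
        //   request.setFilterId("my_filter");
        //   client.execute(GetFiltersAction.INSTANCE, request, listener);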
        public void setFilterId(String filterId) {
            if (pageParams != null) {
                throw new IllegalArgumentException("Param [" + MlFilter.ID.getPreferredName() + "] is incompatible with ["
                        + PageParams.FROM.getPreferredName() + ", " + PageParams.SIZE.getPreferredName() + "].");
            }
            this.filterId = filterId;
        }

        public String getFilterId() {
            return filterId;
        }

        public PageParams getPageParams() {
            return pageParams;
        }

        public void setPageParams(PageParams pageParams) {
            if (filterId != null) {
                throw new IllegalArgumentException("Param [" + PageParams.FROM.getPreferredName()
                        + ", " + PageParams.SIZE.getPreferredName() + "] is incompatible with ["
                        + MlFilter.ID.getPreferredName() + "].");
            }
            this.pageParams = pageParams;
        }

        @Override
        public ActionRequestValidationException validate() {
            ActionRequestValidationException validationException = null;
            if (pageParams == null && filterId == null) {
                validationException = addValidationError("Both [" + MlFilter.ID.getPreferredName() + "] and ["
                        + PageParams.FROM.getPreferredName() + ", " + PageParams.SIZE.getPreferredName() + "] "
                        + "cannot be null", validationException);
            }
            return validationException;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            filterId = in.readOptionalString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeOptionalString(filterId);
        }

        @Override
        public int hashCode() {
            return Objects.hash(filterId);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(filterId, other.filterId);
        }
    }

    public static class RequestBuilder extends MasterNodeReadOperationRequestBuilder<Request, Response, RequestBuilder> {

        public RequestBuilder(ElasticsearchClient client, GetFiltersAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends ActionResponse implements StatusToXContentObject {

        private QueryPage<MlFilter> filters;

        public Response(QueryPage<MlFilter> filters) {
            this.filters = filters;
        }

        Response() {
        }

        public QueryPage<MlFilter> getFilters() {
            return filters;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            filters = new QueryPage<>(in, MlFilter::new);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            filters.writeTo(out);
        }

        @Override
        public RestStatus status() {
            return RestStatus.OK;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            filters.doXContentBody(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(filters);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Response other = (Response) obj;
            return Objects.equals(filters, other.filters);
        }

        @Override
        public final String toString() {
            return Strings.toString(this);
        }
    }

    public static class TransportAction extends TransportMasterNodeReadAction<Request, Response> {

        private final TransportGetAction transportGetAction;
        private final TransportSearchAction transportSearchAction;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
                               ThreadPool threadPool, ActionFilters actionFilters,
                               IndexNameExpressionResolver indexNameExpressionResolver,
                               TransportGetAction transportGetAction, TransportSearchAction transportSearchAction) {
            super(settings, GetFiltersAction.NAME, transportService, clusterService, threadPool, actionFilters,
                    indexNameExpressionResolver, Request::new);
            this.transportGetAction = transportGetAction;
            this.transportSearchAction = transportSearchAction;
        }

        @Override
        protected String executor() {
            return ThreadPool.Names.SAME;
        }

        @Override
        protected Response newResponse() {
            return new Response();
        }

        @Override
        protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
            final String filterId = request.getFilterId();
            if (!Strings.isNullOrEmpty(filterId)) {
                getFilter(filterId, listener);
            } else if (request.getPageParams() != null) {
                getFilters(request.getPageParams(), listener);
            } else {
                throw new IllegalStateException("Both filterId and pageParams are null");
            }
        }

        @Override
        protected ClusterBlockException checkBlock(Request request, ClusterState state) {
            return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
        }

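        // Single-filter lookup: GET the document from the ML meta index and parse it back
        // into an MlFilter; a missing document is reported through the listener's failure path.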
        private void getFilter(String filterId, ActionListener<Response> listener) {
            GetRequest getRequest = new GetRequest(JobProvider.ML_META_INDEX, MlFilter.TYPE.getPreferredName(), filterId);
            transportGetAction.execute(getRequest, new ActionListener<GetResponse>() {
                @Override
                public void onResponse(GetResponse getDocResponse) {

                    try {
                        QueryPage<MlFilter> responseBody;
                        if (getDocResponse.isExists()) {
                            BytesReference docSource = getDocResponse.getSourceAsBytesRef();
                            XContentParser parser =
                                    XContentFactory.xContent(docSource).createParser(NamedXContentRegistry.EMPTY, docSource);
                            MlFilter filter = MlFilter.PARSER.apply(parser, null);
                            responseBody = new QueryPage<>(Collections.singletonList(filter), 1, MlFilter.RESULTS_FIELD);

                            Response filterResponse = new Response(responseBody);
                            listener.onResponse(filterResponse);
                        } else {
                            this.onFailure(QueryPage.emptyQueryPage(MlFilter.RESULTS_FIELD));
                        }

                    } catch (Exception e) {
                        this.onFailure(e);
                    }
                }

                @Override
                public void onFailure(Exception e) {
                    listener.onFailure(e);
                }
            });
        }

        private void getFilters(PageParams pageParams, ActionListener<Response> listener) {
            SearchSourceBuilder sourceBuilder = new SearchSourceBuilder()
                    .from(pageParams.getFrom())
                    .size(pageParams.getSize());

            SearchRequest searchRequest = new SearchRequest(new String[]{JobProvider.ML_META_INDEX}, sourceBuilder)
                    .types(MlFilter.TYPE.getPreferredName());

            transportSearchAction.execute(searchRequest, new ActionListener<SearchResponse>() {
                @Override
                public void onResponse(SearchResponse response) {

                    try {
                        List<MlFilter> docs = new ArrayList<>();
                        for (SearchHit hit : response.getHits().getHits()) {
                            BytesReference docSource = hit.getSourceRef();
                            XContentParser parser =
                                    XContentFactory.xContent(docSource).createParser(NamedXContentRegistry.EMPTY, docSource);
                            docs.add(MlFilter.PARSER.apply(parser, null));
                        }

                        Response filterResponse = new Response(new QueryPage<>(docs, docs.size(), MlFilter.RESULTS_FIELD));
                        listener.onResponse(filterResponse);

                    } catch (Exception e) {
                        this.onFailure(e);
                    }
                }

                @Override
                public void onFailure(Exception e) {
                    listener.onFailure(e);
                }
            });
        }
    }

}
@ -0,0 +1,320 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.action.util.PageParams;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.results.Influencer;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.Objects;

public class GetInfluencersAction
        extends Action<GetInfluencersAction.Request, GetInfluencersAction.Response, GetInfluencersAction.RequestBuilder> {

    public static final GetInfluencersAction INSTANCE = new GetInfluencersAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/results/influencers/get";

    private GetInfluencersAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends ActionRequest implements ToXContent {

        public static final ParseField START = new ParseField("start");
        public static final ParseField END = new ParseField("end");
        public static final ParseField INCLUDE_INTERIM = new ParseField("include_interim");
        public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score");
        public static final ParseField SORT_FIELD = new ParseField("sort");
        public static final ParseField DESCENDING_SORT = new ParseField("desc");

        private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);

        static {
            PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
            PARSER.declareStringOrNull(Request::setStart, START);
            PARSER.declareStringOrNull(Request::setEnd, END);
            PARSER.declareBoolean(Request::setIncludeInterim, INCLUDE_INTERIM);
            PARSER.declareObject(Request::setPageParams, PageParams.PARSER, PageParams.PAGE);
            PARSER.declareDouble(Request::setAnomalyScore, ANOMALY_SCORE);
            PARSER.declareString(Request::setSort, SORT_FIELD);
            PARSER.declareBoolean(Request::setDescending, DESCENDING_SORT);
        }

        public static Request parseRequest(String jobId, XContentParser parser) {
            Request request = PARSER.apply(parser, null);
            if (jobId != null) {
                request.jobId = jobId;
            }
            return request;
        }

        private String jobId;
        private String start;
        private String end;
        private boolean includeInterim = false;
        private PageParams pageParams = new PageParams();
        private double anomalyScoreFilter = 0.0;
        private String sort = Influencer.ANOMALY_SCORE.getPreferredName();
        private boolean descending = false;

        Request() {
        }

        public Request(String jobId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
        }

        public String getJobId() {
            return jobId;
        }

        public String getStart() {
            return start;
        }

        public void setStart(String start) {
            this.start = start;
        }

        public String getEnd() {
            return end;
        }

        public void setEnd(String end) {
            this.end = end;
        }

        public boolean isDescending() {
            return descending;
        }

        public void setDescending(boolean descending) {
            this.descending = descending;
        }

        public boolean isIncludeInterim() {
            return includeInterim;
        }

        public void setIncludeInterim(boolean includeInterim) {
            this.includeInterim = includeInterim;
        }

        public void setPageParams(PageParams pageParams) {
            this.pageParams = pageParams;
        }

        public PageParams getPageParams() {
            return pageParams;
        }

        public double getAnomalyScoreFilter() {
            return anomalyScoreFilter;
        }

        public void setAnomalyScore(double anomalyScoreFilter) {
            this.anomalyScoreFilter = anomalyScoreFilter;
        }

        public String getSort() {
            return sort;
        }

        public void setSort(String sort) {
            this.sort = ExceptionsHelper.requireNonNull(sort, SORT_FIELD.getPreferredName());
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobId = in.readString();
            includeInterim = in.readBoolean();
            pageParams = new PageParams(in);
            start = in.readOptionalString();
            end = in.readOptionalString();
            sort = in.readOptionalString();
            descending = in.readBoolean();
            anomalyScoreFilter = in.readDouble();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
            out.writeBoolean(includeInterim);
            pageParams.writeTo(out);
            out.writeOptionalString(start);
            out.writeOptionalString(end);
            out.writeOptionalString(sort);
            out.writeBoolean(descending);
            out.writeDouble(anomalyScoreFilter);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(Job.ID.getPreferredName(), jobId);
            builder.field(INCLUDE_INTERIM.getPreferredName(), includeInterim);
            builder.field(PageParams.PAGE.getPreferredName(), pageParams);
            builder.field(START.getPreferredName(), start);
            builder.field(END.getPreferredName(), end);
            builder.field(SORT_FIELD.getPreferredName(), sort);
            builder.field(DESCENDING_SORT.getPreferredName(), descending);
            builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScoreFilter);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId, includeInterim, pageParams, start, end, sort, descending, anomalyScoreFilter);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(jobId, other.jobId) && Objects.equals(start, other.start) && Objects.equals(end, other.end)
                    && Objects.equals(includeInterim, other.includeInterim) && Objects.equals(pageParams, other.pageParams)
                    && Objects.equals(anomalyScoreFilter, other.anomalyScoreFilter) && Objects.equals(descending, other.descending)
                    && Objects.equals(sort, other.sort);
        }
    }

    static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {

        RequestBuilder(ElasticsearchClient client) {
            super(client, INSTANCE, new Request());
        }
    }

    public static class Response extends ActionResponse implements ToXContentObject {

        private QueryPage<Influencer> influencers;

        Response() {
        }

        Response(QueryPage<Influencer> influencers) {
            this.influencers = influencers;
        }

        public QueryPage<Influencer> getInfluencers() {
            return influencers;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            influencers = new QueryPage<>(in, Influencer::new);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            influencers.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            influencers.doXContentBody(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(influencers);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Response other = (Response) obj;
            return Objects.equals(influencers, other.influencers);
        }

        @Override
        public final String toString() {
            return Strings.toString(this);
        }
    }

    public static class TransportAction extends HandledTransportAction<Request, Response> {

        private final JobProvider jobProvider;

        @Inject
        public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters,
                               IndexNameExpressionResolver indexNameExpressionResolver, JobProvider jobProvider) {
            super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new);
            this.jobProvider = jobProvider;
        }

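        // Translates the request into an influencer query (paging, interim results, anomaly
        // score threshold, sort) and delegates the search to the JobProvider.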
        @Override
        protected void doExecute(Request request, ActionListener<Response> listener) {
            InfluencersQueryBuilder.InfluencersQuery query = new InfluencersQueryBuilder().includeInterim(request.includeInterim)
                    .start(request.start).end(request.end).from(request.pageParams.getFrom()).size(request.pageParams.getSize())
                    .anomalyScoreThreshold(request.anomalyScoreFilter).sortField(request.sort).sortDescending(request.descending).build();
            jobProvider.influencers(request.jobId, query, page -> listener.onResponse(new Response(page)), listener::onFailure);
        }
    }

}
@ -0,0 +1,207 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.Objects;

public class GetJobsAction extends Action<GetJobsAction.Request, GetJobsAction.Response, GetJobsAction.RequestBuilder> {

    public static final GetJobsAction INSTANCE = new GetJobsAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/get";

    private GetJobsAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends MasterNodeReadRequest<Request> {

        private String jobId;

        public Request(String jobId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
        }

        Request() {}

        public String getJobId() {
            return jobId;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobId = in.readString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(jobId, other.jobId);
        }
    }

    public static class RequestBuilder extends MasterNodeReadOperationRequestBuilder<Request, Response, RequestBuilder> {

        public RequestBuilder(ElasticsearchClient client, GetJobsAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends ActionResponse implements ToXContentObject {

        private QueryPage<Job> jobs;

        public Response(QueryPage<Job> jobs) {
            this.jobs = jobs;
        }

        public Response() {}

        public QueryPage<Job> getResponse() {
            return jobs;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobs = new QueryPage<>(in, Job::new);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            jobs.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            jobs.doXContentBody(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobs);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Response other = (Response) obj;
            return Objects.equals(jobs, other.jobs);
        }

        @Override
        public final String toString() {
            return Strings.toString(this);
        }
    }

    public static class TransportAction extends TransportMasterNodeReadAction<Request, Response> {

        private final JobManager jobManager;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
                               ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                               JobManager jobManager) {
            super(settings, GetJobsAction.NAME, transportService, clusterService, threadPool, actionFilters,
                    indexNameExpressionResolver, Request::new);
            this.jobManager = jobManager;
        }

        @Override
        protected String executor() {
            return ThreadPool.Names.SAME;
        }

        @Override
        protected Response newResponse() {
            return new Response();
        }

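        // Jobs are read straight from the cluster state through the JobManager, so no index
        // access is needed here.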
        @Override
        protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
            logger.debug("Get job '{}'", request.getJobId());
            QueryPage<Job> jobs = jobManager.getJob(request.getJobId(), state);
            listener.onResponse(new Response(jobs));
        }

        @Override
        protected ClusterBlockException checkBlock(Request request, ClusterState state) {
            return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
        }
    }
}
@ -0,0 +1,413 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.tasks.BaseTasksRequest;
import org.elasticsearch.action.support.tasks.BaseTasksResponse;
import org.elasticsearch.action.support.tasks.TransportTasksAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSizeStats;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.stream.Collectors;

public class GetJobsStatsAction extends Action<GetJobsStatsAction.Request, GetJobsStatsAction.Response, GetJobsStatsAction.RequestBuilder> {

    public static final GetJobsStatsAction INSTANCE = new GetJobsStatsAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/stats/get";

    private static final String DATA_COUNTS = "data_counts";
    private static final String MODEL_SIZE_STATS = "model_size_stats";
    private static final String STATE = "state";

    private GetJobsStatsAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends BaseTasksRequest<Request> {

        private String jobId;

        // Used internally to expand the "_all" job ID to every job in the cluster:
        private List<String> expandedJobsIds;

        public Request(String jobId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
            this.expandedJobsIds = Collections.singletonList(jobId);
        }

        Request() {}

        public String getJobId() {
            return jobId;
        }

        @Override
        public boolean match(Task task) {
            return jobId.equals(Job.ALL) || InternalOpenJobAction.JobTask.match(task, jobId);
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobId = in.readString();
            expandedJobsIds = in.readList(StreamInput::readString);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
            out.writeStringList(expandedJobsIds);
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(jobId, other.jobId);
        }
    }

    public static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {

        public RequestBuilder(ElasticsearchClient client, GetJobsStatsAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends BaseTasksResponse implements ToXContentObject {

        public static class JobStats implements ToXContent, Writeable {
            private final String jobId;
            private DataCounts dataCounts;
            @Nullable
            private ModelSizeStats modelSizeStats;
            private JobState state;

            JobStats(String jobId, DataCounts dataCounts, @Nullable ModelSizeStats modelSizeStats, JobState state) {
                this.jobId = Objects.requireNonNull(jobId);
                this.dataCounts = Objects.requireNonNull(dataCounts);
                this.modelSizeStats = modelSizeStats;
                this.state = Objects.requireNonNull(state);
            }

            JobStats(StreamInput in) throws IOException {
                jobId = in.readString();
                dataCounts = new DataCounts(in);
                modelSizeStats = in.readOptionalWriteable(ModelSizeStats::new);
                state = JobState.fromStream(in);
            }

            public String getJobId() {
                return jobId;
            }

            public DataCounts getDataCounts() {
                return dataCounts;
            }

            public ModelSizeStats getModelSizeStats() {
                return modelSizeStats;
            }

            public JobState getState() {
                return state;
            }

            @Override
            public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
                builder.startObject();
                builder.field(Job.ID.getPreferredName(), jobId);
                builder.field(DATA_COUNTS, dataCounts);
                if (modelSizeStats != null) {
                    builder.field(MODEL_SIZE_STATS, modelSizeStats);
                }
                builder.field(STATE, state);
                builder.endObject();

                return builder;
            }

            @Override
            public void writeTo(StreamOutput out) throws IOException {
                out.writeString(jobId);
                dataCounts.writeTo(out);
                out.writeOptionalWriteable(modelSizeStats);
                state.writeTo(out);
            }

            @Override
            public int hashCode() {
                return Objects.hash(jobId, dataCounts, modelSizeStats, state);
            }

            @Override
            public boolean equals(Object obj) {
                if (obj == null) {
                    return false;
                }
                if (getClass() != obj.getClass()) {
                    return false;
                }
                JobStats other = (JobStats) obj;
                return Objects.equals(jobId, other.jobId)
                        && Objects.equals(this.dataCounts, other.dataCounts)
                        && Objects.equals(this.modelSizeStats, other.modelSizeStats)
                        && Objects.equals(this.state, other.state);
            }
        }

        private QueryPage<JobStats> jobsStats;

        public Response(QueryPage<JobStats> jobsStats) {
            super(Collections.emptyList(), Collections.emptyList());
            this.jobsStats = jobsStats;
        }

        Response(List<TaskOperationFailure> taskFailures, List<? extends FailedNodeException> nodeFailures,
                 QueryPage<JobStats> jobsStats) {
            super(taskFailures, nodeFailures);
            this.jobsStats = jobsStats;
        }

        public Response() {
            super(Collections.emptyList(), Collections.emptyList());
        }

        public QueryPage<JobStats> getResponse() {
            return jobsStats;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobsStats = new QueryPage<>(in, JobStats::new);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            jobsStats.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            jobsStats.doXContentBody(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobsStats);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Response other = (Response) obj;
            return Objects.equals(jobsStats, other.jobsStats);
        }

        @Override
        public final String toString() {
            return Strings.toString(this);
        }
    }

    public static class TransportAction extends TransportTasksAction<InternalOpenJobAction.JobTask, Request, Response,
            QueryPage<Response.JobStats>> {

        private final ClusterService clusterService;
        private final AutodetectProcessManager processManager;
        private final JobProvider jobProvider;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters,
                               ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver,
                               AutodetectProcessManager processManager, JobProvider jobProvider) {
            super(settings, GetJobsStatsAction.NAME, threadPool, clusterService, transportService, actionFilters,
                    indexNameExpressionResolver, Request::new, Response::new, ThreadPool.Names.MANAGEMENT);
            this.clusterService = clusterService;
            this.processManager = processManager;
            this.jobProvider = jobProvider;
        }

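        // The "_all" job ID is expanded to every job in the cluster state before the task
        // operations run; once the live (open-job) stats are in, gatherStatsForClosedJobs
        // below fills in persisted stats for the remaining jobs.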
        @Override
        protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
            if (Job.ALL.equals(request.getJobId())) {
                MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
                request.expandedJobsIds = mlMetadata.getJobs().keySet().stream().collect(Collectors.toList());
            }

            ActionListener<Response> finalListener = listener;
            listener = ActionListener.wrap(response -> gatherStatsForClosedJobs(request, response, finalListener), listener::onFailure);
            super.doExecute(task, request, listener);
        }

        @Override
        protected Response newResponse(Request request, List<QueryPage<Response.JobStats>> tasks,
                                       List<TaskOperationFailure> taskOperationFailures,
                                       List<FailedNodeException> failedNodeExceptions) {
            List<Response.JobStats> stats = new ArrayList<>();
            for (QueryPage<Response.JobStats> task : tasks) {
                stats.addAll(task.results());
            }
            return new Response(taskOperationFailures, failedNodeExceptions, new QueryPage<>(stats, stats.size(), Job.RESULTS_FIELD));
        }

        @Override
        protected QueryPage<Response.JobStats> readTaskResponse(StreamInput in) throws IOException {
            return new QueryPage<>(in, Response.JobStats::new);
        }

        @Override
        protected boolean accumulateExceptions() {
            return true;
        }

        @Override
        protected void taskOperation(Request request, InternalOpenJobAction.JobTask task,
                                     ActionListener<QueryPage<Response.JobStats>> listener) {
            logger.debug("Get stats for job '{}'", request.getJobId());
            MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
            Optional<Tuple<DataCounts, ModelSizeStats>> stats = processManager.getStatistics(request.getJobId());
            if (stats.isPresent()) {
                JobState jobState = mlMetadata.getAllocations().get(request.jobId).getState();
                Response.JobStats jobStats = new Response.JobStats(request.jobId, stats.get().v1(), stats.get().v2(), jobState);
                listener.onResponse(new QueryPage<>(Collections.singletonList(jobStats), 1, Job.RESULTS_FIELD));
            } else {
                listener.onResponse(new QueryPage<>(Collections.emptyList(), 0, Job.RESULTS_FIELD));
            }
        }

        // So far stats have been gathered only for jobs that are open; this method fetches
        // the persisted stats of the remaining (closed) jobs from the jobs index.
        void gatherStatsForClosedJobs(Request request, Response response, ActionListener<Response> listener) {
            List<String> jobIds = determineJobIdsWithoutLiveStats(request.expandedJobsIds, response.jobsStats.results());
            if (jobIds.isEmpty()) {
                listener.onResponse(response);
                return;
            }

            MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
            AtomicInteger counter = new AtomicInteger(jobIds.size());
            AtomicArray<Response.JobStats> jobStats = new AtomicArray<>(jobIds.size());
            for (int i = 0; i < jobIds.size(); i++) {
                int slot = i;
                String jobId = jobIds.get(i);
                gatherDataCountsAndModelSizeStats(jobId, (dataCounts, modelSizeStats) -> {
                    JobState jobState = mlMetadata.getAllocations().get(jobId).getState();
                    jobStats.set(slot, new Response.JobStats(jobId, dataCounts, modelSizeStats, jobState));
                    if (counter.decrementAndGet() == 0) {
                        List<Response.JobStats> results = response.getResponse().results();
                        results.addAll(jobStats.asList().stream()
                                .map(e -> e.value)
                                .collect(Collectors.toList()));
                        listener.onResponse(new Response(response.getTaskFailures(), response.getNodeFailures(),
                                new QueryPage<>(results, results.size(), Job.RESULTS_FIELD)));
                    }
                }, listener::onFailure);
            }
        }

        void gatherDataCountsAndModelSizeStats(String jobId, BiConsumer<DataCounts, ModelSizeStats> handler,
                                               Consumer<Exception> errorHandler) {
            jobProvider.dataCounts(jobId, dataCounts -> {
                jobProvider.modelSizeStats(jobId, modelSizeStats -> {
                    handler.accept(dataCounts, modelSizeStats);
                }, errorHandler);
            }, errorHandler);
        }

        static List<String> determineJobIdsWithoutLiveStats(List<String> requestedJobIds, List<Response.JobStats> stats) {
            List<String> jobIds = new ArrayList<>();
            outer: for (String jobId : requestedJobIds) {
                for (Response.JobStats stat : stats) {
                    if (stat.getJobId().equals(jobId)) {
                        // we already have stats, no need to get stats for this job from an index
                        continue outer;
                    }
                }
                jobIds.add(jobId);
            }
            return jobIds;
        }

    }
}
@ -0,0 +1,350 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.action.util.PageParams;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.Objects;

public class GetModelSnapshotsAction
        extends Action<GetModelSnapshotsAction.Request, GetModelSnapshotsAction.Response, GetModelSnapshotsAction.RequestBuilder> {

    public static final GetModelSnapshotsAction INSTANCE = new GetModelSnapshotsAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/model_snapshots/get";

    private GetModelSnapshotsAction() {
        super(NAME);
    }

    @Override
    public GetModelSnapshotsAction.RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public GetModelSnapshotsAction.Response newResponse() {
        return new Response();
    }

    public static class Request extends ActionRequest implements ToXContent {

        public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id");
        public static final ParseField SORT = new ParseField("sort");
        public static final ParseField DESCRIPTION = new ParseField("description");
        public static final ParseField START = new ParseField("start");
        public static final ParseField END = new ParseField("end");
        public static final ParseField DESC = new ParseField("desc");

        private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);

        static {
            PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
            PARSER.declareString((request, snapshotId) -> request.snapshotId = snapshotId, SNAPSHOT_ID);
            PARSER.declareString(Request::setDescriptionString, DESCRIPTION);
            PARSER.declareString(Request::setStart, START);
            PARSER.declareString(Request::setEnd, END);
            PARSER.declareString(Request::setSort, SORT);
            PARSER.declareBoolean(Request::setDescOrder, DESC);
            PARSER.declareObject(Request::setPageParams, PageParams.PARSER, PageParams.PAGE);
        }

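        // Identifiers passed explicitly (for example from the REST path) take precedence
        // over any values supplied in the parsed request body.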
        public static Request parseRequest(String jobId, String snapshotId, XContentParser parser) {
            Request request = PARSER.apply(parser, null);
            if (jobId != null) {
                request.jobId = jobId;
            }
            if (snapshotId != null) {
                request.snapshotId = snapshotId;
            }
            return request;
        }

        private String jobId;
        private String snapshotId;
        private String sort;
        private String description;
        private String start;
        private String end;
        private boolean desc;
        private PageParams pageParams = new PageParams();

        Request() {
        }

        public Request(String jobId, String snapshotId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
            this.snapshotId = snapshotId;
        }

        public String getJobId() {
            return jobId;
        }

        @Nullable
        public String getSnapshotId() {
            return snapshotId;
        }

        @Nullable
        public String getSort() {
            return sort;
        }

        public void setSort(String sort) {
            this.sort = sort;
        }

        public boolean getDescOrder() {
            return desc;
        }

        public void setDescOrder(boolean desc) {
            this.desc = desc;
        }

        public PageParams getPageParams() {
            return pageParams;
        }

        public void setPageParams(PageParams pageParams) {
            this.pageParams = ExceptionsHelper.requireNonNull(pageParams, PageParams.PAGE.getPreferredName());
        }

        @Nullable
        public String getStart() {
            return start;
        }

        public void setStart(String start) {
            this.start = ExceptionsHelper.requireNonNull(start, START.getPreferredName());
        }

        @Nullable
        public String getEnd() {
            return end;
        }

        public void setEnd(String end) {
            this.end = ExceptionsHelper.requireNonNull(end, END.getPreferredName());
        }

        @Nullable
        public String getDescriptionString() {
            return description;
        }

        public void setDescriptionString(String description) {
            this.description = ExceptionsHelper.requireNonNull(description, DESCRIPTION.getPreferredName());
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobId = in.readString();
            snapshotId = in.readOptionalString();
            sort = in.readOptionalString();
            description = in.readOptionalString();
            start = in.readOptionalString();
            end = in.readOptionalString();
            desc = in.readBoolean();
            pageParams = new PageParams(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
            out.writeOptionalString(snapshotId);
            out.writeOptionalString(sort);
            out.writeOptionalString(description);
            out.writeOptionalString(start);
            out.writeOptionalString(end);
            out.writeBoolean(desc);
            pageParams.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(Job.ID.getPreferredName(), jobId);
            if (snapshotId != null) {
                builder.field(SNAPSHOT_ID.getPreferredName(), snapshotId);
            }
            if (description != null) {
                builder.field(DESCRIPTION.getPreferredName(), description);
            }
            if (start != null) {
                builder.field(START.getPreferredName(), start);
            }
            if (end != null) {
                builder.field(END.getPreferredName(), end);
            }
            if (sort != null) {
                builder.field(SORT.getPreferredName(), sort);
            }
            builder.field(DESC.getPreferredName(), desc);
|
||||
builder.field(PageParams.PAGE.getPreferredName(), pageParams);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(jobId, snapshotId, description, start, end, sort, desc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
Request other = (Request) obj;
|
||||
return Objects.equals(jobId, other.jobId) && Objects.equals(snapshotId, other.snapshotId)
|
||||
&& Objects.equals(description, other.description) && Objects.equals(start, other.start)
|
||||
&& Objects.equals(end, other.end) && Objects.equals(sort, other.sort) && Objects.equals(desc, other.desc);
|
||||
}
|
||||
}
|
||||
|
||||
public static class Response extends ActionResponse implements ToXContentObject {
|
||||
|
||||
private QueryPage<ModelSnapshot> page;
|
||||
|
||||
public Response(QueryPage<ModelSnapshot> page) {
|
||||
this.page = page;
|
||||
}
|
||||
|
||||
Response() {
|
||||
}
|
||||
|
||||
public QueryPage<ModelSnapshot> getPage() {
|
||||
return page;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
page = new QueryPage<>(in, ModelSnapshot::new);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
page.writeTo(out);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
page.doXContentBody(builder, params);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(page);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
Response other = (Response) obj;
|
||||
return Objects.equals(page, other.page);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final String toString() {
|
||||
return Strings.toString(this);
|
||||
}
|
||||
}
|
||||
|
||||
public static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {
|
||||
|
||||
public RequestBuilder(ElasticsearchClient client, GetModelSnapshotsAction action) {
|
||||
super(client, action, new Request());
|
||||
}
|
||||
}
|
||||
|
||||
public static class TransportAction extends HandledTransportAction<Request, Response> {
|
||||
|
||||
private final JobProvider jobProvider;
|
||||
|
||||
@Inject
|
||||
public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, JobProvider jobProvider) {
|
||||
super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new);
|
||||
this.jobProvider = jobProvider;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doExecute(Request request, ActionListener<Response> listener) {
|
||||
logger.debug("Get model snapshots for job {} snapshot ID {}. from = {}, size = {}"
|
||||
+ " start = '{}', end='{}', sort={} descending={}, description filter={}",
|
||||
request.getJobId(), request.getSnapshotId(), request.pageParams.getFrom(), request.pageParams.getSize(),
|
||||
request.getStart(), request.getEnd(), request.getSort(), request.getDescOrder(), request.getDescriptionString());
|
||||
|
||||
jobProvider.modelSnapshots(request.getJobId(), request.pageParams.getFrom(), request.pageParams.getSize(),
|
||||
request.getStart(), request.getEnd(), request.getSort(), request.getDescOrder(), request.getSnapshotId(),
|
||||
request.getDescriptionString(),
|
||||
page -> {
|
||||
clearQuantiles(page);
|
||||
listener.onResponse(new Response(page));
|
||||
}, listener::onFailure);
|
||||
}
|
||||
|
||||
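        // Each returned snapshot has its quantiles removed; quantiles are internal model state
        // and can be very large, so they are presumably stripped to keep responses small.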
        public static void clearQuantiles(QueryPage<ModelSnapshot> page) {
            if (page.results() != null) {
                for (ModelSnapshot modelSnapshot : page.results()) {
                    modelSnapshot.setQuantiles(null);
                }
            }
        }
    }

}
@ -0,0 +1,364 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.action.util.PageParams;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.persistence.RecordsQueryBuilder;
import org.elasticsearch.xpack.ml.job.results.AnomalyRecord;
import org.elasticsearch.xpack.ml.job.results.Influencer;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.Objects;

public class GetRecordsAction extends Action<GetRecordsAction.Request, GetRecordsAction.Response, GetRecordsAction.RequestBuilder> {

    public static final GetRecordsAction INSTANCE = new GetRecordsAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/results/records/get";

    private GetRecordsAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends ActionRequest implements ToXContent {

        public static final ParseField START = new ParseField("start");
        public static final ParseField END = new ParseField("end");
        public static final ParseField INCLUDE_INTERIM = new ParseField("include_interim");
        public static final ParseField ANOMALY_SCORE_FILTER = new ParseField("anomaly_score");
        public static final ParseField SORT = new ParseField("sort");
        public static final ParseField DESCENDING = new ParseField("desc");
        public static final ParseField MAX_NORMALIZED_PROBABILITY = new ParseField("normalized_probability");
        public static final ParseField PARTITION_VALUE = new ParseField("partition_value");

        private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);

        static {
            PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
            PARSER.declareStringOrNull(Request::setStart, START);
            PARSER.declareStringOrNull(Request::setEnd, END);
            PARSER.declareString(Request::setPartitionValue, PARTITION_VALUE);
            PARSER.declareString(Request::setSort, SORT);
            PARSER.declareBoolean(Request::setDescending, DESCENDING);
            PARSER.declareBoolean(Request::setIncludeInterim, INCLUDE_INTERIM);
            PARSER.declareObject(Request::setPageParams, PageParams.PARSER, PageParams.PAGE);
            PARSER.declareDouble(Request::setAnomalyScore, ANOMALY_SCORE_FILTER);
            PARSER.declareDouble(Request::setMaxNormalizedProbability, MAX_NORMALIZED_PROBABILITY);
        }

        public static Request parseRequest(String jobId, XContentParser parser) {
            Request request = PARSER.apply(parser, null);
            if (jobId != null) {
                request.jobId = jobId;
            }
            return request;
        }

        private String jobId;
        private String start;
        private String end;
        private boolean includeInterim = false;
        private PageParams pageParams = new PageParams();
        private double anomalyScoreFilter = 0.0;
        private String sort = Influencer.ANOMALY_SCORE.getPreferredName();
        private boolean descending = false;
        private double maxNormalizedProbability = 0.0;
        private String partitionValue;

        Request() {
        }

        public Request(String jobId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
        }

        public String getJobId() {
            return jobId;
        }

        public String getStart() {
            return start;
        }

        public void setStart(String start) {
            this.start = start;
        }

        public String getEnd() {
            return end;
        }

        public void setEnd(String end) {
            this.end = end;
        }

        public boolean isDescending() {
            return descending;
        }

        public void setDescending(boolean descending) {
            this.descending = descending;
        }

        public boolean isIncludeInterim() {
            return includeInterim;
        }

        public void setIncludeInterim(boolean includeInterim) {
            this.includeInterim = includeInterim;
        }

        public void setPageParams(PageParams pageParams) {
            this.pageParams = pageParams;
        }

        public PageParams getPageParams() {
            return pageParams;
        }

        public double getAnomalyScoreFilter() {
            return anomalyScoreFilter;
        }

        public void setAnomalyScore(double anomalyScoreFilter) {
            this.anomalyScoreFilter = anomalyScoreFilter;
        }

        public String getSort() {
            return sort;
        }

        public void setSort(String sort) {
            this.sort = ExceptionsHelper.requireNonNull(sort, SORT.getPreferredName());
        }

        public double getMaxNormalizedProbability() {
            return maxNormalizedProbability;
        }

        public void setMaxNormalizedProbability(double maxNormalizedProbability) {
            this.maxNormalizedProbability = maxNormalizedProbability;
        }

        public String getPartitionValue() {
            return partitionValue;
        }

        public void setPartitionValue(String partitionValue) {
            this.partitionValue = ExceptionsHelper.requireNonNull(partitionValue, PARTITION_VALUE.getPreferredName());
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobId = in.readString();
            includeInterim = in.readBoolean();
            pageParams = new PageParams(in);
            start = in.readOptionalString();
            end = in.readOptionalString();
            sort = in.readOptionalString();
            descending = in.readBoolean();
            anomalyScoreFilter = in.readDouble();
            maxNormalizedProbability = in.readDouble();
            partitionValue = in.readOptionalString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
            out.writeBoolean(includeInterim);
            pageParams.writeTo(out);
            out.writeOptionalString(start);
            out.writeOptionalString(end);
            out.writeOptionalString(sort);
            out.writeBoolean(descending);
            out.writeDouble(anomalyScoreFilter);
            out.writeDouble(maxNormalizedProbability);
            out.writeOptionalString(partitionValue);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(Job.ID.getPreferredName(), jobId);
            builder.field(START.getPreferredName(), start);
            builder.field(END.getPreferredName(), end);
            builder.field(SORT.getPreferredName(), sort);
            builder.field(DESCENDING.getPreferredName(), descending);
            builder.field(ANOMALY_SCORE_FILTER.getPreferredName(), anomalyScoreFilter);
            builder.field(INCLUDE_INTERIM.getPreferredName(), includeInterim);
            builder.field(MAX_NORMALIZED_PROBABILITY.getPreferredName(), maxNormalizedProbability);
            builder.field(PageParams.PAGE.getPreferredName(), pageParams);
            if (partitionValue != null) {
                builder.field(PARTITION_VALUE.getPreferredName(), partitionValue);
            }
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId, start, end, sort, descending, anomalyScoreFilter, includeInterim, maxNormalizedProbability,
                    pageParams, partitionValue);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(jobId, other.jobId) &&
                    Objects.equals(start, other.start) &&
                    Objects.equals(end, other.end) &&
                    Objects.equals(sort, other.sort) &&
                    Objects.equals(descending, other.descending) &&
                    Objects.equals(anomalyScoreFilter, other.anomalyScoreFilter) &&
                    Objects.equals(includeInterim, other.includeInterim) &&
                    Objects.equals(maxNormalizedProbability, other.maxNormalizedProbability) &&
                    Objects.equals(pageParams, other.pageParams) &&
                    Objects.equals(partitionValue, other.partitionValue);
        }
    }

    static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {

        RequestBuilder(ElasticsearchClient client) {
            super(client, INSTANCE, new Request());
        }
    }

    public static class Response extends ActionResponse implements ToXContentObject {

        private QueryPage<AnomalyRecord> records;

        Response() {
        }

        Response(QueryPage<AnomalyRecord> records) {
            this.records = records;
        }

        public QueryPage<AnomalyRecord> getRecords() {
            return records;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            records = new QueryPage<>(in, AnomalyRecord::new);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            records.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            records.doXContentBody(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(records);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Response other = (Response) obj;
            return Objects.equals(records, other.records);
        }

        @Override
        public final String toString() {
            return Strings.toString(this);
        }
    }

    public static class TransportAction extends HandledTransportAction<Request, Response> {

        private final JobProvider jobProvider;

        @Inject
        public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService,
                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                               JobProvider jobProvider) {
            super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new);
            this.jobProvider = jobProvider;
        }

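        // Translates the request into a RecordsQuery and delegates the search to the
        // JobProvider, which invokes the listener with the resulting page of records.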
        @Override
        protected void doExecute(Request request, ActionListener<Response> listener) {
            RecordsQueryBuilder.RecordsQuery query = new RecordsQueryBuilder()
                    .includeInterim(request.includeInterim)
                    .epochStart(request.start)
                    .epochEnd(request.end)
                    .from(request.pageParams.getFrom())
                    .size(request.pageParams.getSize())
                    .anomalyScoreThreshold(request.anomalyScoreFilter)
                    .sortField(request.sort)
                    .sortDescending(request.descending)
                    .build();
            jobProvider.records(request.jobId, query, page -> listener.onResponse(new Response(page)), listener::onFailure);
        }
    }

}
@ -0,0 +1,135 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.tasks.CancellableTask;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;

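/**
 * Internal action that actually opens the job: its request creates a cancellable
 * {@link JobTask}, and cancelling that task closes the job again. External callers
 * go through {@link OpenJobAction}, which delegates here and then waits for the
 * job state to become OPENED.
 */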
public class InternalOpenJobAction extends Action<InternalOpenJobAction.Request, InternalOpenJobAction.Response,
        InternalOpenJobAction.RequestBuilder> {

    public static final InternalOpenJobAction INSTANCE = new InternalOpenJobAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/internal_open";

    private InternalOpenJobAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends OpenJobAction.Request {

        public Request(String jobId) {
            super(jobId);
        }

        Request() {
            super();
        }

        @Override
        public Task createTask(long id, String type, String action, TaskId parentTaskId) {
            return new JobTask(getJobId(), id, type, action, parentTaskId);
        }
    }

    static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {

        RequestBuilder(ElasticsearchClient client, InternalOpenJobAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends ActionResponse {

        Response() {}
    }

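    /**
     * Cancellable task representing a running job; cancelling it runs the
     * cancelHandler installed by the transport action, which closes the job.
     */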
    public static class JobTask extends CancellableTask {

        private volatile Runnable cancelHandler;

        JobTask(String jobId, long id, String type, String action, TaskId parentTask) {
            super(id, type, action, "job-" + jobId, parentTask);
        }

        @Override
        public boolean shouldCancelChildrenOnCancellation() {
            return true;
        }

        @Override
        protected void onCancelled() {
            cancelHandler.run();
        }

        static boolean match(Task task, String expectedJobId) {
            String expectedDescription = "job-" + expectedJobId;
            return task instanceof JobTask && expectedDescription.equals(task.getDescription());
        }
    }

    public static class TransportAction extends HandledTransportAction<Request, Response> {

        private final AutodetectProcessManager autodetectProcessManager;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool,
                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                               AutodetectProcessManager autodetectProcessManager) {
            super(settings, InternalOpenJobAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
                    Request::new);
            this.autodetectProcessManager = autodetectProcessManager;
        }

        @Override
        protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
            JobTask jobTask = (JobTask) task;
            autodetectProcessManager.setJobState(request.getJobId(), JobState.OPENING, aVoid -> {
                jobTask.cancelHandler = () -> autodetectProcessManager.closeJob(request.getJobId());
                autodetectProcessManager.openJob(request.getJobId(), request.isIgnoreDowntime(), e -> {
                    if (e == null) {
                        listener.onResponse(new Response());
                    } else {
                        listener.onFailure(e);
                    }
                });
            }, listener::onFailure);
        }

        @Override
        protected void doExecute(Request request, ActionListener<Response> listener) {
            throw new IllegalStateException("shouldn't get invoked");
        }
    }
}
@ -0,0 +1,113 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.byscroll.AbstractBulkByScrollRequestBuilder;
import org.elasticsearch.action.bulk.byscroll.AsyncDeleteByQueryAction;
import org.elasticsearch.action.bulk.byscroll.BulkByScrollParallelizationHelper;
import org.elasticsearch.action.bulk.byscroll.BulkByScrollResponse;
import org.elasticsearch.action.bulk.byscroll.DeleteByQueryRequest;
import org.elasticsearch.action.bulk.byscroll.ParentBulkByScrollTask;
import org.elasticsearch.action.bulk.byscroll.WorkingBulkByScrollTask;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.client.ParentTaskAssigningClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

public class MlDeleteByQueryAction extends Action<DeleteByQueryRequest, BulkByScrollResponse,
        MlDeleteByQueryAction.MlDeleteByQueryRequestBuilder> {

    public static final MlDeleteByQueryAction INSTANCE = new MlDeleteByQueryAction();
    public static final String NAME = "indices:data/write/delete/mlbyquery";

    private MlDeleteByQueryAction() {
        super(NAME);
    }

    @Override
    public MlDeleteByQueryRequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new MlDeleteByQueryRequestBuilder(client, this);
    }

    @Override
    public BulkByScrollResponse newResponse() {
        return new BulkByScrollResponse();
    }

    public static class MlDeleteByQueryRequestBuilder extends
            AbstractBulkByScrollRequestBuilder<DeleteByQueryRequest, MlDeleteByQueryRequestBuilder> {

        public MlDeleteByQueryRequestBuilder(ElasticsearchClient client,
                                             Action<DeleteByQueryRequest, BulkByScrollResponse, MlDeleteByQueryRequestBuilder> action) {
            this(client, action, new SearchRequestBuilder(client, SearchAction.INSTANCE));
        }

        private MlDeleteByQueryRequestBuilder(ElasticsearchClient client,
                                              Action<DeleteByQueryRequest, BulkByScrollResponse, MlDeleteByQueryRequestBuilder> action,
                                              SearchRequestBuilder search) {
            super(client, action, search, new DeleteByQueryRequest(search.request()));
        }

        @Override
        protected MlDeleteByQueryRequestBuilder self() {
            return this;
        }

        @Override
        public MlDeleteByQueryRequestBuilder abortOnVersionConflict(boolean abortOnVersionConflict) {
            request.setAbortOnVersionConflict(abortOnVersionConflict);
            return this;
        }
    }

    public static class TransportAction extends HandledTransportAction<DeleteByQueryRequest, BulkByScrollResponse> {

        private final Client client;
        private final ScriptService scriptService;
        private final ClusterService clusterService;

        @Inject
        public TransportAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters,
                               IndexNameExpressionResolver resolver, Client client, TransportService transportService,
                               ScriptService scriptService, ClusterService clusterService) {
            super(settings, MlDeleteByQueryAction.NAME, threadPool, transportService, actionFilters, resolver, DeleteByQueryRequest::new);
            this.client = client;
            this.scriptService = scriptService;
            this.clusterService = clusterService;
        }

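        // With more than one slice the request is fanned out as child requests via the
        // parallelization helper; otherwise the delete-by-query runs directly on this node.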
        @Override
        public void doExecute(Task task, DeleteByQueryRequest request, ActionListener<BulkByScrollResponse> listener) {
            if (request.getSlices() > 1) {
                BulkByScrollParallelizationHelper.startSlices(client, taskManager, MlDeleteByQueryAction.INSTANCE,
                        clusterService.localNode().getId(), (ParentBulkByScrollTask) task, request, listener);
            } else {
                ClusterState state = clusterService.state();
                ParentTaskAssigningClient client = new ParentTaskAssigningClient(this.client, clusterService.localNode(), task);
                new AsyncDeleteByQueryAction((WorkingBulkByScrollTask) task, logger, client, threadPool, request, scriptService, state,
                        listener).start();
            }
        }

        @Override
        protected void doExecute(DeleteByQueryRequest request, ActionListener<BulkByScrollResponse> listener) {
            throw new UnsupportedOperationException("task required");
        }
    }
}
@ -0,0 +1,236 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.tasks.LoggingTaskListener;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.utils.JobStateObserver;

import java.io.IOException;
import java.util.Objects;

public class OpenJobAction extends Action<OpenJobAction.Request, OpenJobAction.Response, OpenJobAction.RequestBuilder> {

    public static final OpenJobAction INSTANCE = new OpenJobAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/open";

    private OpenJobAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends ActionRequest {

        public static final ParseField IGNORE_DOWNTIME = new ParseField("ignore_downtime");

        private String jobId;
        private boolean ignoreDowntime;
        private TimeValue openTimeout = TimeValue.timeValueSeconds(20);

        public Request(String jobId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
        }

        Request() {}

        public String getJobId() {
            return jobId;
        }

        public void setJobId(String jobId) {
            this.jobId = jobId;
        }

        public boolean isIgnoreDowntime() {
            return ignoreDowntime;
        }

        public void setIgnoreDowntime(boolean ignoreDowntime) {
            this.ignoreDowntime = ignoreDowntime;
        }

        public TimeValue getOpenTimeout() {
            return openTimeout;
        }

        public void setOpenTimeout(TimeValue openTimeout) {
            this.openTimeout = openTimeout;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobId = in.readString();
            ignoreDowntime = in.readBoolean();
            openTimeout = TimeValue.timeValueMillis(in.readVLong());
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
            out.writeBoolean(ignoreDowntime);
            out.writeVLong(openTimeout.millis());
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId, ignoreDowntime, openTimeout);
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null || obj.getClass() != getClass()) {
                return false;
            }
            OpenJobAction.Request other = (OpenJobAction.Request) obj;
            return Objects.equals(jobId, other.jobId) &&
                    Objects.equals(ignoreDowntime, other.ignoreDowntime) &&
                    Objects.equals(openTimeout, other.openTimeout);
        }
    }

    static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {

        RequestBuilder(ElasticsearchClient client, OpenJobAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends ActionResponse implements ToXContentObject {

        private boolean opened;

        Response() {}

        Response(boolean opened) {
            this.opened = opened;
        }

        public boolean isOpened() {
            return opened;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            opened = in.readBoolean();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeBoolean(opened);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field("opened", opened);
            builder.endObject();
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Response response = (Response) o;
            return opened == response.opened;
        }

        @Override
        public int hashCode() {
            return Objects.hash(opened);
        }
    }

    public static class TransportAction extends HandledTransportAction<Request, Response> {

        private final JobStateObserver observer;
        private final ClusterService clusterService;
        private final InternalOpenJobAction.TransportAction internalOpenJobAction;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool,
                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                               ClusterService clusterService, InternalOpenJobAction.TransportAction internalOpenJobAction) {
            super(settings, OpenJobAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new);
            this.clusterService = clusterService;
            this.observer = new JobStateObserver(threadPool, clusterService);
            this.internalOpenJobAction = internalOpenJobAction;
        }

        @Override
        protected void doExecute(Request request, ActionListener<Response> listener) {
            // This validation also happens in InternalOpenJobAction; the reason we repeat it here is that if it failed
            // there we would be unable to give the user immediate feedback: the task would be created and the validation
            // would fail in the background, whereas now the validation failure is part of the response being returned.
            MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
            validate(mlMetadata, request.getJobId());

            InternalOpenJobAction.Request internalRequest = new InternalOpenJobAction.Request(request.jobId);
            internalOpenJobAction.execute(internalRequest, LoggingTaskListener.instance());
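            // The internal action is fire-and-forget from this action's perspective; completion is
            // detected by watching for the job state to become OPENED, bounded by the open timeout.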
            observer.waitForState(request.getJobId(), request.openTimeout, JobState.OPENED, e -> {
                if (e != null) {
                    listener.onFailure(e);
                } else {
                    listener.onResponse(new Response(true));
                }
            });
        }

        /**
         * Fails fast, before trying to update the job state on the master node, if the job
         * doesn't exist or its state is not what it should be.
         */
        public static void validate(MlMetadata mlMetadata, String jobId) {
            MlMetadata.Builder builder = new MlMetadata.Builder(mlMetadata);
            builder.updateState(jobId, JobState.OPENING, null);
        }
    }
}
@ -0,0 +1,253 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.tasks.BaseTasksResponse;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.StatusToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.ml.job.config.DataDescription;
import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams;
import org.elasticsearch.xpack.ml.job.process.autodetect.params.TimeRange;

import java.io.IOException;
import java.util.Objects;
import java.util.Optional;

public class PostDataAction extends Action<PostDataAction.Request, PostDataAction.Response, PostDataAction.RequestBuilder> {

    public static final PostDataAction INSTANCE = new PostDataAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/data/post";

    private PostDataAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {

        RequestBuilder(ElasticsearchClient client, PostDataAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends BaseTasksResponse implements StatusToXContentObject, Writeable {

        private DataCounts dataCounts;

        Response(String jobId) {
            dataCounts = new DataCounts(jobId);
        }

        private Response() {
        }

        public Response(DataCounts counts) {
            super(null, null);
            this.dataCounts = counts;
        }

        public DataCounts getDataCounts() {
            return dataCounts;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            dataCounts = new DataCounts(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            dataCounts.writeTo(out);
        }

        @Override
        public RestStatus status() {
            return RestStatus.ACCEPTED;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            dataCounts.doXContentBody(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hashCode(dataCounts);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Response other = (Response) obj;
            return Objects.equals(dataCounts, other.dataCounts);
        }
    }

    public static class Request extends TransportJobTaskAction.JobTaskRequest<Request> {

        public static final ParseField RESET_START = new ParseField("reset_start");
        public static final ParseField RESET_END = new ParseField("reset_end");

        private String resetStart = "";
        private String resetEnd = "";
        private DataDescription dataDescription;
        private BytesReference content;

        Request() {
        }

        public Request(String jobId) {
            super(jobId);
        }

        public String getResetStart() {
            return resetStart;
        }

        public void setResetStart(String resetStart) {
            this.resetStart = resetStart;
        }

        public String getResetEnd() {
            return resetEnd;
        }

        public void setResetEnd(String resetEnd) {
            this.resetEnd = resetEnd;
        }

        public DataDescription getDataDescription() {
            return dataDescription;
        }

        public void setDataDescription(DataDescription dataDescription) {
            this.dataDescription = dataDescription;
        }

        public BytesReference getContent() {
            return content;
        }

        public void setContent(BytesReference content) {
            this.content = content;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            resetStart = in.readOptionalString();
            resetEnd = in.readOptionalString();
            dataDescription = in.readOptionalWriteable(DataDescription::new);
            content = in.readBytesReference();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeOptionalString(resetStart);
            out.writeOptionalString(resetEnd);
            out.writeOptionalWriteable(dataDescription);
            out.writeBytesReference(content);
        }

        @Override
        public int hashCode() {
            // content stream not included
            return Objects.hash(jobId, resetStart, resetEnd, dataDescription);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;

            // content stream not included
            return Objects.equals(jobId, other.jobId) &&
                    Objects.equals(resetStart, other.resetStart) &&
                    Objects.equals(resetEnd, other.resetEnd) &&
                    Objects.equals(dataDescription, other.dataDescription);
        }
    }

    public static class TransportAction extends TransportJobTaskAction<InternalOpenJobAction.JobTask, Request, Response> {

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ClusterService clusterService,
                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                               JobManager jobManager, AutodetectProcessManager processManager) {
            super(settings, PostDataAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
                    Request::new, Response::new, MlPlugin.THREAD_POOL_NAME, jobManager, processManager, Request::getJobId);
        }

        @Override
        protected Response readTaskResponse(StreamInput in) throws IOException {
            Response response = new Response();
            response.readFrom(in);
            return response;
        }

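        // Hands the payload off to the dedicated ML thread pool; processData can block while the
        // autodetect process consumes the content, so it should not run on the transport thread.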
        @Override
        protected void taskOperation(Request request, InternalOpenJobAction.JobTask task, ActionListener<Response> listener) {
            TimeRange timeRange = TimeRange.builder().startTime(request.getResetStart()).endTime(request.getResetEnd()).build();
            DataLoadParams params = new DataLoadParams(timeRange, Optional.ofNullable(request.getDataDescription()));
            threadPool.executor(MlPlugin.THREAD_POOL_NAME).execute(() -> {
                try {
                    DataCounts dataCounts = processManager.processData(request.getJobId(), request.content.streamInput(), params);
                    listener.onResponse(new Response(dataCounts));
                } catch (Exception e) {
                    listener.onFailure(e);
                }
            });
        }
    }
}
@ -0,0 +1,230 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;

import java.io.IOException;
import java.util.Objects;

public class PutDatafeedAction extends Action<PutDatafeedAction.Request, PutDatafeedAction.Response, PutDatafeedAction.RequestBuilder> {

    public static final PutDatafeedAction INSTANCE = new PutDatafeedAction();
    public static final String NAME = "cluster:admin/ml/datafeeds/put";

    private PutDatafeedAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends AcknowledgedRequest<Request> implements ToXContent {

        public static Request parseRequest(String datafeedId, XContentParser parser) {
            DatafeedConfig.Builder datafeed = DatafeedConfig.PARSER.apply(parser, null);
            datafeed.setId(datafeedId);
            return new Request(datafeed.build());
        }

        private DatafeedConfig datafeed;

        public Request(DatafeedConfig datafeed) {
            this.datafeed = datafeed;
        }

        Request() {
        }

        public DatafeedConfig getDatafeed() {
            return datafeed;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            datafeed = new DatafeedConfig(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            datafeed.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            datafeed.toXContent(builder, params);
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Request request = (Request) o;
            return Objects.equals(datafeed, request.datafeed);
        }

        @Override
        public int hashCode() {
            return Objects.hash(datafeed);
        }
    }

    public static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> {

        public RequestBuilder(ElasticsearchClient client, PutDatafeedAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends AcknowledgedResponse implements ToXContentObject {

        private DatafeedConfig datafeed;

        public Response(boolean acked, DatafeedConfig datafeed) {
            super(acked);
            this.datafeed = datafeed;
        }

        Response() {
        }

        public DatafeedConfig getResponse() {
            return datafeed;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            readAcknowledged(in);
            datafeed = new DatafeedConfig(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            writeAcknowledged(out);
            datafeed.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            datafeed.doXContentBody(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Response response = (Response) o;
            return Objects.equals(datafeed, response.datafeed);
        }

        @Override
        public int hashCode() {
            return Objects.hash(datafeed);
        }
    }

    public static class TransportAction extends TransportMasterNodeAction<Request, Response> {

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
                               ThreadPool threadPool, ActionFilters actionFilters,
                               IndexNameExpressionResolver indexNameExpressionResolver) {
            super(settings, PutDatafeedAction.NAME, transportService, clusterService, threadPool, actionFilters,
                    indexNameExpressionResolver, Request::new);
        }

        @Override
        protected String executor() {
            return ThreadPool.Names.SAME;
        }

        @Override
        protected Response newResponse() {
            return new Response();
        }

        @Override
        protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
            clusterService.submitStateUpdateTask("put-datafeed-" + request.getDatafeed().getId(),
                    new AckedClusterStateUpdateTask<Response>(request, listener) {

                        @Override
                        protected Response newResponse(boolean acknowledged) {
                            if (acknowledged) {
                                logger.info("Created datafeed [{}]", request.getDatafeed().getId());
                            }
                            return new Response(acknowledged, request.getDatafeed());
                        }

                        @Override
                        public ClusterState execute(ClusterState currentState) throws Exception {
                            return putDatafeed(request, currentState);
                        }
                    });
        }

private ClusterState putDatafeed(Request request, ClusterState clusterState) {
|
||||
MlMetadata currentMetadata = clusterState.getMetaData().custom(MlMetadata.TYPE);
|
||||
MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata)
|
||||
.putDatafeed(request.getDatafeed()).build();
|
||||
return ClusterState.builder(clusterState).metaData(
|
||||
MetaData.builder(clusterState.getMetaData()).putCustom(MlMetadata.TYPE, newMetadata).build())
|
||||
.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ClusterBlockException checkBlock(Request request, ClusterState state) {
|
||||
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
|
||||
}
|
||||
}
|
||||
}
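A minimal usage sketch for the action above, assuming a transport client named client and a datafeedConfig already built for an existing job (neither is defined in this commit):

    // Submit the put-datafeed request and block for the acked response.
    PutDatafeedAction.Request request = new PutDatafeedAction.Request(datafeedConfig);
    PutDatafeedAction.Response response = client.execute(PutDatafeedAction.INSTANCE, request).actionGet();
    DatafeedConfig stored = response.getResponse();   // echoes the config as written to cluster state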
@ -0,0 +1,203 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.index.TransportIndexAction;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.config.MlFilter;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.Objects;


public class PutFilterAction extends Action<PutFilterAction.Request, PutFilterAction.Response, PutFilterAction.RequestBuilder> {

    public static final PutFilterAction INSTANCE = new PutFilterAction();
    public static final String NAME = "cluster:admin/ml/filters/put";

    private PutFilterAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends MasterNodeReadRequest<Request> implements ToXContent {

        public static Request parseRequest(XContentParser parser) {
            MlFilter filter = MlFilter.PARSER.apply(parser, null);
            return new Request(filter);
        }

        private MlFilter filter;

        Request() {
        }

        public Request(MlFilter filter) {
            this.filter = ExceptionsHelper.requireNonNull(filter, "filter");
        }

        public MlFilter getFilter() {
            return this.filter;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            filter = new MlFilter(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            filter.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            filter.toXContent(builder, params);
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(filter);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(filter, other.filter);
        }
    }

    public static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> {

        public RequestBuilder(ElasticsearchClient client, PutFilterAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends AcknowledgedResponse {

        public Response() {
            super(true);
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            readAcknowledged(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            writeAcknowledged(out);
        }
    }

    // Extends TransportMasterNodeAction because the plan is to eventually store filters in the cluster state;
    // for now the filter is indexed as a document in the ML meta index.
    public static class TransportAction extends TransportMasterNodeAction<Request, Response> {

        private final TransportIndexAction transportIndexAction;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
                               ThreadPool threadPool, ActionFilters actionFilters,
                               IndexNameExpressionResolver indexNameExpressionResolver,
                               TransportIndexAction transportIndexAction) {
            super(settings, PutFilterAction.NAME, transportService, clusterService, threadPool, actionFilters,
                    indexNameExpressionResolver, Request::new);
            this.transportIndexAction = transportIndexAction;
        }

        @Override
        protected String executor() {
            return ThreadPool.Names.SAME;
        }

        @Override
        protected Response newResponse() {
            return new Response();
        }

        @Override
        protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
            MlFilter filter = request.getFilter();
            final String filterId = filter.getId();
            IndexRequest indexRequest = new IndexRequest(JobProvider.ML_META_INDEX, MlFilter.TYPE.getPreferredName(), filterId);
            XContentBuilder builder = XContentFactory.jsonBuilder();
            indexRequest.source(filter.toXContent(builder, ToXContent.EMPTY_PARAMS));
            transportIndexAction.execute(indexRequest, new ActionListener<IndexResponse>() {
                @Override
                public void onResponse(IndexResponse indexResponse) {
                    listener.onResponse(new Response());
                }

                @Override
                public void onFailure(Exception e) {
                    logger.error("Could not create filter with ID [" + filterId + "]", e);
                    throw new ResourceNotFoundException("Could not create filter with ID [" + filterId + "]", e);
                }
            });
        }

        @Override
        protected ClusterBlockException checkBlock(Request request, ClusterState state) {
            return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
        }
    }
}
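A minimal usage sketch, assuming an MlFilter constructor taking an id and a list of items (the exact signature is not shown in this commit) and a transport client named client:

    // Store a filter document in the ML meta index; the response is always acknowledged.
    MlFilter safeDomains = new MlFilter("safe_domains", Arrays.asList("elastic.co", "example.com"));
    client.execute(PutFilterAction.INSTANCE, new PutFilterAction.Request(safeDomains)).actionGet();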
@ -0,0 +1,213 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.config.Job;

import java.io.IOException;
import java.util.Objects;

public class PutJobAction extends Action<PutJobAction.Request, PutJobAction.Response, PutJobAction.RequestBuilder> {

    public static final PutJobAction INSTANCE = new PutJobAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/put";

    private PutJobAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends AcknowledgedRequest<Request> implements ToXContent {

        public static Request parseRequest(String jobId, XContentParser parser) {
            Job job = Job.PARSER.apply(parser, null).build(true, jobId);
            return new Request(job);
        }

        private Job job;

        public Request(Job job) {
            this.job = job;
        }

        Request() {
        }

        public Job getJob() {
            return job;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            job = new Job(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            job.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            job.toXContent(builder, params);
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Request request = (Request) o;
            return Objects.equals(job, request.job);
        }

        @Override
        public int hashCode() {
            return Objects.hash(job);
        }

        @Override
        public final String toString() {
            return Strings.toString(this);
        }
    }

    public static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> {

        public RequestBuilder(ElasticsearchClient client, PutJobAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends AcknowledgedResponse implements ToXContentObject {

        private Job job;

        public Response(boolean acked, Job job) {
            super(acked);
            this.job = job;
        }

        Response() {
        }

        public Job getResponse() {
            return job;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            readAcknowledged(in);
            job = new Job(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            writeAcknowledged(out);
            job.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            // Don't serialize acknowledged because current api directly serializes the job details
            builder.startObject();
            job.doXContentBody(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Response response = (Response) o;
            return Objects.equals(job, response.job);
        }

        @Override
        public int hashCode() {
            return Objects.hash(job);
        }
    }

    public static class TransportAction extends TransportMasterNodeAction<Request, Response> {

        private final JobManager jobManager;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
                               ThreadPool threadPool, ActionFilters actionFilters,
                               IndexNameExpressionResolver indexNameExpressionResolver, JobManager jobManager) {
            super(settings, PutJobAction.NAME, transportService, clusterService, threadPool, actionFilters,
                    indexNameExpressionResolver, Request::new);
            this.jobManager = jobManager;
        }

        @Override
        protected String executor() {
            return ThreadPool.Names.SAME;
        }

        @Override
        protected Response newResponse() {
            return new Response();
        }

        @Override
        protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
            jobManager.putJob(request, listener);
        }

        @Override
        protected ClusterBlockException checkBlock(Request request, ClusterState state) {
            return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
        }
    }
}
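A minimal usage sketch, assuming an XContentParser named parser positioned on a JSON job definition and a transport client named client (both constructed elsewhere):

    // Parse the job body, submit it, and read back the job as stored.
    PutJobAction.Request request = PutJobAction.Request.parseRequest("my-job", parser);
    PutJobAction.Response response = client.execute(PutJobAction.INSTANCE, request).actionGet();
    Job created = response.getResponse();   // serialized without the acknowledged flag, per toXContent above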
@ -0,0 +1,392 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.StatusToXContentObject;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.job.metadata.Allocation;
import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister;
import org.elasticsearch.xpack.ml.job.persistence.JobDataDeleter;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;

public class RevertModelSnapshotAction
        extends Action<RevertModelSnapshotAction.Request, RevertModelSnapshotAction.Response, RevertModelSnapshotAction.RequestBuilder> {

    public static final RevertModelSnapshotAction INSTANCE = new RevertModelSnapshotAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/model_snapshots/revert";

    private RevertModelSnapshotAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends AcknowledgedRequest<Request> implements ToXContent {

        public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id");
        public static final ParseField DELETE_INTERVENING = new ParseField("delete_intervening_results");

        private static ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);

        static {
            PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
            PARSER.declareString((request, snapshotId) -> request.snapshotId = snapshotId, SNAPSHOT_ID);
            PARSER.declareBoolean(Request::setDeleteInterveningResults, DELETE_INTERVENING);
        }

        public static Request parseRequest(String jobId, String snapshotId, XContentParser parser) {
            Request request = PARSER.apply(parser, null);
            if (jobId != null) {
                request.jobId = jobId;
            }
            if (snapshotId != null) {
                request.snapshotId = snapshotId;
            }
            return request;
        }

        private String jobId;
        private String snapshotId;
        private boolean deleteInterveningResults;

        Request() {
        }

        public Request(String jobId, String snapshotId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
            this.snapshotId = ExceptionsHelper.requireNonNull(snapshotId, SNAPSHOT_ID.getPreferredName());
        }

        public String getJobId() {
            return jobId;
        }

        public String getSnapshotId() {
            return snapshotId;
        }

        public boolean getDeleteInterveningResults() {
            return deleteInterveningResults;
        }

        public void setDeleteInterveningResults(boolean deleteInterveningResults) {
            this.deleteInterveningResults = deleteInterveningResults;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobId = in.readString();
            snapshotId = in.readString();
            deleteInterveningResults = in.readBoolean();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
            out.writeString(snapshotId);
            out.writeBoolean(deleteInterveningResults);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(Job.ID.getPreferredName(), jobId);
            builder.field(SNAPSHOT_ID.getPreferredName(), snapshotId);
            builder.field(DELETE_INTERVENING.getPreferredName(), deleteInterveningResults);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId, snapshotId, deleteInterveningResults);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(jobId, other.jobId) && Objects.equals(snapshotId, other.snapshotId)
                    && Objects.equals(deleteInterveningResults, other.deleteInterveningResults);
        }
    }

    static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> {

        RequestBuilder(ElasticsearchClient client) {
            super(client, INSTANCE, new Request());
        }
    }

    public static class Response extends AcknowledgedResponse implements StatusToXContentObject {

        private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged");
        private static final ParseField MODEL = new ParseField("model");
        private ModelSnapshot model;

        Response() {
        }

        public Response(ModelSnapshot modelSnapshot) {
            super(true);
            model = modelSnapshot;
        }

        public ModelSnapshot getModel() {
            return model;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            readAcknowledged(in);
            model = new ModelSnapshot(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            writeAcknowledged(out);
            model.writeTo(out);
        }

        @Override
        public RestStatus status() {
            return RestStatus.OK;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(ACKNOWLEDGED.getPreferredName(), true);
            builder.field(MODEL.getPreferredName());
            builder = model.toXContent(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(model);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Response other = (Response) obj;
            return Objects.equals(model, other.model);
        }

        @Override
        public final String toString() {
            return Strings.toString(this);
        }
    }

    public static class TransportAction extends TransportMasterNodeAction<Request, Response> {

        private final Client client;
        private final JobManager jobManager;
        private final JobProvider jobProvider;
        private final JobDataCountsPersister jobDataCountsPersister;

        @Inject
        public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters,
                               IndexNameExpressionResolver indexNameExpressionResolver, JobManager jobManager, JobProvider jobProvider,
                               ClusterService clusterService, Client client, JobDataCountsPersister jobDataCountsPersister) {
            super(settings, NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, Request::new);
            this.client = client;
            this.jobManager = jobManager;
            this.jobProvider = jobProvider;
            this.jobDataCountsPersister = jobDataCountsPersister;
        }

        @Override
        protected String executor() {
            return ThreadPool.Names.SAME;
        }

        @Override
        protected Response newResponse() {
            return new Response();
        }

        @Override
        protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
            logger.debug("Received request to revert to snapshot id '{}' for job '{}', deleting intervening results: {}",
                    request.getSnapshotId(), request.getJobId(), request.getDeleteInterveningResults());

            QueryPage<Job> job = jobManager.getJob(request.getJobId(), clusterService.state());
            Allocation allocation = jobManager.getJobAllocation(request.getJobId());
            if (job.count() > 0 && allocation.getState().equals(JobState.CLOSED) == false) {
                throw ExceptionsHelper.conflictStatusException(Messages.getMessage(Messages.REST_JOB_NOT_CLOSED_REVERT));
            }

            getModelSnapshot(request, jobProvider, modelSnapshot -> {
                ActionListener<Response> wrappedListener = listener;
                if (request.getDeleteInterveningResults()) {
                    wrappedListener = wrapDeleteOldDataListener(wrappedListener, modelSnapshot, request.getJobId());
                    wrappedListener = wrapRevertDataCountsListener(wrappedListener, modelSnapshot, request.getJobId());
                }
                jobManager.revertSnapshot(request, wrappedListener, modelSnapshot);
            }, listener::onFailure);
        }

        private void getModelSnapshot(Request request, JobProvider provider, Consumer<ModelSnapshot> handler,
                                      Consumer<Exception> errorHandler) {
            logger.info("Reverting to snapshot '" + request.getSnapshotId() + "'");

            provider.modelSnapshots(request.getJobId(), 0, 1, null, null,
                    ModelSnapshot.TIMESTAMP.getPreferredName(), true, request.getSnapshotId(), request.getDescription(),
                    page -> {
                        List<ModelSnapshot> revertCandidates = page.results();
                        if (revertCandidates == null || revertCandidates.isEmpty()) {
                            throw new ResourceNotFoundException(
                                    Messages.getMessage(Messages.REST_NO_SUCH_MODEL_SNAPSHOT, request.getJobId()));
                        }
                        ModelSnapshot modelSnapshot = revertCandidates.get(0);

                        // The quantiles can be large, and totally dominate the output -
                        // it's clearer to remove them
                        modelSnapshot.setQuantiles(null);
                        handler.accept(modelSnapshot);
                    }, errorHandler);
        }

        private ActionListener<RevertModelSnapshotAction.Response> wrapDeleteOldDataListener(
                ActionListener<RevertModelSnapshotAction.Response> listener,
                ModelSnapshot modelSnapshot, String jobId) {

            // If we need to delete buckets that occurred after the snapshot, we
            // wrap the listener with one that invokes the OldDataRemover on
            // acknowledged responses
            return ActionListener.wrap(response -> {
                if (response.isAcknowledged()) {
                    Date deleteAfter = modelSnapshot.getLatestResultTimeStamp();
                    logger.debug("Removing intervening records: last record: " + deleteAfter + ", last result: "
                            + modelSnapshot.getLatestResultTimeStamp());

                    logger.info("Deleting results after '" + deleteAfter + "'");

                    // NORELEASE: JobDataDeleter is basically delete-by-query.
                    // We should replace this whole abstraction with DBQ eventually
                    JobDataDeleter dataDeleter = new JobDataDeleter(client, jobId);
                    dataDeleter.deleteResultsFromTime(deleteAfter.getTime() + 1, new ActionListener<Boolean>() {
                        @Override
                        public void onResponse(Boolean success) {
                            dataDeleter.commit(ActionListener.wrap(
                                    bulkItemResponses -> listener.onResponse(response),
                                    listener::onFailure));
                        }

                        @Override
                        public void onFailure(Exception e) {
                            listener.onFailure(e);
                        }
                    });
                }
            }, listener::onFailure);
        }

        private ActionListener<RevertModelSnapshotAction.Response> wrapRevertDataCountsListener(
                ActionListener<RevertModelSnapshotAction.Response> listener,
                ModelSnapshot modelSnapshot, String jobId) {

            return ActionListener.wrap(response -> {
                if (response.isAcknowledged()) {
                    jobProvider.dataCounts(jobId, counts -> {
                        counts.setLatestRecordTimeStamp(modelSnapshot.getLatestRecordTimeStamp());
                        jobDataCountsPersister.persistDataCounts(jobId, counts, new ActionListener<Boolean>() {
                            @Override
                            public void onResponse(Boolean aBoolean) {
                                listener.onResponse(response);
                            }

                            @Override
                            public void onFailure(Exception e) {
                                listener.onFailure(e);
                            }
                        });
                    }, listener::onFailure);
                }
            }, listener::onFailure);
        }

        @Override
        protected ClusterBlockException checkBlock(Request request, ClusterState state) {
            return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
        }
    }

}
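A minimal usage sketch, assuming a transport client named client and a job that is already CLOSED (the action rejects open jobs with a conflict):

    // Revert to a named snapshot and prune results recorded after its latest result time.
    RevertModelSnapshotAction.Request request = new RevertModelSnapshotAction.Request("my-job", "snapshot-1");
    request.setDeleteInterveningResults(true);
    RevertModelSnapshotAction.Response response = client.execute(RevertModelSnapshotAction.INSTANCE, request).actionGet();
    ModelSnapshot reverted = response.getModel();   // quantiles are nulled out before serialization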
@ -0,0 +1,292 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedJobRunner;
import org.elasticsearch.xpack.ml.datafeed.DatafeedJobValidator;
import org.elasticsearch.xpack.ml.datafeed.DatafeedState;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.metadata.Allocation;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.persistent.PersistentActionRegistry;
import org.elasticsearch.xpack.persistent.PersistentActionRequest;
import org.elasticsearch.xpack.persistent.PersistentActionResponse;
import org.elasticsearch.xpack.persistent.PersistentActionService;
import org.elasticsearch.xpack.persistent.PersistentTask;
import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;
import org.elasticsearch.xpack.persistent.TransportPersistentAction;

import java.io.IOException;
import java.util.Objects;
import java.util.function.Predicate;

public class StartDatafeedAction
        extends Action<StartDatafeedAction.Request, PersistentActionResponse, StartDatafeedAction.RequestBuilder> {

    public static final ParseField START_TIME = new ParseField("start");
    public static final ParseField END_TIME = new ParseField("end");

    public static final StartDatafeedAction INSTANCE = new StartDatafeedAction();
    public static final String NAME = "cluster:admin/ml/datafeeds/start";

    private StartDatafeedAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public PersistentActionResponse newResponse() {
        return new PersistentActionResponse();
    }

    public static class Request extends PersistentActionRequest implements ToXContent {

        public static ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);

        static {
            PARSER.declareString((request, datafeedId) -> request.datafeedId = datafeedId, DatafeedConfig.ID);
            PARSER.declareLong((request, startTime) -> request.startTime = startTime, START_TIME);
            PARSER.declareLong(Request::setEndTime, END_TIME);
        }

        public static Request parseRequest(String datafeedId, XContentParser parser) {
            Request request = PARSER.apply(parser, null);
            if (datafeedId != null) {
                request.datafeedId = datafeedId;
            }
            return request;
        }

        private String datafeedId;
        private long startTime;
        private Long endTime;

        public Request(String datafeedId, long startTime) {
            this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName());
            this.startTime = startTime;
        }

        public Request(StreamInput in) throws IOException {
            readFrom(in);
        }

        Request() {
        }

        public String getDatafeedId() {
            return datafeedId;
        }

        public long getStartTime() {
            return startTime;
        }

        public Long getEndTime() {
            return endTime;
        }

        public void setEndTime(Long endTime) {
            this.endTime = endTime;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public Task createTask(long id, String type, String action, TaskId parentTaskId) {
            return new DatafeedTask(id, type, action, parentTaskId, datafeedId);
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            datafeedId = in.readString();
            startTime = in.readVLong();
            endTime = in.readOptionalLong();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(datafeedId);
            out.writeVLong(startTime);
            out.writeOptionalLong(endTime);
        }

        @Override
        public String getWriteableName() {
            return NAME;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
            builder.field(START_TIME.getPreferredName(), startTime);
            if (endTime != null) {
                builder.field(END_TIME.getPreferredName(), endTime);
            }
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(datafeedId, startTime, endTime);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(datafeedId, other.datafeedId) &&
                    Objects.equals(startTime, other.startTime) &&
                    Objects.equals(endTime, other.endTime);
        }
    }

    static class RequestBuilder extends ActionRequestBuilder<Request, PersistentActionResponse, RequestBuilder> {

        RequestBuilder(ElasticsearchClient client, StartDatafeedAction action) {
            super(client, action, new Request());
        }
    }

    public static class DatafeedTask extends PersistentTask {

        private volatile DatafeedJobRunner.Holder holder;

        public DatafeedTask(long id, String type, String action, TaskId parentTaskId, String datafeedId) {
            super(id, type, action, "datafeed-" + datafeedId, parentTaskId);
        }

        public void setHolder(DatafeedJobRunner.Holder holder) {
            this.holder = holder;
        }

        @Override
        public boolean shouldCancelChildrenOnCancellation() {
            return true;
        }

        @Override
        protected void onCancelled() {
            stop();
        }

        /* public for testing */
        public void stop() {
            if (holder == null) {
                throw new IllegalStateException("task cancel ran before datafeed runner assigned the holder");
            }
            holder.stop("cancel", null);
        }
    }

    public static class TransportAction extends TransportPersistentAction<Request> {

        private final DatafeedJobRunner datafeedJobRunner;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool,
                               PersistentActionService persistentActionService, PersistentActionRegistry persistentActionRegistry,
                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                               DatafeedJobRunner datafeedJobRunner) {
            super(settings, NAME, false, threadPool, transportService, persistentActionService, persistentActionRegistry,
                    actionFilters, indexNameExpressionResolver, Request::new, ThreadPool.Names.MANAGEMENT);
            this.datafeedJobRunner = datafeedJobRunner;
        }

        @Override
        public void validate(Request request, ClusterState clusterState) {
            MlMetadata mlMetadata = clusterState.metaData().custom(MlMetadata.TYPE);
            StartDatafeedAction.validate(request.getDatafeedId(), mlMetadata);
            PersistentTasksInProgress persistentTasksInProgress = clusterState.custom(PersistentTasksInProgress.TYPE);
            if (persistentTasksInProgress == null) {
                return;
            }

            Predicate<PersistentTasksInProgress.PersistentTaskInProgress<?>> predicate = taskInProgress -> {
                Request storedRequest = (Request) taskInProgress.getRequest();
                return storedRequest.getDatafeedId().equals(request.getDatafeedId());
            };
            if (persistentTasksInProgress.tasksExist(NAME, predicate)) {
                throw new ElasticsearchStatusException("datafeed already started, expected datafeed state [{}], but got [{}]",
                        RestStatus.CONFLICT, DatafeedState.STOPPED, DatafeedState.STARTED);
            }
        }

        @Override
        protected void nodeOperation(PersistentTask task, Request request, ActionListener<TransportResponse.Empty> listener) {
            DatafeedTask datafeedTask = (DatafeedTask) task;
            datafeedJobRunner.run(request.getDatafeedId(), request.getStartTime(), request.getEndTime(),
                    datafeedTask,
                    (error) -> {
                        if (error != null) {
                            listener.onFailure(error);
                        } else {
                            listener.onResponse(TransportResponse.Empty.INSTANCE);
                        }
                    });
        }

    }

    public static void validate(String datafeedId, MlMetadata mlMetadata) {
        DatafeedConfig datafeed = mlMetadata.getDatafeed(datafeedId);
        if (datafeed == null) {
            throw ExceptionsHelper.missingDatafeedException(datafeedId);
        }
        Job job = mlMetadata.getJobs().get(datafeed.getJobId());
        if (job == null) {
            throw ExceptionsHelper.missingJobException(datafeed.getJobId());
        }
        Allocation allocation = mlMetadata.getAllocations().get(datafeed.getJobId());
        if (allocation.getState() != JobState.OPENED) {
            throw new ElasticsearchStatusException("cannot start datafeed, expected job state [{}], but got [{}]",
                    RestStatus.CONFLICT, JobState.OPENED, allocation.getState());
        }
        DatafeedJobValidator.validate(datafeed, job);
    }
}
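A minimal usage sketch, assuming a transport client named client and a datafeed whose job is already OPENED (validate(...) above rejects anything else):

    // Start the datafeed as a persistent task over a bounded time range;
    // omitting the end time would run it as a real-time datafeed instead.
    StartDatafeedAction.Request request = new StartDatafeedAction.Request("my-datafeed", 0L);
    request.setEndTime(System.currentTimeMillis());
    PersistentActionResponse response = client.execute(StartDatafeedAction.INSTANCE, request).actionGet();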
@ -0,0 +1,178 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedState;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;
import org.elasticsearch.xpack.persistent.PersistentTasksInProgress.PersistentTaskInProgress;
import org.elasticsearch.xpack.persistent.RemovePersistentTaskAction;

import java.io.IOException;
import java.util.Objects;

public class StopDatafeedAction
        extends Action<StopDatafeedAction.Request, RemovePersistentTaskAction.Response, StopDatafeedAction.RequestBuilder> {

    public static final StopDatafeedAction INSTANCE = new StopDatafeedAction();
    public static final String NAME = "cluster:admin/ml/datafeeds/stop";

    private StopDatafeedAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public RemovePersistentTaskAction.Response newResponse() {
        return new RemovePersistentTaskAction.Response();
    }

    public static class Request extends MasterNodeRequest<Request> {

        private String datafeedId;

        public Request(String datafeedId) {
            this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName());
        }

        Request() {
        }

        public String getDatafeedId() {
            return datafeedId;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            datafeedId = in.readString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(datafeedId);
        }

        @Override
        public int hashCode() {
            return Objects.hash(datafeedId);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(datafeedId, other.datafeedId);
        }
    }

    static class RequestBuilder extends ActionRequestBuilder<Request, RemovePersistentTaskAction.Response, RequestBuilder> {

        RequestBuilder(ElasticsearchClient client, StopDatafeedAction action) {
            super(client, action, new Request());
        }
    }

    public static class TransportAction extends TransportMasterNodeAction<Request, RemovePersistentTaskAction.Response> {

        private final RemovePersistentTaskAction.TransportAction removePersistentTaskAction;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool,
                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                               ClusterService clusterService, RemovePersistentTaskAction.TransportAction removePersistentTaskAction) {
            super(settings, StopDatafeedAction.NAME, transportService, clusterService, threadPool, actionFilters,
                    indexNameExpressionResolver, Request::new);
            this.removePersistentTaskAction = removePersistentTaskAction;
        }

        @Override
        protected String executor() {
            return ThreadPool.Names.SAME;
        }

        @Override
        protected RemovePersistentTaskAction.Response newResponse() {
            return new RemovePersistentTaskAction.Response();
        }

        @Override
        protected void masterOperation(Request request, ClusterState state,
                                       ActionListener<RemovePersistentTaskAction.Response> listener) throws Exception {
            String datafeedId = request.getDatafeedId();
            MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE);
            validate(datafeedId, mlMetadata);

            PersistentTasksInProgress tasksInProgress = state.custom(PersistentTasksInProgress.TYPE);
            if (tasksInProgress != null) {
                for (PersistentTaskInProgress<?> taskInProgress : tasksInProgress.findTasks(StartDatafeedAction.NAME, p -> true)) {
                    StartDatafeedAction.Request storedRequest = (StartDatafeedAction.Request) taskInProgress.getRequest();
                    if (storedRequest.getDatafeedId().equals(datafeedId)) {
                        RemovePersistentTaskAction.Request cancelTasksRequest = new RemovePersistentTaskAction.Request();
                        cancelTasksRequest.setTaskId(taskInProgress.getId());
                        removePersistentTaskAction.execute(cancelTasksRequest, listener);
                        return;
                    }
                }
            }
            listener.onFailure(new ElasticsearchStatusException("datafeed already stopped, expected datafeed state [{}], but got [{}]",
                    RestStatus.CONFLICT, DatafeedState.STARTED, DatafeedState.STOPPED));
        }

        @Override
        protected ClusterBlockException checkBlock(Request request, ClusterState state) {
            return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
        }

    }

    static void validate(String datafeedId, MlMetadata mlMetadata) {
        DatafeedConfig datafeed = mlMetadata.getDatafeed(datafeedId);
        if (datafeed == null) {
            throw new ResourceNotFoundException(Messages.getMessage(Messages.DATAFEED_NOT_FOUND, datafeedId));
        }
    }
}
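A minimal usage sketch, assuming a transport client named client; note that stopping resolves to removing the datafeed's persistent task, which is why the action answers with RemovePersistentTaskAction.Response rather than an ML-specific type:

    StopDatafeedAction.Request request = new StopDatafeedAction.Request("my-datafeed");
    RemovePersistentTaskAction.Response response = client.execute(StopDatafeedAction.INSTANCE, request).actionGet();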
@ -0,0 +1,133 @@
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.ml.action;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ElasticsearchStatusException;
|
||||
import org.elasticsearch.action.FailedNodeException;
|
||||
import org.elasticsearch.action.TaskOperationFailure;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.tasks.BaseTasksRequest;
|
||||
import org.elasticsearch.action.support.tasks.BaseTasksResponse;
|
||||
import org.elasticsearch.action.support.tasks.TransportTasksAction;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.ml.job.config.Job;
|
||||
import org.elasticsearch.xpack.ml.job.config.JobState;
|
||||
import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;
|
||||
import org.elasticsearch.xpack.ml.job.JobManager;
|
||||
import org.elasticsearch.xpack.ml.job.metadata.Allocation;
|
||||
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Supplier;

/**
 * Base class that redirects a request to a node where the job task is running.
 */
// TODO: Hacking around here with TransportTasksAction. Ideally we should have another base class in core that
// redirects to a single node only
public abstract class TransportJobTaskAction<OperationTask extends Task, Request extends TransportJobTaskAction.JobTaskRequest<Request>,
        Response extends BaseTasksResponse & Writeable> extends TransportTasksAction<OperationTask, Request, Response, Response> {

    protected final JobManager jobManager;
    protected final AutodetectProcessManager processManager;
    private final Function<Request, String> jobIdFromRequest;

    TransportJobTaskAction(Settings settings, String actionName, ThreadPool threadPool, ClusterService clusterService,
                           TransportService transportService, ActionFilters actionFilters,
                           IndexNameExpressionResolver indexNameExpressionResolver, Supplier<Request> requestSupplier,
                           Supplier<Response> responseSupplier, String nodeExecutor, JobManager jobManager,
                           AutodetectProcessManager processManager, Function<Request, String> jobIdFromRequest) {
        super(settings, actionName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
                requestSupplier, responseSupplier, nodeExecutor);
        this.jobManager = jobManager;
        this.processManager = processManager;
        this.jobIdFromRequest = jobIdFromRequest;
    }

    @Override
    protected Response newResponse(Request request, List<Response> tasks, List<TaskOperationFailure> taskOperationFailures,
                                   List<FailedNodeException> failedNodeExceptions) {
        // No need to accumulate sub responses, since we only perform an operation on one task.
        // Not ideal, but throwing exceptions here works, because higher up the stack there is a try-catch block
        // delegating to the ActionListener's onFailure.
        if (tasks.isEmpty()) {
            if (taskOperationFailures.isEmpty() == false) {
                throw new ElasticsearchException(taskOperationFailures.get(0).getCause());
            } else if (failedNodeExceptions.isEmpty() == false) {
                throw new ElasticsearchException(failedNodeExceptions.get(0).getCause());
            } else {
                // The same validation that exists in AutodetectProcessManager#processData(...) and flush(...)
                // is required here too, because if the job hasn't been opened yet then no task exists for it,
                // so the #taskOperation(...) method will not be invoked and an empty result would be returned
                // to the client. This ensures that we return an understandable error instead:
                String jobId = jobIdFromRequest.apply(request);
                jobManager.getJobOrThrowIfUnknown(jobId);
                Allocation allocation = jobManager.getJobAllocation(jobId);
                if (allocation.getState() != JobState.OPENED) {
                    throw new ElasticsearchStatusException("job [" + jobId + "] state is [" + allocation.getState() +
                            "], but must be [" + JobState.OPENED + "] to perform requested action", RestStatus.CONFLICT);
                } else {
                    throw new IllegalStateException("No errors or response");
                }
            }
        } else {
            if (tasks.size() > 1) {
                throw new IllegalStateException("Expected one node level response, but got [" + tasks.size() + "]");
            }
            return tasks.get(0);
        }
    }

    @Override
    protected boolean accumulateExceptions() {
        return false;
    }

    public static class JobTaskRequest<R extends JobTaskRequest<R>> extends BaseTasksRequest<R> {

        String jobId;

        JobTaskRequest() {
        }

        JobTaskRequest(String jobId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
        }

        public String getJobId() {
            return jobId;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobId = in.readString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
        }

        @Override
        public boolean match(Task task) {
            return InternalOpenJobAction.JobTask.match(task, jobId);
        }
    }
}
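A concrete action only has to feed its request/response suppliers into the super constructor and implement the per-task operation; the base class handles routing to the node that hosts the job's task. A minimal sketch, assuming hypothetical MyRequest/MyResponse types that follow the JobTaskRequest/BaseTasksResponse pattern (UpdateProcessAction further down in this commit is the real example):

public static class TransportMyAction extends TransportJobTaskAction<InternalOpenJobAction.JobTask, MyRequest, MyResponse> {

    @Inject
    public TransportMyAction(Settings settings, TransportService transportService, ThreadPool threadPool,
                             ClusterService clusterService, ActionFilters actionFilters,
                             IndexNameExpressionResolver indexNameExpressionResolver,
                             JobManager jobManager, AutodetectProcessManager processManager) {
        super(settings, "cluster:admin/ml/my_action", threadPool, clusterService, transportService, actionFilters,
                indexNameExpressionResolver, MyRequest::new, MyResponse::new, MlPlugin.THREAD_POOL_NAME,
                jobManager, processManager, MyRequest::getJobId);
    }

    @Override
    protected MyResponse readTaskResponse(StreamInput in) throws IOException {
        MyResponse response = new MyResponse();
        response.readFrom(in);
        return response;
    }

    @Override
    protected void taskOperation(MyRequest request, InternalOpenJobAction.JobTask task, ActionListener<MyResponse> listener) {
        // runs only on the node where the job's task lives; the base class performed the redirect
        listener.onResponse(new MyResponse());
    }
}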
@@ -0,0 +1,198 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobUpdate;

import java.io.IOException;
import java.util.Objects;

public class UpdateJobAction extends Action<UpdateJobAction.Request, PutJobAction.Response, UpdateJobAction.RequestBuilder> {
    public static final UpdateJobAction INSTANCE = new UpdateJobAction();
    public static final String NAME = "cluster:admin/ml/job/update";

    private UpdateJobAction() {
        super(NAME);
    }

    @Override
    public UpdateJobAction.RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new UpdateJobAction.RequestBuilder(client, this);
    }

    @Override
    public PutJobAction.Response newResponse() {
        return new PutJobAction.Response();
    }

    public static class Request extends AcknowledgedRequest<UpdateJobAction.Request> implements ToXContent {

        public static UpdateJobAction.Request parseRequest(String jobId, XContentParser parser) {
            JobUpdate update = JobUpdate.PARSER.apply(parser, null);
            return new UpdateJobAction.Request(jobId, update);
        }

        private String jobId;
        private JobUpdate update;

        public Request(String jobId, JobUpdate update) {
            this.jobId = jobId;
            this.update = update;
        }

        Request() {
        }

        public String getJobId() {
            return jobId;
        }

        public JobUpdate getJobUpdate() {
            return update;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            // the job id must round-trip too, otherwise it is lost when the request is forwarded to the master node
            jobId = in.readString();
            update = new JobUpdate(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
            update.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            update.toXContent(builder, params);
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            UpdateJobAction.Request request = (UpdateJobAction.Request) o;
            return Objects.equals(jobId, request.jobId) && Objects.equals(update, request.update);
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId, update);
        }

        @Override
        public final String toString() {
            return Strings.toString(this);
        }
    }

    public static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, PutJobAction.Response, RequestBuilder> {

        public RequestBuilder(ElasticsearchClient client, UpdateJobAction action) {
            super(client, action, new Request());
        }
    }

    public static class TransportAction extends TransportMasterNodeAction<UpdateJobAction.Request, PutJobAction.Response> {

        private final JobManager jobManager;
        private final Client client;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
                               ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                               JobManager jobManager, Client client) {
            super(settings, UpdateJobAction.NAME, transportService, clusterService, threadPool, actionFilters,
                    indexNameExpressionResolver, UpdateJobAction.Request::new);
            this.jobManager = jobManager;
            this.client = client;
        }

        @Override
        protected String executor() {
            return ThreadPool.Names.SAME;
        }

        @Override
        protected PutJobAction.Response newResponse() {
            return new PutJobAction.Response();
        }

        @Override
        protected void masterOperation(Request request, ClusterState state,
                                       ActionListener<PutJobAction.Response> listener) throws Exception {
            if (request.getJobId().equals(Job.ALL)) {
                throw new IllegalArgumentException("job id [" + Job.ALL + "] cannot be used for updates");
            }

            ActionListener<PutJobAction.Response> wrappedListener = listener;
            if (request.getJobUpdate().isAutodetectProcessUpdate()) {
                wrappedListener = ActionListener.wrap(
                        response -> updateProcess(request, response, listener),
                        listener::onFailure);
            }

            jobManager.updateJob(request.getJobId(), request.getJobUpdate(), request, wrappedListener);
        }

        private void updateProcess(Request request, PutJobAction.Response updateConfigResponse,
                                   ActionListener<PutJobAction.Response> listener) {

            UpdateProcessAction.Request updateProcessRequest = new UpdateProcessAction.Request(request.getJobId(),
                    request.getJobUpdate().getModelDebugConfig(), request.getJobUpdate().getDetectorUpdates());
            client.execute(UpdateProcessAction.INSTANCE, updateProcessRequest, new ActionListener<UpdateProcessAction.Response>() {
                @Override
                public void onResponse(UpdateProcessAction.Response response) {
                    listener.onResponse(updateConfigResponse);
                }

                @Override
                public void onFailure(Exception e) {
                    listener.onFailure(e);
                }
            });
        }

        @Override
        protected ClusterBlockException checkBlock(UpdateJobAction.Request request, ClusterState state) {
            return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
        }
    }
}
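A hedged usage sketch: submitting an update through the internal client layer. Here `jobUpdate` stands in for a JobUpdate obtained elsewhere (for example via JobUpdate.PARSER, as Request.parseRequest does above), and `client`/`logger` are assumed to be in scope:

UpdateJobAction.Request updateRequest = new UpdateJobAction.Request("my-job", jobUpdate);
client.execute(UpdateJobAction.INSTANCE, updateRequest, ActionListener.wrap(
        response -> logger.info("job [my-job] updated"),
        e -> logger.error("update of job [my-job] failed", e)));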
@@ -0,0 +1,194 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.metadata.Allocation;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.Objects;

public class UpdateJobStateAction
        extends Action<UpdateJobStateAction.Request, UpdateJobStateAction.Response, UpdateJobStateAction.RequestBuilder> {

    public static final UpdateJobStateAction INSTANCE = new UpdateJobStateAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/state/update";

    private UpdateJobStateAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class Request extends AcknowledgedRequest<Request> {

        private String jobId;
        private JobState state;
        private String reason;

        public Request(String jobId, JobState state) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
            this.state = ExceptionsHelper.requireNonNull(state, Allocation.STATE.getPreferredName());
        }

        Request() {}

        public String getJobId() {
            return jobId;
        }

        public void setJobId(String jobId) {
            this.jobId = jobId;
        }

        public JobState getState() {
            return state;
        }

        public void setState(JobState state) {
            this.state = state;
        }

        public String getReason() {
            return reason;
        }

        public void setReason(String reason) {
            this.reason = reason;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobId = in.readString();
            state = JobState.fromStream(in);
            reason = in.readOptionalString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
            state.writeTo(out);
            out.writeOptionalString(reason);
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId, state);
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null || obj.getClass() != getClass()) {
                return false;
            }
            UpdateJobStateAction.Request other = (UpdateJobStateAction.Request) obj;
            return Objects.equals(jobId, other.jobId) && Objects.equals(state, other.state);
        }
    }

    static class RequestBuilder extends MasterNodeOperationRequestBuilder<Request, Response, RequestBuilder> {

        RequestBuilder(ElasticsearchClient client, UpdateJobStateAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends AcknowledgedResponse {

        public Response(boolean acknowledged) {
            super(acknowledged);
        }

        private Response() {}

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            readAcknowledged(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            writeAcknowledged(out);
        }
    }

    public static class TransportAction extends TransportMasterNodeAction<Request, Response> {

        private final JobManager jobManager;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService,
                               ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                               JobManager jobManager) {
            super(settings, UpdateJobStateAction.NAME, transportService, clusterService, threadPool, actionFilters,
                    indexNameExpressionResolver, Request::new);
            this.jobManager = jobManager;
        }

        @Override
        protected String executor() {
            return ThreadPool.Names.SAME;
        }

        @Override
        protected Response newResponse() {
            return new Response();
        }

        @Override
        protected void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
            jobManager.setJobState(request, listener);
        }

        @Override
        protected ClusterBlockException checkBlock(Request request, ClusterState state) {
            return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
        }
    }
}
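A usage sketch, assuming `client` and `logger` in scope; the optional reason travels over the wire via readOptionalString/writeOptionalString above:

UpdateJobStateAction.Request stateRequest = new UpdateJobStateAction.Request("my-job", JobState.OPENED);
stateRequest.setReason("autodetect process started");
client.execute(UpdateJobStateAction.INSTANCE, stateRequest, ActionListener.wrap(
        response -> logger.info("state change acknowledged: " + response.isAcknowledged()),
        e -> logger.error("state change for job [my-job] failed", e)));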
@@ -0,0 +1,305 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.StatusToXContentObject;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;

public class UpdateModelSnapshotAction extends
        Action<UpdateModelSnapshotAction.Request, UpdateModelSnapshotAction.Response,
                UpdateModelSnapshotAction.RequestBuilder> {

    public static final UpdateModelSnapshotAction INSTANCE = new UpdateModelSnapshotAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/model_snapshots/update";

    private UpdateModelSnapshotAction() {
        super(NAME);
    }

    @Override
    public UpdateModelSnapshotAction.RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public UpdateModelSnapshotAction.Response newResponse() {
        return new Response();
    }

    public static class Request extends ActionRequest implements ToXContent {

        private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>(NAME, Request::new);

        static {
            PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID);
            PARSER.declareString((request, snapshotId) -> request.snapshotId = snapshotId, ModelSnapshot.SNAPSHOT_ID);
            PARSER.declareString((request, description) -> request.description = description, ModelSnapshot.DESCRIPTION);
        }

        public static Request parseRequest(String jobId, String snapshotId, XContentParser parser) {
            Request request = PARSER.apply(parser, null);
            if (jobId != null) {
                request.jobId = jobId;
            }
            if (snapshotId != null) {
                request.snapshotId = snapshotId;
            }
            return request;
        }

        private String jobId;
        private String snapshotId;
        private String description;

        Request() {
        }

        public Request(String jobId, String snapshotId, String description) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
            this.snapshotId = ExceptionsHelper.requireNonNull(snapshotId, ModelSnapshot.SNAPSHOT_ID.getPreferredName());
            this.description = ExceptionsHelper.requireNonNull(description, ModelSnapshot.DESCRIPTION.getPreferredName());
        }

        public String getJobId() {
            return jobId;
        }

        public String getSnapshotId() {
            return snapshotId;
        }

        public String getDescriptionString() {
            return description;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            jobId = in.readString();
            snapshotId = in.readString();
            description = in.readString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(jobId);
            out.writeString(snapshotId);
            out.writeString(description);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(Job.ID.getPreferredName(), jobId);
            builder.field(ModelSnapshot.SNAPSHOT_ID.getPreferredName(), snapshotId);
            builder.field(ModelSnapshot.DESCRIPTION.getPreferredName(), description);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(jobId, snapshotId, description);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(jobId, other.jobId) && Objects.equals(snapshotId, other.snapshotId)
                    && Objects.equals(description, other.description);
        }
    }

    public static class Response extends ActionResponse implements StatusToXContentObject {

        private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged");
        private static final ParseField MODEL = new ParseField("model");

        private ModelSnapshot model;

        Response() {
        }

        public Response(ModelSnapshot modelSnapshot) {
            model = modelSnapshot;
        }

        public ModelSnapshot getModel() {
            return model;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            model = new ModelSnapshot(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            model.writeTo(out);
        }

        @Override
        public RestStatus status() {
            return RestStatus.OK;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(ACKNOWLEDGED.getPreferredName(), true);
            builder.field(MODEL.getPreferredName());
            builder = model.toXContent(builder, params);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(model);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Response other = (Response) obj;
            return Objects.equals(model, other.model);
        }

        @Override
        public final String toString() {
            return Strings.toString(this);
        }
    }

    public static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {

        public RequestBuilder(ElasticsearchClient client, UpdateModelSnapshotAction action) {
            super(client, action, new Request());
        }
    }

    public static class TransportAction extends HandledTransportAction<Request, Response> {

        private final JobManager jobManager;
        private final JobProvider jobProvider;

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters,
                               IndexNameExpressionResolver indexNameExpressionResolver, JobManager jobManager, JobProvider jobProvider) {
            super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new);
            this.jobManager = jobManager;
            this.jobProvider = jobProvider;
        }

        @Override
        protected void doExecute(Request request, ActionListener<Response> listener) {
            logger.debug("Received request to change model snapshot description using '" + request.getDescriptionString()
                    + "' for snapshot ID '" + request.getSnapshotId() + "' for job '" + request.getJobId() + "'");
            getChangeCandidates(request, changeCandidates -> {
                checkForClashes(request, aVoid -> {
                    if (changeCandidates.size() > 1) {
                        logger.warn("More than one model found for [{}: {}, {}: {}] tuple.", Job.ID.getPreferredName(), request.getJobId(),
                                ModelSnapshot.SNAPSHOT_ID.getPreferredName(), request.getSnapshotId());
                    }
                    ModelSnapshot modelSnapshot = changeCandidates.get(0);
                    modelSnapshot.setDescription(request.getDescriptionString());
                    jobManager.updateModelSnapshot(modelSnapshot, b -> {
                        modelSnapshot.setDescription(request.getDescriptionString());
                        // The quantiles can be large, and totally dominate the output -
                        // it's clearer to remove them
                        modelSnapshot.setQuantiles(null);
                        listener.onResponse(new Response(modelSnapshot));
                    }, listener::onFailure);
                }, listener::onFailure);
            }, listener::onFailure);
        }

        private void getChangeCandidates(Request request, Consumer<List<ModelSnapshot>> handler, Consumer<Exception> errorHandler) {
            getModelSnapshots(request.getJobId(), request.getSnapshotId(), null,
                    changeCandidates -> {
                        if (changeCandidates == null || changeCandidates.isEmpty()) {
                            errorHandler.accept(new ResourceNotFoundException(
                                    Messages.getMessage(Messages.REST_NO_SUCH_MODEL_SNAPSHOT, request.getJobId())));
                        } else {
                            handler.accept(changeCandidates);
                        }
                    }, errorHandler);
        }

        private void checkForClashes(Request request, Consumer<Void> handler, Consumer<Exception> errorHandler) {
            getModelSnapshots(request.getJobId(), null, request.getDescriptionString(), clashCandidates -> {
                if (clashCandidates != null && !clashCandidates.isEmpty()) {
                    errorHandler.accept(new IllegalArgumentException(Messages.getMessage(
                            Messages.REST_DESCRIPTION_ALREADY_USED, request.getDescriptionString(), request.getJobId())));
                } else {
                    handler.accept(null);
                }
            }, errorHandler);
        }

        private void getModelSnapshots(String jobId, String snapshotId, String description,
                                       Consumer<List<ModelSnapshot>> handler, Consumer<Exception> errorHandler) {
            jobProvider.modelSnapshots(jobId, 0, 1, null, null, null, true, snapshotId, description,
                    page -> handler.accept(page.results()), errorHandler);
        }
    }
}
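A hedged sketch of the happy path; the job id and snapshot id would normally come from the REST path and the new description from the request body, with `client`/`logger` assumed in scope:

UpdateModelSnapshotAction.Request renameRequest =
        new UpdateModelSnapshotAction.Request("my-job", "snapshot-1", "state before the upgrade");
client.execute(UpdateModelSnapshotAction.INSTANCE, renameRequest, ActionListener.wrap(
        response -> logger.info("updated snapshot: " + response.getModel()),
        e -> logger.error("snapshot description update failed", e)));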
@@ -0,0 +1,216 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.tasks.BaseTasksResponse;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.StatusToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.config.JobUpdate;
import org.elasticsearch.xpack.ml.job.config.ModelDebugConfig;
import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;

import java.io.IOException;
import java.util.List;
import java.util.Objects;

public class UpdateProcessAction extends
        Action<UpdateProcessAction.Request, UpdateProcessAction.Response, UpdateProcessAction.RequestBuilder> {

    public static final UpdateProcessAction INSTANCE = new UpdateProcessAction();
    public static final String NAME = "cluster:admin/ml/job/update/process";

    private UpdateProcessAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, this);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {

        RequestBuilder(ElasticsearchClient client, UpdateProcessAction action) {
            super(client, action, new Request());
        }
    }

    public static class Response extends BaseTasksResponse implements StatusToXContentObject, Writeable {

        private boolean isUpdated;

        private Response() {
            this.isUpdated = true;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            isUpdated = in.readBoolean();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeBoolean(isUpdated);
        }

        @Override
        public RestStatus status() {
            return RestStatus.ACCEPTED;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field("updated", isUpdated);
            builder.endObject();
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hashCode(isUpdated);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Response other = (Response) obj;

            return this.isUpdated == other.isUpdated;
        }
    }

    public static class Request extends TransportJobTaskAction.JobTaskRequest<Request> {

        private ModelDebugConfig modelDebugConfig;
        private List<JobUpdate.DetectorUpdate> detectorUpdates;

        Request() {
        }

        public Request(String jobId, ModelDebugConfig modelDebugConfig, List<JobUpdate.DetectorUpdate> detectorUpdates) {
            super(jobId);
            this.modelDebugConfig = modelDebugConfig;
            this.detectorUpdates = detectorUpdates;
        }

        public ModelDebugConfig getModelDebugConfig() {
            return modelDebugConfig;
        }

        public List<JobUpdate.DetectorUpdate> getDetectorUpdates() {
            return detectorUpdates;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            modelDebugConfig = in.readOptionalWriteable(ModelDebugConfig::new);
            if (in.readBoolean()) {
                // keep the deserialized list; dropping this assignment would silently lose the detector updates
                detectorUpdates = in.readList(JobUpdate.DetectorUpdate::new);
            }
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeOptionalWriteable(modelDebugConfig);
            boolean hasDetectorUpdates = detectorUpdates != null;
            out.writeBoolean(hasDetectorUpdates);
            if (hasDetectorUpdates) {
                out.writeList(detectorUpdates);
            }
        }

        @Override
        public int hashCode() {
            return Objects.hash(getJobId(), modelDebugConfig, detectorUpdates);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;

            return Objects.equals(getJobId(), other.getJobId()) &&
                    Objects.equals(modelDebugConfig, other.modelDebugConfig) &&
                    Objects.equals(detectorUpdates, other.detectorUpdates);
        }
    }

    public static class TransportAction extends TransportJobTaskAction<InternalOpenJobAction.JobTask, Request, Response> {

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ThreadPool threadPool, ClusterService clusterService,
                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                               JobManager jobManager, AutodetectProcessManager processManager) {
            super(settings, NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
                    Request::new, Response::new, MlPlugin.THREAD_POOL_NAME, jobManager, processManager, Request::getJobId);
        }

        @Override
        protected Response readTaskResponse(StreamInput in) throws IOException {
            Response response = new Response();
            response.readFrom(in);
            return response;
        }

        @Override
        protected void taskOperation(Request request, InternalOpenJobAction.JobTask task, ActionListener<Response> listener) {
            threadPool.executor(MlPlugin.THREAD_POOL_NAME).execute(() -> {
                try {
                    if (request.getModelDebugConfig() != null) {
                        processManager.writeUpdateModelDebugMessage(request.getJobId(), request.getModelDebugConfig());
                    }
                    if (request.getDetectorUpdates() != null) {
                        for (JobUpdate.DetectorUpdate update : request.getDetectorUpdates()) {
                            processManager.writeUpdateDetectorRulesMessage(request.getJobId(), update.getIndex(), update.getRules());
                        }
                    }

                    listener.onResponse(new Response());
                } catch (Exception e) {
                    listener.onFailure(e);
                }
            });
        }
    }
}
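Because Request restores detectorUpdates in readFrom, a wire round-trip now preserves equality. A test-style sketch (assumptions: it lives in the same package, since the no-arg Request constructor is package-private, and modelDebugConfig/detectorUpdates are pre-built values; BytesStreamOutput is org.elasticsearch.common.io.stream.BytesStreamOutput):

UpdateProcessAction.Request original =
        new UpdateProcessAction.Request("my-job", modelDebugConfig, detectorUpdates);
BytesStreamOutput out = new BytesStreamOutput();
original.writeTo(out);                         // writes job id, optional config, optional list
UpdateProcessAction.Request copy = new UpdateProcessAction.Request();
copy.readFrom(out.bytes().streamInput());      // reads them back in the same order
assert original.equals(copy);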
@@ -0,0 +1,164 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.config.Detector;

import java.io.IOException;
import java.util.Objects;

public class ValidateDetectorAction
        extends Action<ValidateDetectorAction.Request, ValidateDetectorAction.Response, ValidateDetectorAction.RequestBuilder> {

    public static final ValidateDetectorAction INSTANCE = new ValidateDetectorAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/validate/detector";

    protected ValidateDetectorAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, INSTANCE);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {

        protected RequestBuilder(ElasticsearchClient client, ValidateDetectorAction action) {
            super(client, action, new Request());
        }

    }

    public static class Request extends ActionRequest implements ToXContent {

        private Detector detector;

        // NORELEASE this needs to change so the body is not directly the
        // detector but an object that contains a field for the detector
        public static Request parseRequest(XContentParser parser) {
            Detector detector = Detector.PARSER.apply(parser, null).build();
            return new Request(detector);
        }

        Request() {
            this.detector = null;
        }

        public Request(Detector detector) {
            this.detector = detector;
        }

        public Detector getDetector() {
            return detector;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            detector.writeTo(out);
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            detector = new Detector(in);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            detector.toXContent(builder, params);
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(detector);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(detector, other.detector);
        }

    }

    public static class Response extends AcknowledgedResponse {

        public Response() {
            super();
        }

        public Response(boolean acknowledged) {
            super(acknowledged);
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            readAcknowledged(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            writeAcknowledged(out);
        }
    }

    public static class TransportAction extends HandledTransportAction<Request, Response> {

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool,
                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
            super(settings, ValidateDetectorAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
                    Request::new);
        }

        @Override
        protected void doExecute(Request request, ActionListener<Response> listener) {
            listener.onResponse(new Response(true));
        }

    }
}
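A sketch of how a REST layer might feed this action; the JSON body shape is an assumption, and per the NORELEASE note above the body is currently the bare detector object itself (uses XContentFactory/XContentType/NamedXContentRegistry from org.elasticsearch.common.xcontent, assuming the 5.x createParser signature):

String body = "{\"function\":\"count\"}";
try (XContentParser parser = XContentFactory.xContent(XContentType.JSON)
        .createParser(NamedXContentRegistry.EMPTY, body)) {
    ValidateDetectorAction.Request validateRequest = ValidateDetectorAction.Request.parseRequest(parser);
    client.execute(ValidateDetectorAction.INSTANCE, validateRequest, ActionListener.wrap(
            response -> logger.info("detector accepted: " + response.isAcknowledged()),
            e -> logger.error("detector rejected", e)));
}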
@@ -0,0 +1,164 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ml.job.config.Job;

import java.io.IOException;
import java.util.Objects;

public class ValidateJobConfigAction
        extends Action<ValidateJobConfigAction.Request, ValidateJobConfigAction.Response, ValidateJobConfigAction.RequestBuilder> {

    public static final ValidateJobConfigAction INSTANCE = new ValidateJobConfigAction();
    public static final String NAME = "cluster:admin/ml/anomaly_detectors/validate";

    protected ValidateJobConfigAction() {
        super(NAME);
    }

    @Override
    public RequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new RequestBuilder(client, INSTANCE);
    }

    @Override
    public Response newResponse() {
        return new Response();
    }

    public static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {

        protected RequestBuilder(ElasticsearchClient client, ValidateJobConfigAction action) {
            super(client, action, new Request());
        }

    }

    public static class Request extends ActionRequest implements ToXContent {

        private Job job;

        public static Request parseRequest(XContentParser parser) {
            Job.Builder job = Job.PARSER.apply(parser, null);
            // When jobs are PUT their ID must be supplied in the URL - assume this will
            // be valid unless an invalid job ID is specified in the JSON to be validated
            return new Request(job.build(true, (job.getId() != null) ? job.getId() : "ok"));
        }

        Request() {
            this.job = null;
        }

        public Request(Job job) {
            this.job = job;
        }

        public Job getJob() {
            return job;
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            job.writeTo(out);
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            job = new Job(in);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            job.toXContent(builder, params);
            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(job);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            Request other = (Request) obj;
            return Objects.equals(job, other.job);
        }

    }

    public static class Response extends AcknowledgedResponse {

        public Response() {
            super();
        }

        public Response(boolean acknowledged) {
            super(acknowledged);
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            readAcknowledged(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            writeAcknowledged(out);
        }
    }

    public static class TransportAction extends HandledTransportAction<Request, Response> {

        @Inject
        public TransportAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool,
                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
            super(settings, ValidateJobConfigAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
                    Request::new);
        }

        @Override
        protected void doExecute(Request request, ActionListener<Response> listener) {
            listener.onResponse(new Response(true));
        }

    }
}
@@ -0,0 +1,107 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action.util;

import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Objects;

public class PageParams extends ToXContentToBytes implements Writeable {

    public static final ParseField PAGE = new ParseField("page");
    public static final ParseField FROM = new ParseField("from");
    public static final ParseField SIZE = new ParseField("size");

    public static final int DEFAULT_FROM = 0;
    public static final int DEFAULT_SIZE = 100;

    public static final ConstructingObjectParser<PageParams, Void> PARSER = new ConstructingObjectParser<>(
            PAGE.getPreferredName(), a -> new PageParams((int) a[0], (int) a[1]));

    public static final int MAX_FROM_SIZE_SUM = 10000;

    static {
        PARSER.declareInt(ConstructingObjectParser.constructorArg(), FROM);
        PARSER.declareInt(ConstructingObjectParser.constructorArg(), SIZE);
    }

    private final int from;
    private final int size;

    public PageParams(StreamInput in) throws IOException {
        this(in.readVInt(), in.readVInt());
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(from);
        out.writeVInt(size);
    }

    public PageParams() {
        this.from = DEFAULT_FROM;
        this.size = DEFAULT_SIZE;
    }

    public PageParams(int from, int size) {
        if (from < 0) {
            throw new IllegalArgumentException("Parameter [" + FROM.getPreferredName() + "] cannot be < 0");
        }
        if (size < 0) {
            throw new IllegalArgumentException("Parameter [" + PageParams.SIZE.getPreferredName() + "] cannot be < 0");
        }
        if (from + size > MAX_FROM_SIZE_SUM) {
            throw new IllegalArgumentException("The sum of parameters [" + FROM.getPreferredName() + "] and ["
                    + PageParams.SIZE.getPreferredName() + "] cannot be higher than " + MAX_FROM_SIZE_SUM + ".");
        }
        this.from = from;
        this.size = size;
    }

    public int getFrom() {
        return from;
    }

    public int getSize() {
        return size;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(FROM.getPreferredName(), from);
        builder.field(SIZE.getPreferredName(), size);
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(from, size);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        PageParams other = (PageParams) obj;
        return Objects.equals(from, other.from) &&
                Objects.equals(size, other.size);
    }
}
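The from/size window is capped at 10000, the same value as Elasticsearch's default index.max_result_window. The boundary behaviour in a short sketch:

PageParams firstPage = new PageParams(0, 100);     // same values the no-arg constructor defaults to
PageParams lastPage = new PageParams(9900, 100);   // from + size == 10000 is still accepted
// new PageParams(9901, 100) throws IllegalArgumentException: the sum exceeds MAX_FROM_SIZE_SUM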
@@ -0,0 +1,103 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.action.util;

import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.List;
import java.util.Objects;

/**
 * Generic wrapper class for a page of query results and the total number of
 * query results.<br>
 * {@linkplain #count()} is the total number of results, which may not be equal
 * to the actual length of the {@linkplain #results()} list if from &amp; size
 * paging or a cursor was used in the database query.
 */
public final class QueryPage<T extends ToXContent & Writeable> extends ToXContentToBytes implements Writeable {

    public static final ParseField COUNT = new ParseField("count");
    public static final ParseField DEFAULT_RESULTS_FIELD = new ParseField("results_field");

    private final ParseField resultsField;
    private final List<T> results;
    private final long count;

    public QueryPage(List<T> results, long count, ParseField resultsField) {
        this.results = results;
        this.count = count;
        this.resultsField = ExceptionsHelper.requireNonNull(resultsField, DEFAULT_RESULTS_FIELD.getPreferredName());
    }

    public QueryPage(StreamInput in, Reader<T> hitReader) throws IOException {
        resultsField = new ParseField(in.readString());
        results = in.readList(hitReader);
        count = in.readLong();
    }

    public static ResourceNotFoundException emptyQueryPage(ParseField resultsField) {
        return new ResourceNotFoundException("Could not find requested " + resultsField.getPreferredName());
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(resultsField.getPreferredName());
        out.writeList(results);
        out.writeLong(count);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        doXContentBody(builder, params);
        builder.endObject();
        return builder;
    }

    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.field(COUNT.getPreferredName(), count);
        builder.field(resultsField.getPreferredName(), results);
        return builder;
    }

    public List<T> results() {
        return results;
    }

    public long count() {
        return count;
    }

    @Override
    public int hashCode() {
        return Objects.hash(results, count);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }

        if (getClass() != obj.getClass()) {
            return false;
        }

        @SuppressWarnings("unchecked")
        QueryPage<T> other = (QueryPage<T>) obj;
        return Objects.equals(results, other.results) &&
                Objects.equals(count, other.count);
    }
}
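A short sketch of the count-versus-results distinction, assuming `snapshots` is a two-element List<ModelSnapshot> taken from a query that matched 54 documents in total:

QueryPage<ModelSnapshot> page = new QueryPage<>(snapshots, 54, new ParseField("model_snapshots"));
assert page.results().size() == 2;   // only the requested window
assert page.count() == 54;           // the overall number of matches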
@@ -0,0 +1,156 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed;

import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.Locale;
import java.util.Objects;

/**
 * The description of how searches should be chunked.
 */
public class ChunkingConfig extends ToXContentToBytes implements Writeable {

    public static final ParseField MODE_FIELD = new ParseField("mode");
    public static final ParseField TIME_SPAN_FIELD = new ParseField("time_span");

    public static final ConstructingObjectParser<ChunkingConfig, Void> PARSER = new ConstructingObjectParser<>(
            "chunking_config", a -> new ChunkingConfig((Mode) a[0], (Long) a[1]));

    static {
        PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
            if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
                return Mode.fromString(p.text());
            }
            throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
        }, MODE_FIELD, ValueType.STRING);
        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), TIME_SPAN_FIELD);
    }

    private final Mode mode;
    private final Long timeSpan;

    public ChunkingConfig(StreamInput in) throws IOException {
        mode = Mode.readFromStream(in);
        timeSpan = in.readOptionalLong();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        mode.writeTo(out);
        out.writeOptionalLong(timeSpan);
    }

    ChunkingConfig(Mode mode, @Nullable Long timeSpan) {
        this.mode = ExceptionsHelper.requireNonNull(mode, MODE_FIELD.getPreferredName());
        this.timeSpan = timeSpan;
        if (mode == Mode.MANUAL) {
            if (timeSpan == null) {
                throw new IllegalArgumentException("when chunk mode is manual time_span is required");
            }
            if (timeSpan <= 0) {
                throw new IllegalArgumentException("chunk time_span has to be positive");
            }
        } else {
            if (timeSpan != null) {
                throw new IllegalArgumentException("chunk time_span may only be set when mode is manual");
            }
        }
    }

    @Nullable
    public Long getTimeSpan() {
        return timeSpan;
    }

    public boolean isEnabled() {
        return mode != Mode.OFF;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(MODE_FIELD.getPreferredName(), mode);
        if (timeSpan != null) {
            builder.field(TIME_SPAN_FIELD.getPreferredName(), timeSpan);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(mode, timeSpan);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }

        if (getClass() != obj.getClass()) {
            return false;
        }

        ChunkingConfig other = (ChunkingConfig) obj;
        return Objects.equals(this.mode, other.mode) &&
                Objects.equals(this.timeSpan, other.timeSpan);
    }

    public static ChunkingConfig newAuto() {
        return new ChunkingConfig(Mode.AUTO, null);
    }

    public static ChunkingConfig newOff() {
        return new ChunkingConfig(Mode.OFF, null);
    }

    public static ChunkingConfig newManual(long timeSpan) {
        return new ChunkingConfig(Mode.MANUAL, timeSpan);
    }

    public enum Mode implements Writeable {
        AUTO, MANUAL, OFF;

        public static Mode fromString(String value) {
            return Mode.valueOf(value.toUpperCase(Locale.ROOT));
        }

        public static Mode readFromStream(StreamInput in) throws IOException {
            int ordinal = in.readVInt();
            if (ordinal < 0 || ordinal >= values().length) {
                throw new IOException("Unknown Mode ordinal [" + ordinal + "]");
            }
            return values()[ordinal];
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVInt(ordinal());
        }

        @Override
        public String toString() {
            return name().toLowerCase(Locale.ROOT);
        }
    }
}
|
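For illustration, a minimal usage sketch of the factory methods above; the span value passed to newManual is a hypothetical example, not part of the original change:

// Hypothetical usage sketch: the three factory methods map onto the three modes.
ChunkingConfig auto = ChunkingConfig.newAuto();            // mode=auto, no time_span allowed
ChunkingConfig off = ChunkingConfig.newOff();              // chunking disabled entirely
ChunkingConfig manual = ChunkingConfig.newManual(86_400L); // mode=manual, a positive time_span is required
assert auto.isEnabled() && off.isEnabled() == false;       // only OFF reports disabled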
@ -0,0 +1,487 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.utils.DomainSplitFunction;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.utils.MlStrings;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * Datafeed configuration options. Describes where to proactively pull input
 * data from.
 * <p>
 * If a value has not been set it will be <code>null</code>. Object wrappers are
 * used around integral types and booleans so they can take <code>null</code>
 * values.
 */
public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements ToXContent {

    // Used for QueryPage
    public static final ParseField RESULTS_FIELD = new ParseField("datafeeds");

    /**
     * The field name used to specify document counts in Elasticsearch
     * aggregations
     */
    public static final String DOC_COUNT = "doc_count";

    public static final ParseField ID = new ParseField("datafeed_id");
    public static final ParseField QUERY_DELAY = new ParseField("query_delay");
    public static final ParseField FREQUENCY = new ParseField("frequency");
    public static final ParseField INDEXES = new ParseField("indexes");
    public static final ParseField TYPES = new ParseField("types");
    public static final ParseField QUERY = new ParseField("query");
    public static final ParseField SCROLL_SIZE = new ParseField("scroll_size");
    public static final ParseField AGGREGATIONS = new ParseField("aggregations");
    public static final ParseField AGGS = new ParseField("aggs");
    public static final ParseField SCRIPT_FIELDS = new ParseField("script_fields");
    public static final ParseField SOURCE = new ParseField("_source");
    public static final ParseField CHUNKING_CONFIG = new ParseField("chunking_config");

    public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("datafeed_config", Builder::new);

    static {
        PARSER.declareString(Builder::setId, ID);
        PARSER.declareString(Builder::setJobId, Job.ID);
        PARSER.declareStringArray(Builder::setIndexes, INDEXES);
        PARSER.declareStringArray(Builder::setTypes, TYPES);
        PARSER.declareLong(Builder::setQueryDelay, QUERY_DELAY);
        PARSER.declareLong(Builder::setFrequency, FREQUENCY);
        PARSER.declareObject(Builder::setQuery,
                (p, c) -> new QueryParseContext(p).parseInnerQueryBuilder(), QUERY);
        PARSER.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(new QueryParseContext(p)),
                AGGREGATIONS);
        PARSER.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(new QueryParseContext(p)), AGGS);
        PARSER.declareObject(Builder::setScriptFields, (p, c) -> {
            List<SearchSourceBuilder.ScriptField> parsedScriptFields = new ArrayList<>();
            while (p.nextToken() != XContentParser.Token.END_OBJECT) {
                parsedScriptFields.add(new SearchSourceBuilder.ScriptField(new QueryParseContext(p)));
            }
            parsedScriptFields.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName));
            return parsedScriptFields;
        }, SCRIPT_FIELDS);
        PARSER.declareInt(Builder::setScrollSize, SCROLL_SIZE);
        PARSER.declareBoolean(Builder::setSource, SOURCE);
        PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSER, CHUNKING_CONFIG);
    }

    private final String id;
    private final String jobId;

    /**
     * The delay in seconds before starting to query a period of time
     */
    private final Long queryDelay;

    /**
     * The frequency in seconds with which queries are executed
     */
    private final Long frequency;

    private final List<String> indexes;
    private final List<String> types;
    private final QueryBuilder query;
    private final AggregatorFactories.Builder aggregations;
    private final List<SearchSourceBuilder.ScriptField> scriptFields;
    private final Integer scrollSize;
    private final boolean source;
    private final ChunkingConfig chunkingConfig;

    private DatafeedConfig(String id, String jobId, Long queryDelay, Long frequency, List<String> indexes, List<String> types,
                           QueryBuilder query, AggregatorFactories.Builder aggregations, List<SearchSourceBuilder.ScriptField> scriptFields,
                           Integer scrollSize, boolean source, ChunkingConfig chunkingConfig) {
        this.id = id;
        this.jobId = jobId;
        this.queryDelay = queryDelay;
        this.frequency = frequency;
        this.indexes = indexes;
        this.types = types;
        this.query = query;
        this.aggregations = aggregations;
        this.scriptFields = scriptFields;
        this.scrollSize = scrollSize;
        this.source = source;
        this.chunkingConfig = chunkingConfig;
    }

    public DatafeedConfig(StreamInput in) throws IOException {
        this.id = in.readString();
        this.jobId = in.readString();
        this.queryDelay = in.readOptionalLong();
        this.frequency = in.readOptionalLong();
        if (in.readBoolean()) {
            this.indexes = in.readList(StreamInput::readString);
        } else {
            this.indexes = null;
        }
        if (in.readBoolean()) {
            this.types = in.readList(StreamInput::readString);
        } else {
            this.types = null;
        }
        this.query = in.readNamedWriteable(QueryBuilder.class);
        this.aggregations = in.readOptionalWriteable(AggregatorFactories.Builder::new);
        if (in.readBoolean()) {
            this.scriptFields = in.readList(SearchSourceBuilder.ScriptField::new);
        } else {
            this.scriptFields = null;
        }
        this.scrollSize = in.readOptionalVInt();
        this.source = in.readBoolean();
        this.chunkingConfig = in.readOptionalWriteable(ChunkingConfig::new);
    }

    public String getId() {
        return id;
    }

    public String getJobId() {
        return jobId;
    }

    public Long getQueryDelay() {
        return queryDelay;
    }

    public Long getFrequency() {
        return frequency;
    }

    /**
     * For the ELASTICSEARCH data source only, one or more indexes to search for
     * input data.
     *
     * @return The indexes to search, or <code>null</code> if not set.
     */
    public List<String> getIndexes() {
        return indexes;
    }

    /**
     * For the ELASTICSEARCH data source only, one or more types to search for
     * input data.
     *
     * @return The types to search, or <code>null</code> if not set.
     */
    public List<String> getTypes() {
        return types;
    }

    public Integer getScrollSize() {
        return scrollSize;
    }

    public boolean isSource() {
        return source;
    }

    public QueryBuilder getQuery() {
        return query;
    }

    public AggregatorFactories.Builder getAggregations() {
        return aggregations;
    }

    public List<SearchSourceBuilder.ScriptField> getScriptFields() {
        return scriptFields == null ? Collections.emptyList() : scriptFields;
    }

    public ChunkingConfig getChunkingConfig() {
        return chunkingConfig;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(id);
        out.writeString(jobId);
        out.writeOptionalLong(queryDelay);
        out.writeOptionalLong(frequency);
        if (indexes != null) {
            out.writeBoolean(true);
            out.writeStringList(indexes);
        } else {
            out.writeBoolean(false);
        }
        if (types != null) {
            out.writeBoolean(true);
            out.writeStringList(types);
        } else {
            out.writeBoolean(false);
        }
        out.writeNamedWriteable(query);
        out.writeOptionalWriteable(aggregations);
        if (scriptFields != null) {
            out.writeBoolean(true);
            out.writeList(scriptFields);
        } else {
            out.writeBoolean(false);
        }
        out.writeOptionalVInt(scrollSize);
        out.writeBoolean(source);
        out.writeOptionalWriteable(chunkingConfig);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        doXContentBody(builder, params);
        builder.endObject();
        return builder;
    }

    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.field(ID.getPreferredName(), id);
        builder.field(Job.ID.getPreferredName(), jobId);
        builder.field(QUERY_DELAY.getPreferredName(), queryDelay);
        if (frequency != null) {
            builder.field(FREQUENCY.getPreferredName(), frequency);
        }
        builder.field(INDEXES.getPreferredName(), indexes);
        builder.field(TYPES.getPreferredName(), types);
        builder.field(QUERY.getPreferredName(), query);
        if (aggregations != null) {
            builder.field(AGGREGATIONS.getPreferredName(), aggregations);
        }
        if (scriptFields != null) {
            builder.startObject(SCRIPT_FIELDS.getPreferredName());
            for (SearchSourceBuilder.ScriptField scriptField : scriptFields) {
                scriptField.toXContent(builder, params);
            }
            builder.endObject();
        }
        builder.field(SCROLL_SIZE.getPreferredName(), scrollSize);
        if (source) {
            builder.field(SOURCE.getPreferredName(), source);
        }
        if (chunkingConfig != null) {
            builder.field(CHUNKING_CONFIG.getPreferredName(), chunkingConfig);
        }
        return builder;
    }

    /**
     * The lists of indexes and types are compared for equality but they are not
     * sorted first so this test could fail simply because the indexes and types
     * lists are in different orders.
     */
    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other instanceof DatafeedConfig == false) {
            return false;
        }

        DatafeedConfig that = (DatafeedConfig) other;

        return Objects.equals(this.id, that.id)
                && Objects.equals(this.jobId, that.jobId)
                && Objects.equals(this.frequency, that.frequency)
                && Objects.equals(this.queryDelay, that.queryDelay)
                && Objects.equals(this.indexes, that.indexes)
                && Objects.equals(this.types, that.types)
                && Objects.equals(this.query, that.query)
                && Objects.equals(this.scrollSize, that.scrollSize)
                && Objects.equals(this.aggregations, that.aggregations)
                && Objects.equals(this.scriptFields, that.scriptFields)
                && Objects.equals(this.source, that.source)
                && Objects.equals(this.chunkingConfig, that.chunkingConfig);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, jobId, frequency, queryDelay, indexes, types, query, scrollSize, aggregations, scriptFields, source,
                chunkingConfig);
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    public static class Builder {

        private static final int DEFAULT_SCROLL_SIZE = 1000;
        private static final long DEFAULT_ELASTICSEARCH_QUERY_DELAY = 60L;

        private String id;
        private String jobId;
        private Long queryDelay = DEFAULT_ELASTICSEARCH_QUERY_DELAY;
        private Long frequency;
        private List<String> indexes = Collections.emptyList();
        private List<String> types = Collections.emptyList();
        private QueryBuilder query = QueryBuilders.matchAllQuery();
        private AggregatorFactories.Builder aggregations;
        private List<SearchSourceBuilder.ScriptField> scriptFields;
        private Integer scrollSize = DEFAULT_SCROLL_SIZE;
        private boolean source = false;
        private ChunkingConfig chunkingConfig;

        public Builder() {
        }

        public Builder(String id, String jobId) {
            this();
            this.id = ExceptionsHelper.requireNonNull(id, ID.getPreferredName());
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
        }

        public Builder(DatafeedConfig config) {
            this.id = config.id;
            this.jobId = config.jobId;
            this.queryDelay = config.queryDelay;
            this.frequency = config.frequency;
            this.indexes = config.indexes;
            this.types = config.types;
            this.query = config.query;
            this.aggregations = config.aggregations;
            this.scriptFields = config.scriptFields;
            this.scrollSize = config.scrollSize;
            this.source = config.source;
            this.chunkingConfig = config.chunkingConfig;
        }

        public void setId(String datafeedId) {
            id = ExceptionsHelper.requireNonNull(datafeedId, ID.getPreferredName());
        }

        public void setJobId(String jobId) {
            this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
        }

        public void setIndexes(List<String> indexes) {
            this.indexes = ExceptionsHelper.requireNonNull(indexes, INDEXES.getPreferredName());
        }

        public void setTypes(List<String> types) {
            this.types = ExceptionsHelper.requireNonNull(types, TYPES.getPreferredName());
        }

        public void setQueryDelay(long queryDelay) {
            if (queryDelay < 0) {
                String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE,
                        DatafeedConfig.QUERY_DELAY.getPreferredName(), queryDelay);
                throw new IllegalArgumentException(msg);
            }
            this.queryDelay = queryDelay;
        }

        public void setFrequency(long frequency) {
            if (frequency <= 0) {
                String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE,
                        DatafeedConfig.FREQUENCY.getPreferredName(), frequency);
                throw new IllegalArgumentException(msg);
            }
            this.frequency = frequency;
        }

        public void setQuery(QueryBuilder query) {
            this.query = ExceptionsHelper.requireNonNull(query, QUERY.getPreferredName());
        }

        public void setAggregations(AggregatorFactories.Builder aggregations) {
            this.aggregations = aggregations;
        }

        public void setScriptFields(List<SearchSourceBuilder.ScriptField> scriptFields) {
            List<SearchSourceBuilder.ScriptField> sorted = new ArrayList<>();
            for (SearchSourceBuilder.ScriptField scriptField : scriptFields) {
                String script = scriptField.script().getIdOrCode();

                if (script.contains("domainSplit(")) {
                    String modifiedCode = DomainSplitFunction.function + "\n" + script;
                    Map<String, Object> modifiedParams = new HashMap<>(scriptField.script().getParams().size()
                            + DomainSplitFunction.params.size());

                    modifiedParams.putAll(scriptField.script().getParams());
                    modifiedParams.putAll(DomainSplitFunction.params);

                    Script newScript = new Script(scriptField.script().getType(), scriptField.script().getLang(),
                            modifiedCode, modifiedParams);

                    sorted.add(new SearchSourceBuilder.ScriptField(scriptField.fieldName(), newScript, scriptField.ignoreFailure()));
                } else {
                    sorted.add(scriptField);
                }
            }
            sorted.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName));
            this.scriptFields = sorted;
        }

        public void setScrollSize(int scrollSize) {
            if (scrollSize < 0) {
                String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE,
                        DatafeedConfig.SCROLL_SIZE.getPreferredName(), scrollSize);
                throw new IllegalArgumentException(msg);
            }
            this.scrollSize = scrollSize;
        }

        public void setSource(boolean enabled) {
            this.source = enabled;
        }

        public void setChunkingConfig(ChunkingConfig chunkingConfig) {
            this.chunkingConfig = chunkingConfig;
        }

        public DatafeedConfig build() {
            ExceptionsHelper.requireNonNull(id, ID.getPreferredName());
            ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
            if (!MlStrings.isValidId(id)) {
                throw new IllegalArgumentException(Messages.getMessage(Messages.INVALID_ID, ID.getPreferredName()));
            }
            if (indexes == null || indexes.isEmpty() || indexes.contains(null) || indexes.contains("")) {
                throw invalidOptionValue(INDEXES.getPreferredName(), indexes);
            }
            if (types == null || types.isEmpty() || types.contains(null) || types.contains("")) {
                throw invalidOptionValue(TYPES.getPreferredName(), types);
            }
            if (aggregations != null && (scriptFields != null && !scriptFields.isEmpty())) {
                throw new IllegalArgumentException(Messages.getMessage(Messages.DATAFEED_CONFIG_CANNOT_USE_SCRIPT_FIELDS_WITH_AGGS));
            }
            return new DatafeedConfig(id, jobId, queryDelay, frequency, indexes, types, query, aggregations, scriptFields, scrollSize,
                    source, chunkingConfig);
        }

        private static ElasticsearchException invalidOptionValue(String fieldName, Object value) {
            String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, fieldName, value);
            throw new IllegalArgumentException(msg);
        }
    }

}
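A minimal sketch of the Builder contract above; the ids, index, and type names are illustrative, not from the original change:

// Hypothetical builder usage; "datafeed-1" and "job-1" are made-up ids.
DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed-1", "job-1");
builder.setIndexes(Collections.singletonList("metrics")); // must be non-empty (enforced by build())
builder.setTypes(Collections.singletonList("doc"));       // must be non-empty (enforced by build())
builder.setQueryDelay(60L);                               // seconds; negative values are rejected
DatafeedConfig datafeed = builder.build();                // validates id, indexes, types, aggs vs script_fields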
@ -0,0 +1,236 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.xpack.ml.action.FlushJobAction;
import org.elasticsearch.xpack.ml.action.PostDataAction;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.ml.job.config.DataDescription;
import org.elasticsearch.xpack.ml.notifications.Auditor;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Supplier;

class DatafeedJob {

    private static final Logger LOGGER = Loggers.getLogger(DatafeedJob.class);
    private static final int NEXT_TASK_DELAY_MS = 100;

    private final Auditor auditor;
    private final String jobId;
    private final DataDescription dataDescription;
    private final long frequencyMs;
    private final long queryDelayMs;
    private final Client client;
    private final DataExtractorFactory dataExtractorFactory;
    private final Supplier<Long> currentTimeSupplier;

    private volatile long lookbackStartTimeMs;
    private volatile Long lastEndTimeMs;
    private AtomicBoolean running = new AtomicBoolean(true);

    DatafeedJob(String jobId, DataDescription dataDescription, long frequencyMs, long queryDelayMs,
                DataExtractorFactory dataExtractorFactory, Client client, Auditor auditor, Supplier<Long> currentTimeSupplier,
                long latestFinalBucketEndTimeMs, long latestRecordTimeMs) {
        this.jobId = jobId;
        this.dataDescription = Objects.requireNonNull(dataDescription);
        this.frequencyMs = frequencyMs;
        this.queryDelayMs = queryDelayMs;
        this.dataExtractorFactory = dataExtractorFactory;
        this.client = client;
        this.auditor = auditor;
        this.currentTimeSupplier = currentTimeSupplier;

        long lastEndTime = Math.max(latestFinalBucketEndTimeMs, latestRecordTimeMs);
        if (lastEndTime > 0) {
            lastEndTimeMs = lastEndTime;
        }
    }

    Long runLookBack(long startTime, Long endTime) throws Exception {
        lookbackStartTimeMs = (lastEndTimeMs != null && lastEndTimeMs + 1 > startTime) ? lastEndTimeMs + 1 : startTime;
        Optional<Long> endMs = Optional.ofNullable(endTime);
        long lookbackEnd = endMs.orElse(currentTimeSupplier.get() - queryDelayMs);
        boolean isLookbackOnly = endMs.isPresent();
        if (lookbackEnd <= lookbackStartTimeMs) {
            if (isLookbackOnly) {
                return null;
            } else {
                auditor.info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_STARTED_REALTIME));
                return nextRealtimeTimestamp();
            }
        }

        String msg = Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_STARTED_FROM_TO,
                DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.printer().print(lookbackStartTimeMs),
                DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.printer().print(lookbackEnd));
        auditor.info(msg);

        FlushJobAction.Request request = new FlushJobAction.Request(jobId);
        request.setCalcInterim(true);
        run(lookbackStartTimeMs, lookbackEnd, request);
        auditor.info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_LOOKBACK_COMPLETED));
        LOGGER.info("[{}] Lookback has finished", jobId);
        if (isLookbackOnly) {
            return null;
        } else {
            auditor.info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_CONTINUED_REALTIME));
            return nextRealtimeTimestamp();
        }
    }

    long runRealtime() throws Exception {
        long start = lastEndTimeMs == null ? lookbackStartTimeMs : lastEndTimeMs + 1;
        long nowMinusQueryDelay = currentTimeSupplier.get() - queryDelayMs;
        long end = toIntervalStartEpochMs(nowMinusQueryDelay);
        FlushJobAction.Request request = new FlushJobAction.Request(jobId);
        request.setCalcInterim(true);
        request.setAdvanceTime(String.valueOf(lastEndTimeMs));
        run(start, end, request);
        return nextRealtimeTimestamp();
    }

    /**
     * Stops the datafeed job
     *
     * @return <code>true</code> when the datafeed was running and this method invocation stopped it,
     *         otherwise <code>false</code> is returned
     */
    public boolean stop() {
        if (running.compareAndSet(true, false)) {
            auditor.info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_STOPPED));
            return true;
        } else {
            return false;
        }
    }

    public boolean isRunning() {
        return running.get();
    }

    private void run(long start, long end, FlushJobAction.Request flushRequest) throws IOException {
        if (end <= start) {
            return;
        }

        LOGGER.trace("[{}] Searching data in: [{}, {})", jobId, start, end);

        RuntimeException error = null;
        long recordCount = 0;
        DataExtractor dataExtractor = dataExtractorFactory.newExtractor(start, end);
        while (dataExtractor.hasNext()) {
            if (!isRunning() && !dataExtractor.isCancelled()) {
                dataExtractor.cancel();
            }

            Optional<InputStream> extractedData;
            try {
                extractedData = dataExtractor.next();
            } catch (Exception e) {
                error = new ExtractionProblemException(e);
                break;
            }
            if (extractedData.isPresent()) {
                DataCounts counts;
                try (InputStream in = extractedData.get()) {
                    counts = postData(in);
                } catch (Exception e) {
                    if (e instanceof InterruptedException) {
                        Thread.currentThread().interrupt();
                    }
                    error = new AnalysisProblemException(e);
                    break;
                }
                recordCount += counts.getProcessedRecordCount();
                if (counts.getLatestRecordTimeStamp() != null) {
                    lastEndTimeMs = counts.getLatestRecordTimeStamp().getTime();
                }
            }
        }

        lastEndTimeMs = Math.max(lastEndTimeMs == null ? 0 : lastEndTimeMs, end - 1);

        // Ensure time is always advanced in order to avoid importing duplicate data.
        // This is the reason we store the error rather than throw inline.
        if (error != null) {
            throw error;
        }

        if (recordCount == 0) {
            throw new EmptyDataCountException();
        }

        try {
            client.execute(FlushJobAction.INSTANCE, flushRequest).get();
        } catch (Exception e) {
            if (e instanceof InterruptedException) {
                Thread.currentThread().interrupt();
            }
            throw new RuntimeException(e);
        }
    }

    private DataCounts postData(InputStream inputStream) throws IOException, ExecutionException, InterruptedException {
        PostDataAction.Request request = new PostDataAction.Request(jobId);
        request.setDataDescription(dataDescription);
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        Streams.copy(inputStream, outputStream);
        request.setContent(new BytesArray(outputStream.toByteArray()));
        PostDataAction.Response response = client.execute(PostDataAction.INSTANCE, request).get();
        return response.getDataCounts();
    }

    private long nextRealtimeTimestamp() {
        long epochMs = currentTimeSupplier.get() + frequencyMs;
        return toIntervalStartEpochMs(epochMs) + NEXT_TASK_DELAY_MS;
    }

    private long toIntervalStartEpochMs(long epochMs) {
        return (epochMs / frequencyMs) * frequencyMs;
    }

    class AnalysisProblemException extends RuntimeException {

        final long nextDelayInMsSinceEpoch = nextRealtimeTimestamp();

        AnalysisProblemException(Throwable cause) {
            super(cause);
        }
    }

    class ExtractionProblemException extends RuntimeException {

        final long nextDelayInMsSinceEpoch = nextRealtimeTimestamp();

        ExtractionProblemException(Throwable cause) {
            super(cause);
        }
    }

    class EmptyDataCountException extends RuntimeException {

        final long nextDelayInMsSinceEpoch = nextRealtimeTimestamp();

        EmptyDataCountException() {}
    }

}
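In outline, the lifecycle a caller drives (see DatafeedJobRunner below): a null lookback result means lookback-only mode, otherwise the returned timestamp schedules the next realtime run. This is a sketch; waitUntil is a hypothetical stand-in for the thread-pool scheduling the runner actually performs:

// Hypothetical driver loop over a DatafeedJob instance.
Long next = datafeedJob.runLookBack(startTime, endTime); // null when endTime was given (lookback only)
while (next != null && datafeedJob.isRunning()) {
    waitUntil(next);                   // placeholder; the real code uses threadPool.schedule
    next = datafeedJob.runRealtime();  // returns the next interval-aligned timestamp
}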
@ -0,0 +1,248 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed;

import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.ml.MlPlugin;
import org.elasticsearch.xpack.ml.action.StartDatafeedAction;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
import org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationDataExtractorFactory;
import org.elasticsearch.xpack.ml.datafeed.extractor.chunked.ChunkedDataExtractorFactory;
import org.elasticsearch.xpack.ml.datafeed.extractor.scroll.ScrollDataExtractorFactory;
import org.elasticsearch.xpack.ml.job.config.DataDescription;
import org.elasticsearch.xpack.ml.job.config.DefaultFrequency;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.job.persistence.BucketsQueryBuilder;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.ml.job.results.Bucket;
import org.elasticsearch.xpack.ml.notifications.Auditor;

import java.time.Duration;
import java.util.Collections;
import java.util.Objects;
import java.util.concurrent.Future;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Supplier;

public class DatafeedJobRunner extends AbstractComponent {

    private static final String INF_SYMBOL = "\u221E";

    private final Client client;
    private final ClusterService clusterService;
    private final JobProvider jobProvider;
    private final ThreadPool threadPool;
    private final Supplier<Long> currentTimeSupplier;

    public DatafeedJobRunner(ThreadPool threadPool, Client client, ClusterService clusterService, JobProvider jobProvider,
                             Supplier<Long> currentTimeSupplier) {
        super(Settings.EMPTY);
        this.client = Objects.requireNonNull(client);
        this.clusterService = Objects.requireNonNull(clusterService);
        this.jobProvider = Objects.requireNonNull(jobProvider);
        this.threadPool = threadPool;
        this.currentTimeSupplier = Objects.requireNonNull(currentTimeSupplier);
    }

    public void run(String datafeedId, long startTime, Long endTime, StartDatafeedAction.DatafeedTask task,
                    Consumer<Exception> handler) {
        MlMetadata mlMetadata = clusterService.state().metaData().custom(MlMetadata.TYPE);
        DatafeedConfig datafeed = mlMetadata.getDatafeed(datafeedId);
        Job job = mlMetadata.getJobs().get(datafeed.getJobId());
        gatherInformation(job.getId(), (buckets, dataCounts) -> {
            long latestFinalBucketEndMs = -1L;
            Duration bucketSpan = Duration.ofSeconds(job.getAnalysisConfig().getBucketSpan());
            if (buckets.results().size() == 1) {
                latestFinalBucketEndMs = buckets.results().get(0).getTimestamp().getTime() + bucketSpan.toMillis() - 1;
            }
            long latestRecordTimeMs = -1L;
            if (dataCounts.getLatestRecordTimeStamp() != null) {
                latestRecordTimeMs = dataCounts.getLatestRecordTimeStamp().getTime();
            }
            Holder holder = createJobDatafeed(datafeed, job, latestFinalBucketEndMs, latestRecordTimeMs, handler, task);
            innerRun(holder, startTime, endTime);
        }, handler);
    }

    // Important: Holder must be created and assigned to DatafeedTask before setting state to started,
    // otherwise if a stop datafeed call is made immediately after the start datafeed call we could cancel
    // the DatafeedTask without stopping datafeed, which causes the datafeed to keep on running.
    private void innerRun(Holder holder, long startTime, Long endTime) {
        logger.info("Starting datafeed [{}] for job [{}] in [{}, {})", holder.datafeed.getId(), holder.datafeed.getJobId(),
                DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.printer().print(startTime),
                endTime == null ? INF_SYMBOL : DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.printer().print(endTime));
        holder.future = threadPool.executor(MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME).submit(() -> {
            Long next = null;
            try {
                next = holder.datafeedJob.runLookBack(startTime, endTime);
            } catch (DatafeedJob.ExtractionProblemException e) {
                if (endTime == null) {
                    next = e.nextDelayInMsSinceEpoch;
                }
                holder.problemTracker.reportExtractionProblem(e.getCause().getMessage());
            } catch (DatafeedJob.AnalysisProblemException e) {
                if (endTime == null) {
                    next = e.nextDelayInMsSinceEpoch;
                }
                holder.problemTracker.reportAnalysisProblem(e.getCause().getMessage());
            } catch (DatafeedJob.EmptyDataCountException e) {
                if (endTime == null && holder.problemTracker.updateEmptyDataCount(true) == false) {
                    next = e.nextDelayInMsSinceEpoch;
                }
            } catch (Exception e) {
                logger.error("Failed lookback import for job [" + holder.datafeed.getJobId() + "]", e);
                holder.stop("general_lookback_failure", e);
                return;
            }
            if (next != null) {
                doDatafeedRealtime(next, holder.datafeed.getJobId(), holder);
            } else {
                holder.stop("no_realtime", null);
                holder.problemTracker.finishReport();
            }
        });
    }

    private void doDatafeedRealtime(long delayInMsSinceEpoch, String jobId, Holder holder) {
        if (holder.isRunning()) {
            TimeValue delay = computeNextDelay(delayInMsSinceEpoch);
            logger.debug("Waiting [{}] before executing next realtime import for job [{}]", delay, jobId);
            holder.future = threadPool.schedule(delay, MlPlugin.DATAFEED_RUNNER_THREAD_POOL_NAME, () -> {
                long nextDelayInMsSinceEpoch;
                try {
                    nextDelayInMsSinceEpoch = holder.datafeedJob.runRealtime();
                } catch (DatafeedJob.ExtractionProblemException e) {
                    nextDelayInMsSinceEpoch = e.nextDelayInMsSinceEpoch;
                    holder.problemTracker.reportExtractionProblem(e.getCause().getMessage());
                } catch (DatafeedJob.AnalysisProblemException e) {
                    nextDelayInMsSinceEpoch = e.nextDelayInMsSinceEpoch;
                    holder.problemTracker.reportAnalysisProblem(e.getCause().getMessage());
                } catch (DatafeedJob.EmptyDataCountException e) {
                    nextDelayInMsSinceEpoch = e.nextDelayInMsSinceEpoch;
                    if (holder.problemTracker.updateEmptyDataCount(true)) {
                        holder.problemTracker.finishReport();
                        holder.stop("empty_data", e);
                        return;
                    }
                } catch (Exception e) {
                    logger.error("Unexpected datafeed failure for job [" + jobId + "] stopping...", e);
                    holder.stop("general_realtime_error", e);
                    return;
                }
                holder.problemTracker.finishReport();
                doDatafeedRealtime(nextDelayInMsSinceEpoch, jobId, holder);
            });
        }
    }

    private Holder createJobDatafeed(DatafeedConfig datafeed, Job job, long finalBucketEndMs, long latestRecordTimeMs,
                                     Consumer<Exception> handler, StartDatafeedAction.DatafeedTask task) {
        Auditor auditor = jobProvider.audit(job.getId());
        Duration frequency = getFrequencyOrDefault(datafeed, job);
        Duration queryDelay = Duration.ofSeconds(datafeed.getQueryDelay());
        DataExtractorFactory dataExtractorFactory = createDataExtractorFactory(datafeed, job);
        DatafeedJob datafeedJob = new DatafeedJob(job.getId(), buildDataDescription(job), frequency.toMillis(), queryDelay.toMillis(),
                dataExtractorFactory, client, auditor, currentTimeSupplier, finalBucketEndMs, latestRecordTimeMs);
        Holder holder = new Holder(datafeed, datafeedJob, new ProblemTracker(() -> auditor), handler);
        task.setHolder(holder);
        return holder;
    }

    DataExtractorFactory createDataExtractorFactory(DatafeedConfig datafeedConfig, Job job) {
        boolean isScrollSearch = datafeedConfig.getAggregations() == null;
        DataExtractorFactory dataExtractorFactory = isScrollSearch ? new ScrollDataExtractorFactory(client, datafeedConfig, job)
                : new AggregationDataExtractorFactory(client, datafeedConfig, job);
        ChunkingConfig chunkingConfig = datafeedConfig.getChunkingConfig();
        if (chunkingConfig == null) {
            chunkingConfig = isScrollSearch ? ChunkingConfig.newAuto() : ChunkingConfig.newOff();
        }

        return chunkingConfig.isEnabled() ? new ChunkedDataExtractorFactory(client, datafeedConfig, job, dataExtractorFactory)
                : dataExtractorFactory;
    }

    private static DataDescription buildDataDescription(Job job) {
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setFormat(DataDescription.DataFormat.JSON);
        if (job.getDataDescription() != null) {
            dataDescription.setTimeField(job.getDataDescription().getTimeField());
        }
        dataDescription.setTimeFormat(DataDescription.EPOCH_MS);
        return dataDescription.build();
    }

    private void gatherInformation(String jobId, BiConsumer<QueryPage<Bucket>, DataCounts> handler, Consumer<Exception> errorHandler) {
        BucketsQueryBuilder.BucketsQuery latestBucketQuery = new BucketsQueryBuilder()
                .sortField(Bucket.TIMESTAMP.getPreferredName())
                .sortDescending(true).size(1)
                .includeInterim(false)
                .build();
        jobProvider.buckets(jobId, latestBucketQuery, buckets -> {
            jobProvider.dataCounts(jobId, dataCounts -> handler.accept(buckets, dataCounts), errorHandler);
        }, e -> {
            if (e instanceof ResourceNotFoundException) {
                QueryPage<Bucket> empty = new QueryPage<>(Collections.emptyList(), 0, Bucket.RESULT_TYPE_FIELD);
                jobProvider.dataCounts(jobId, dataCounts -> handler.accept(empty, dataCounts), errorHandler);
            } else {
                errorHandler.accept(e);
            }
        });
    }

    private static Duration getFrequencyOrDefault(DatafeedConfig datafeed, Job job) {
        Long frequency = datafeed.getFrequency();
        Long bucketSpan = job.getAnalysisConfig().getBucketSpan();
        return frequency == null ? DefaultFrequency.ofBucketSpan(bucketSpan) : Duration.ofSeconds(frequency);
    }

    private TimeValue computeNextDelay(long next) {
        return new TimeValue(Math.max(1, next - currentTimeSupplier.get()));
    }

    public class Holder {

        private final DatafeedConfig datafeed;
        private final DatafeedJob datafeedJob;
        private final ProblemTracker problemTracker;
        private final Consumer<Exception> handler;
        volatile Future<?> future;

        private Holder(DatafeedConfig datafeed, DatafeedJob datafeedJob, ProblemTracker problemTracker, Consumer<Exception> handler) {
            this.datafeed = datafeed;
            this.datafeedJob = datafeedJob;
            this.problemTracker = problemTracker;
            this.handler = handler;
        }

        boolean isRunning() {
            return datafeedJob.isRunning();
        }

        public void stop(String source, Exception e) {
            logger.info("[{}] attempt to stop datafeed [{}] for job [{}]", source, datafeed.getId(), datafeed.getJobId());
            if (datafeedJob.stop()) {
                FutureUtils.cancel(future);
                handler.accept(e);
                logger.info("[{}] datafeed [{}] for job [{}] has been stopped", source, datafeed.getId(), datafeed.getJobId());
            } else {
                logger.info("[{}] datafeed [{}] for job [{}] was already stopped", source, datafeed.getId(), datafeed.getJobId());
            }
        }

    }
}
@ -0,0 +1,31 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed;

import org.elasticsearch.xpack.ml.job.config.AnalysisConfig;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.messages.Messages;

public final class DatafeedJobValidator {

    private DatafeedJobValidator() {}

    /**
     * Validates a datafeedConfig in relation to the job it refers to
     * @param datafeedConfig the datafeed config
     * @param job the job
     */
    public static void validate(DatafeedConfig datafeedConfig, Job job) {
        AnalysisConfig analysisConfig = job.getAnalysisConfig();
        if (analysisConfig.getLatency() != null && analysisConfig.getLatency() > 0) {
            throw new IllegalArgumentException(Messages.getMessage(Messages.DATAFEED_DOES_NOT_SUPPORT_JOB_WITH_LATENCY));
        }
        if (datafeedConfig.getAggregations() != null && !DatafeedConfig.DOC_COUNT.equals(analysisConfig.getSummaryCountFieldName())) {
            throw new IllegalArgumentException(
                    Messages.getMessage(Messages.DATAFEED_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD, DatafeedConfig.DOC_COUNT));
        }
    }
}
@ -0,0 +1,40 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

import java.io.IOException;
import java.util.Locale;

public enum DatafeedState implements Writeable {

    STARTED, STOPPED;

    public static DatafeedState fromString(String name) {
        return valueOf(name.trim().toUpperCase(Locale.ROOT));
    }

    public static DatafeedState fromStream(StreamInput in) throws IOException {
        int ordinal = in.readVInt();
        if (ordinal < 0 || ordinal >= values().length) {
            throw new IOException("Unknown DatafeedState ordinal [" + ordinal + "]");
        }
        return values()[ordinal];
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(ordinal());
    }

    @Override
    public String toString() {
        return name().toLowerCase(Locale.ROOT);
    }
}
@ -0,0 +1,112 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed;

import org.elasticsearch.xpack.ml.notifications.Auditor;
import org.elasticsearch.xpack.ml.job.messages.Messages;

import java.util.Objects;
import java.util.function.Supplier;

/**
 * <p>
 * Keeps track of problems the datafeed encounters and audits
 * messages appropriately.
 * </p>
 * <p>
 * The {@code ProblemTracker} is expected to interact with multiple
 * threads (lookback executor, real-time executor). However, each
 * thread will access it in a sequential manner; therefore we
 * only need to ensure correct visibility.
 * </p>
 */
class ProblemTracker {

    private static final int EMPTY_DATA_WARN_COUNT = 10;

    private final Supplier<Auditor> auditor;

    private volatile boolean hasProblems;
    private volatile boolean hadProblems;
    private volatile String previousProblem;

    private volatile int emptyDataCount;

    ProblemTracker(Supplier<Auditor> auditor) {
        this.auditor = Objects.requireNonNull(auditor);
    }

    /**
     * Reports an analysis problem if it differs from the last seen problem
     *
     * @param problemMessage the problem message
     */
    public void reportAnalysisProblem(String problemMessage) {
        reportProblem(Messages.JOB_AUDIT_DATAFEED_DATA_ANALYSIS_ERROR, problemMessage);
    }

    /**
     * Reports an extraction problem if it differs from the last seen problem
     *
     * @param problemMessage the problem message
     */
    public void reportExtractionProblem(String problemMessage) {
        reportProblem(Messages.JOB_AUDIT_DATAFEED_DATA_EXTRACTION_ERROR, problemMessage);
    }

    /**
     * Reports the problem if it differs from the last seen problem
     *
     * @param problemMessage the problem message
     */
    private void reportProblem(String template, String problemMessage) {
        hasProblems = true;
        if (!Objects.equals(previousProblem, problemMessage)) {
            previousProblem = problemMessage;
            auditor.get().error(Messages.getMessage(template, problemMessage));
        }
    }

    /**
     * Updates the tracking of empty data cycles. If the number of consecutive empty data
     * cycles reaches {@code EMPTY_DATA_WARN_COUNT}, a warning is reported. If a non-empty
     * cycle is reported and a warning was issued previously, a recovery info is reported.
     *
     * @param empty whether the current data cycle was empty (no data seen)
     * @return {@code true} if an empty data warning was issued, {@code false} otherwise
     */
    public boolean updateEmptyDataCount(boolean empty) {
        if (empty && emptyDataCount < EMPTY_DATA_WARN_COUNT) {
            emptyDataCount++;
            if (emptyDataCount == EMPTY_DATA_WARN_COUNT) {
                auditor.get().warning(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_NO_DATA));
                return true;
            }
        } else if (!empty) {
            if (emptyDataCount >= EMPTY_DATA_WARN_COUNT) {
                auditor.get().info(Messages.getMessage(Messages.JOB_AUDIR_DATAFEED_DATA_SEEN_AGAIN));
            }
            emptyDataCount = 0;
        }
        return false;
    }

    public boolean hasProblems() {
        return hasProblems;
    }

    /**
     * Issues a recovery message if appropriate and prepares for the next report
     */
    public void finishReport() {
        if (!hasProblems && hadProblems) {
            auditor.get().info(Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_RECOVERED));
        }

        hadProblems = hasProblems;
        hasProblems = false;
    }
}
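A sketch of the per-cycle contract the runner above follows: report problems as they occur during a cycle, then call finishReport() once the cycle ends. The auditorSupplier and the message strings are illustrative assumptions:

// Illustrative cycle, assuming some Supplier<Auditor> auditorSupplier exists in scope.
ProblemTracker tracker = new ProblemTracker(auditorSupplier);
tracker.reportExtractionProblem("shard not available"); // audited as an error
tracker.reportExtractionProblem("shard not available"); // same message: not audited again
tracker.finishReport();                                 // remembers this cycle had problems
tracker.finishReport();                                 // clean cycle after a bad one: audits recovery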
@ -0,0 +1,36 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor;

import java.io.IOException;
import java.io.InputStream;
import java.util.Optional;

public interface DataExtractor {

    /**
     * @return {@code true} if the search has not finished yet, or {@code false} otherwise
     */
    boolean hasNext();

    /**
     * Returns the next available extracted data. Note that the extracted data may be
     * empty on the last call to this method.
     * @return an optional input stream with the next available extracted data
     * @throws IOException if an error occurs while extracting the data
     */
    Optional<InputStream> next() throws IOException;

    /**
     * @return {@code true} if the extractor has been cancelled, or {@code false} otherwise
     */
    boolean isCancelled();

    /**
     * Cancel the current search.
     */
    void cancel();
}
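The consumption pattern used by DatafeedJob.run earlier in this change, shown in isolation; extractor stands for any DataExtractor implementation available in scope:

// Typical consumption loop (mirrors DatafeedJob.run above).
while (extractor.hasNext()) {
    Optional<InputStream> data = extractor.next();
    if (data.isPresent()) {
        try (InputStream in = data.get()) {
            // feed the stream to the job, e.g. via PostDataAction
        }
    }
}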
@ -0,0 +1,10 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor;

public interface DataExtractorFactory {
    DataExtractor newExtractor(long start, long end);
}
@ -0,0 +1,56 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.rest.RestStatus;

import java.io.IOException;
import java.util.Arrays;

/**
 * Collects common utility methods needed by various {@link DataExtractor} implementations
 */
public final class ExtractorUtils {

    private static final Logger LOGGER = Loggers.getLogger(ExtractorUtils.class);
    private static final String EPOCH_MILLIS = "epoch_millis";

    private ExtractorUtils() {}

    /**
     * Combines a user query with a time range query.
     */
    public static QueryBuilder wrapInTimeRangeQuery(QueryBuilder userQuery, String timeField, long start, long end) {
        QueryBuilder timeQuery = new RangeQueryBuilder(timeField).gte(start).lt(end).format(EPOCH_MILLIS);
        return new BoolQueryBuilder().filter(userQuery).filter(timeQuery);
    }

    /**
     * Checks that a {@link SearchResponse} has an OK status code and no shard failures
     */
    public static void checkSearchWasSuccessful(String jobId, SearchResponse searchResponse) throws IOException {
        if (searchResponse.status() != RestStatus.OK) {
            throw new IOException("[" + jobId + "] Search request returned status code: " + searchResponse.status()
                    + ". Response was:\n" + searchResponse.toString());
        }
        ShardSearchFailure[] shardFailures = searchResponse.getShardFailures();
        if (shardFailures != null && shardFailures.length > 0) {
            LOGGER.error("[{}] Search request returned shard failures: {}", jobId, Arrays.toString(shardFailures));
            throw new IOException("[" + jobId + "] Search request returned shard failures; see more info in the logs");
        }
        int unavailableShards = searchResponse.getTotalShards() - searchResponse.getSuccessfulShards();
        if (unavailableShards > 0) {
            throw new IOException("[" + jobId + "] Search request encountered [" + unavailableShards + "] unavailable shards");
        }
    }
}
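How wrapInTimeRangeQuery composes, in a minimal sketch; the term query, field name, and epoch-millisecond bounds are illustrative assumptions:

// The user query is AND-ed with a half-open [start, end) range on the time field.
QueryBuilder userQuery = QueryBuilders.termQuery("status", "error"); // illustrative user query
QueryBuilder wrapped = ExtractorUtils.wrapInTimeRangeQuery(userQuery, "@timestamp", 1480000000000L, 1480003600000L);
// wrapped is a bool query: filter(userQuery) AND filter(range on "@timestamp", format epoch_millis)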
@ -0,0 +1,108 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.datafeed.extractor.ExtractorUtils;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Optional;

/**
 * An implementation that extracts data from elasticsearch using search with aggregations on a client.
 * Cancellation is effective only when it is called before the first time {@link #next()} is called.
 * Note that this class is NOT thread-safe.
 */
class AggregationDataExtractor implements DataExtractor {

    private static final Logger LOGGER = Loggers.getLogger(AggregationDataExtractor.class);

    private final Client client;
    private final AggregationDataExtractorContext context;
    private boolean hasNext;
    private boolean isCancelled;

    AggregationDataExtractor(Client client, AggregationDataExtractorContext dataExtractorContext) {
        this.client = Objects.requireNonNull(client);
        this.context = Objects.requireNonNull(dataExtractorContext);
        this.hasNext = true;
    }

    @Override
    public boolean hasNext() {
        return hasNext && !isCancelled;
    }

    @Override
    public boolean isCancelled() {
        return isCancelled;
    }

    @Override
    public void cancel() {
        LOGGER.trace("[{}] Data extractor received cancel request", context.jobId);
        isCancelled = true;
    }

    @Override
    public Optional<InputStream> next() throws IOException {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }
        Optional<InputStream> stream = Optional.ofNullable(search());
        hasNext = false;
        return stream;
    }

    private InputStream search() throws IOException {
        LOGGER.debug("[{}] Executing aggregated search", context.jobId);
        SearchResponse searchResponse = executeSearchRequest(buildSearchRequest());
        ExtractorUtils.checkSearchWasSuccessful(context.jobId, searchResponse);
        return processSearchResponse(searchResponse);
    }

    protected SearchResponse executeSearchRequest(SearchRequestBuilder searchRequestBuilder) {
        return searchRequestBuilder.get();
    }

    private SearchRequestBuilder buildSearchRequest() {
        SearchRequestBuilder searchRequestBuilder = SearchAction.INSTANCE.newRequestBuilder(client)
                .setIndices(context.indexes)
                .setTypes(context.types)
                .setSize(0)
                .setQuery(ExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, context.start, context.end));

        context.aggs.getAggregatorFactories().forEach(a -> searchRequestBuilder.addAggregation(a));
        context.aggs.getPipelineAggregatorFactories().forEach(a -> searchRequestBuilder.addAggregation(a));
        return searchRequestBuilder;
    }

    private InputStream processSearchResponse(SearchResponse searchResponse) throws IOException {
        if (searchResponse.getAggregations() == null) {
            return null;
        }
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        try (AggregationToJsonProcessor processor = new AggregationToJsonProcessor(outputStream)) {
            for (Aggregation agg : searchResponse.getAggregations().asList()) {
                processor.process(agg);
            }
        }
        return new ByteArrayInputStream(outputStream.toByteArray());
    }
}
@ -0,0 +1,36 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation;

import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;

import java.util.List;
import java.util.Objects;

class AggregationDataExtractorContext {

    final String jobId;
    final String timeField;
    final String[] indexes;
    final String[] types;
    final QueryBuilder query;
    final AggregatorFactories.Builder aggs;
    final long start;
    final long end;

    AggregationDataExtractorContext(String jobId, String timeField, List<String> indexes, List<String> types, QueryBuilder query,
                                    AggregatorFactories.Builder aggs, long start, long end) {
        this.jobId = Objects.requireNonNull(jobId);
        this.timeField = Objects.requireNonNull(timeField);
        this.indexes = indexes.toArray(new String[indexes.size()]);
        this.types = types.toArray(new String[types.size()]);
        this.query = Objects.requireNonNull(query);
        this.aggs = Objects.requireNonNull(aggs);
        this.start = start;
        this.end = end;
    }
}
@ -0,0 +1,41 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation;

import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
import org.elasticsearch.xpack.ml.job.config.Job;

import java.util.Objects;

public class AggregationDataExtractorFactory implements DataExtractorFactory {

    private final Client client;
    private final DatafeedConfig datafeedConfig;
    private final Job job;

    public AggregationDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job) {
        this.client = Objects.requireNonNull(client);
        this.datafeedConfig = Objects.requireNonNull(datafeedConfig);
        this.job = Objects.requireNonNull(job);
    }

    @Override
    public DataExtractor newExtractor(long start, long end) {
        AggregationDataExtractorContext dataExtractorContext = new AggregationDataExtractorContext(
                job.getId(),
                job.getDataDescription().getTimeField(),
                datafeedConfig.getIndexes(),
                datafeedConfig.getTypes(),
                datafeedConfig.getQuery(),
                datafeedConfig.getAggregations(),
                start,
                end);
        return new AggregationDataExtractor(client, dataExtractorContext);
    }
}
@ -0,0 +1,110 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation;

import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.joda.time.base.BaseDateTime;

import java.io.IOException;
import java.io.OutputStream;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * Processes {@link Aggregation} objects and writes flat JSON documents for each leaf aggregation.
 */
class AggregationToJsonProcessor implements Releasable {

    private final XContentBuilder jsonBuilder;
    private final Map<String, Object> keyValuePairs;

    AggregationToJsonProcessor(OutputStream outputStream) throws IOException {
        jsonBuilder = new XContentBuilder(JsonXContent.jsonXContent, outputStream);
        keyValuePairs = new LinkedHashMap<>();
    }

    /**
     * Processes an {@link Aggregation} and writes a flat JSON document for each of its leaf aggregations.
     * It expects aggregations to have 0..1 sub-aggregations.
     * It expects the top level aggregation to be {@link Histogram}.
     * It expects that all sub-aggregations of the top level are either {@link Terms} or {@link NumericMetricsAggregation.SingleValue}.
     */
    public void process(Aggregation aggregation) throws IOException {
        if (aggregation instanceof Histogram) {
            processHistogram((Histogram) aggregation);
        } else {
            throw new IllegalArgumentException("Top level aggregation should be [histogram]");
        }
    }
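
    // Illustrative example (hypothetical aggregation names and values, assuming
    // DatafeedConfig.DOC_COUNT resolves to "doc_count"): given a response shaped as
    //   histogram "time" -> bucket key=1000 -> terms "airline" -> bucket "AAL" (docCount=3) -> avg "responsetime"=12.5
    //                                                          -> bucket "JAL" (docCount=1) -> avg "responsetime"=3.0
    // the processor writes one flat document per leaf:
    //   {"time":1000,"airline":"AAL","responsetime":12.5,"doc_count":3}
    //   {"time":1000,"airline":"JAL","responsetime":3.0,"doc_count":1}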

    private void processHistogram(Histogram histogram) throws IOException {
        for (Histogram.Bucket bucket : histogram.getBuckets()) {
            Object timestamp = bucket.getKey();
            if (timestamp instanceof BaseDateTime) {
                timestamp = ((BaseDateTime) timestamp).getMillis();
            }
            keyValuePairs.put(histogram.getName(), timestamp);
            processNestedAggs(bucket.getDocCount(), bucket.getAggregations());
        }
    }

    private void processNestedAggs(long docCount, Aggregations subAggs) throws IOException {
        List<Aggregation> aggs = subAggs == null ? Collections.emptyList() : subAggs.asList();
        if (aggs.isEmpty()) {
            writeJsonObject(docCount);
            return;
        }
        if (aggs.size() > 1) {
            throw new IllegalArgumentException("Multiple nested aggregations are not supported");
        }
        Aggregation nestedAgg = aggs.get(0);
        if (nestedAgg instanceof Terms) {
            processTerms((Terms) nestedAgg);
        } else if (nestedAgg instanceof NumericMetricsAggregation.SingleValue) {
            processSingleValue(docCount, (NumericMetricsAggregation.SingleValue) nestedAgg);
        } else {
            throw new IllegalArgumentException("Unsupported aggregation type [" + nestedAgg.getName() + "]");
        }
    }

    private void processTerms(Terms termsAgg) throws IOException {
        for (Terms.Bucket bucket : termsAgg.getBuckets()) {
            keyValuePairs.put(termsAgg.getName(), bucket.getKey());
            processNestedAggs(bucket.getDocCount(), bucket.getAggregations());
        }
    }

    private void processSingleValue(long docCount, NumericMetricsAggregation.SingleValue singleValue) throws IOException {
        keyValuePairs.put(singleValue.getName(), singleValue.value());
        writeJsonObject(docCount);
    }

    private void writeJsonObject(long docCount) throws IOException {
        if (docCount > 0) {
            jsonBuilder.startObject();
            for (Map.Entry<String, Object> keyValue : keyValuePairs.entrySet()) {
                jsonBuilder.field(keyValue.getKey(), keyValue.getValue());
            }
            jsonBuilder.field(DatafeedConfig.DOC_COUNT, docCount);
            jsonBuilder.endObject();
        }
    }

    @Override
    public void close() {
        jsonBuilder.close();
    }
}
@ -0,0 +1,212 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor.chunked;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
import org.elasticsearch.xpack.ml.datafeed.extractor.ExtractorUtils;

import java.io.IOException;
import java.io.InputStream;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Optional;

/**
 * A wrapper {@link DataExtractor} that can be used with other extractors in order to perform
 * searches in smaller chunks of the time range.
 *
 * <p> The chunk span can be either specified or not. When not specified,
 * a heuristic is employed (see {@link DataSummary#estimateChunk()}) to automatically determine the chunk span.
 * The search is set up (see {@link #setUpChunkedSearch()}) by querying a data summary for the given time range
 * that includes the number of total hits and the earliest/latest times. Those are then used to determine the chunk span,
 * when necessary, and to jump the search forward to the time where the earliest data can be found.
 * If a search for a chunk returns empty, the set-up is performed again for the remaining time.
 *
 * <p> Cancellation's behaviour depends on the delegate extractor.
 *
 * <p> Note that this class is NOT thread-safe.
 */
public class ChunkedDataExtractor implements DataExtractor {

    private static final Logger LOGGER = Loggers.getLogger(ChunkedDataExtractor.class);

    private static final String EARLIEST_TIME = "earliest_time";
    private static final String LATEST_TIME = "latest_time";
    private static final String VALUE_SUFFIX = ".value";

    /** Let us set a minimum chunk span of 1 minute */
    private static final long MIN_CHUNK_SPAN = 60000L;

    private final Client client;
    private final DataExtractorFactory dataExtractorFactory;
    private final ChunkedDataExtractorContext context;
    private long currentStart;
    private long currentEnd;
    private long chunkSpan;
    private boolean isCancelled;
    private DataExtractor currentExtractor;

    public ChunkedDataExtractor(Client client, DataExtractorFactory dataExtractorFactory, ChunkedDataExtractorContext context) {
        this.client = Objects.requireNonNull(client);
        this.dataExtractorFactory = Objects.requireNonNull(dataExtractorFactory);
        this.context = Objects.requireNonNull(context);
        this.currentStart = context.start;
        this.currentEnd = context.start;
        this.isCancelled = false;
    }

    @Override
    public boolean hasNext() {
        boolean currentHasNext = currentExtractor != null && currentExtractor.hasNext();
        if (isCancelled()) {
            return currentHasNext;
        }
        return currentHasNext || currentEnd < context.end;
    }

    @Override
    public Optional<InputStream> next() throws IOException {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }

        if (currentExtractor == null) {
            // This is the first time next is called
            setUpChunkedSearch();
        }

        return getNextStream();
    }

    private void setUpChunkedSearch() throws IOException {
        DataSummary dataSummary = requestDataSummary();
        if (dataSummary.totalHits > 0) {
            currentStart = dataSummary.earliestTime;
            currentEnd = currentStart;
            chunkSpan = context.chunkSpan == null ? dataSummary.estimateChunk() : context.chunkSpan;
            LOGGER.info("Chunked search configured: totalHits = {}, dataTimeSpread = {} ms, chunk span = {} ms",
                    dataSummary.totalHits, dataSummary.getDataTimeSpread(), chunkSpan);
        } else {
            // search is over
            currentEnd = context.end;
        }
    }

    private DataSummary requestDataSummary() throws IOException {
        SearchRequestBuilder searchRequestBuilder = SearchAction.INSTANCE.newRequestBuilder(client)
                .setSize(0)
                .setIndices(context.indexes)
                .setTypes(context.types)
                .setQuery(ExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, currentStart, context.end))
                .addAggregation(AggregationBuilders.min(EARLIEST_TIME).field(context.timeField))
                .addAggregation(AggregationBuilders.max(LATEST_TIME).field(context.timeField));

        SearchResponse response = executeSearchRequest(searchRequestBuilder);

        ExtractorUtils.checkSearchWasSuccessful(context.jobId, response);

        Aggregations aggregations = response.getAggregations();
        long earliestTime = 0;
        long latestTime = 0;
        long totalHits = response.getHits().getTotalHits();
        if (totalHits > 0) {
            earliestTime = (long) Double.parseDouble(aggregations.getProperty(EARLIEST_TIME + VALUE_SUFFIX).toString());
            latestTime = (long) Double.parseDouble(aggregations.getProperty(LATEST_TIME + VALUE_SUFFIX).toString());
        }
        return new DataSummary(earliestTime, latestTime, totalHits);
    }

    protected SearchResponse executeSearchRequest(SearchRequestBuilder searchRequestBuilder) {
        return searchRequestBuilder.get();
    }

    private Optional<InputStream> getNextStream() throws IOException {
        while (hasNext()) {
            boolean isNewSearch = false;

            if (currentExtractor == null || currentExtractor.hasNext() == false) {
                // First search or the current search finished; we can advance to the next search
                advanceTime();
                isNewSearch = true;
            }

            Optional<InputStream> nextStream = currentExtractor.next();
            if (nextStream.isPresent()) {
                return nextStream;
            }

            if (isNewSearch && hasNext()) {
                // If it was a new search it means it returned 0 results. Thus,
                // we reconfigure and jump to the next time interval where there are data.
                setUpChunkedSearch();
            }
        }
        return Optional.empty();
    }

    private void advanceTime() {
        currentStart = currentEnd;
        currentEnd = Math.min(currentStart + chunkSpan, context.end);
        currentExtractor = dataExtractorFactory.newExtractor(currentStart, currentEnd);
        LOGGER.trace("advances time to [{}, {})", currentStart, currentEnd);
    }

    @Override
    public boolean isCancelled() {
        return isCancelled;
    }

    @Override
    public void cancel() {
        if (currentExtractor != null) {
            currentExtractor.cancel();
        }
        isCancelled = true;
    }

    private class DataSummary {

        private long earliestTime;
        private long latestTime;
        private long totalHits;

        private DataSummary(long earliestTime, long latestTime, long totalHits) {
            this.earliestTime = earliestTime;
            this.latestTime = latestTime;
            this.totalHits = totalHits;
        }

        private long getDataTimeSpread() {
            return latestTime - earliestTime;
        }

        /**
         * The heuristic here is that we want a time interval where we expect roughly scrollSize documents
         * (assuming data are uniformly spread over time).
         * We have totalHits documents over dataTimeSpread (latestTime - earliestTime), so we want scrollSize documents over chunk.
         * Thus, the interval would be (scrollSize * dataTimeSpread) / totalHits.
         * However, assuming this as the chunk span may often lead to half-filled pages or empty searches.
         * It is beneficial to take a multiple of that. Based on benchmarking, we set this to 10x.
         */
        private long estimateChunk() {
            long dataTimeSpread = getDataTimeSpread();
            if (totalHits <= 0 || dataTimeSpread <= 0) {
                return context.end - currentEnd;
            }
            long estimatedChunk = 10 * (context.scrollSize * getDataTimeSpread()) / totalHits;
            return Math.max(estimatedChunk, MIN_CHUNK_SPAN);
        }
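
        // Illustrative arithmetic (hypothetical values): with scrollSize = 1000, totalHits = 1,000,000
        // and a data time spread of 10 days (864,000,000 ms), the estimate is
        //   10 * (1000 * 864,000,000) / 1,000,000 = 8,640,000 ms, i.e. a 2.4 hour chunk,
        // so each chunk is expected to contain roughly 10 * scrollSize documents.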
    }
}
@ -0,0 +1,38 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor.chunked;

import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.index.query.QueryBuilder;

import java.util.List;
import java.util.Objects;

class ChunkedDataExtractorContext {

    final String jobId;
    final String timeField;
    final String[] indexes;
    final String[] types;
    final QueryBuilder query;
    final int scrollSize;
    final long start;
    final long end;
    final Long chunkSpan;

    ChunkedDataExtractorContext(String jobId, String timeField, List<String> indexes, List<String> types,
                                QueryBuilder query, int scrollSize, long start, long end, @Nullable Long chunkSpan) {
        this.jobId = Objects.requireNonNull(jobId);
        this.timeField = Objects.requireNonNull(timeField);
        this.indexes = indexes.toArray(new String[indexes.size()]);
        this.types = types.toArray(new String[types.size()]);
        this.query = Objects.requireNonNull(query);
        this.scrollSize = scrollSize;
        this.start = start;
        this.end = end;
        this.chunkSpan = chunkSpan;
    }
}
@ -0,0 +1,44 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor.chunked;

import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;
import org.elasticsearch.xpack.ml.job.config.Job;

import java.util.Objects;

public class ChunkedDataExtractorFactory implements DataExtractorFactory {

    private final Client client;
    private final DatafeedConfig datafeedConfig;
    private final Job job;
    private final DataExtractorFactory dataExtractorFactory;

    public ChunkedDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job, DataExtractorFactory dataExtractorFactory) {
        this.client = Objects.requireNonNull(client);
        this.datafeedConfig = Objects.requireNonNull(datafeedConfig);
        this.job = Objects.requireNonNull(job);
        this.dataExtractorFactory = Objects.requireNonNull(dataExtractorFactory);
    }

    @Override
    public DataExtractor newExtractor(long start, long end) {
        ChunkedDataExtractorContext dataExtractorContext = new ChunkedDataExtractorContext(
                job.getId(),
                job.getDataDescription().getTimeField(),
                datafeedConfig.getIndexes(),
                datafeedConfig.getTypes(),
                datafeedConfig.getQuery(),
                datafeedConfig.getScrollSize(),
                start,
                end,
                datafeedConfig.getChunkingConfig() == null ? null : datafeedConfig.getChunkingConfig().getTimeSpan());
        return new ChunkedDataExtractor(client, dataExtractorFactory, dataExtractorContext);
    }
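
    // Hypothetical wiring sketch (the actual factory selection lives elsewhere in the codebase):
    // a chunked extractor typically wraps a concrete delegate factory, e.g.
    //   DataExtractorFactory scrollFactory = new ScrollDataExtractorFactory(client, datafeedConfig, job);
    //   DataExtractorFactory chunked = new ChunkedDataExtractorFactory(client, datafeedConfig, job, scrollFactory);
    // so each chunk [start, end) is served by a fresh delegate extractor.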
}
@ -0,0 +1,134 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;

import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.joda.time.base.BaseDateTime;

import java.util.List;
import java.util.Map;
import java.util.Objects;

abstract class ExtractedField {

    public enum ExtractionMethod {
        SOURCE, DOC_VALUE, SCRIPT_FIELD
    }

    protected final String name;
    private final ExtractionMethod extractionMethod;

    protected ExtractedField(String name, ExtractionMethod extractionMethod) {
        this.name = Objects.requireNonNull(name);
        this.extractionMethod = Objects.requireNonNull(extractionMethod);
    }

    public String getName() {
        return name;
    }

    public ExtractionMethod getExtractionMethod() {
        return extractionMethod;
    }

    public abstract Object[] value(SearchHit hit);

    public static ExtractedField newTimeField(String name, ExtractionMethod extractionMethod) {
        if (extractionMethod == ExtractionMethod.SOURCE) {
            throw new IllegalArgumentException("time field cannot be extracted from source");
        }
        return new TimeField(name, extractionMethod);
    }

    public static ExtractedField newField(String name, ExtractionMethod extractionMethod) {
        switch (extractionMethod) {
            case DOC_VALUE:
            case SCRIPT_FIELD:
                return new FromFields(name, extractionMethod);
            case SOURCE:
                return new FromSource(name, extractionMethod);
            default:
                throw new IllegalArgumentException("Invalid extraction method [" + extractionMethod + "]");
        }
    }

    private static class FromFields extends ExtractedField {

        FromFields(String name, ExtractionMethod extractionMethod) {
            super(name, extractionMethod);
        }

        @Override
        public Object[] value(SearchHit hit) {
            SearchHitField keyValue = hit.field(name);
            if (keyValue != null) {
                List<Object> values = keyValue.getValues();
                return values.toArray(new Object[values.size()]);
            }
            return new Object[0];
        }
    }

    private static class TimeField extends FromFields {

        TimeField(String name, ExtractionMethod extractionMethod) {
            super(name, extractionMethod);
        }

        @Override
        public Object[] value(SearchHit hit) {
            Object[] value = super.value(hit);
            if (value.length != 1) {
                return value;
            }
            value[0] = ((BaseDateTime) value[0]).getMillis();
            return value;
        }
    }

    private static class FromSource extends ExtractedField {

        private String[] namePath;

        FromSource(String name, ExtractionMethod extractionMethod) {
            super(name, extractionMethod);
            namePath = name.split("\\.");
        }

        @Override
        public Object[] value(SearchHit hit) {
            Map<String, Object> source = hit.getSourceAsMap();
            int level = 0;
            while (source != null && level < namePath.length - 1) {
                source = getNextLevel(source, namePath[level]);
                level++;
            }
            if (source != null) {
                Object values = source.get(namePath[level]);
                if (values != null) {
                    if (values instanceof List<?>) {
                        @SuppressWarnings("unchecked")
                        List<Object> asList = (List<Object>) values;
                        return asList.toArray(new Object[asList.size()]);
                    } else {
                        return new Object[]{values};
                    }
                }
            }
            return new Object[0];
        }
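
        // Illustrative example (hypothetical field and document): for name "a.b.c" the namePath
        // is ["a", "b", "c"], so a source of {"a": {"b": {"c": 42}}} yields [42], a source of
        // {"a": {"b": {"c": [1, 2]}}} yields [1, 2], and a missing path yields an empty array.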

        @SuppressWarnings("unchecked")
        private static Map<String, Object> getNextLevel(Map<String, Object> source, String key) {
            Object nextLevel = source.get(key);
            if (nextLevel instanceof Map<?, ?>) {
                return (Map<String, Object>) source.get(key);
            }
            return null;
        }
    }
}
@ -0,0 +1,86 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;

import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

class ExtractedFields {

    private final ExtractedField timeField;
    private final List<ExtractedField> allFields;

    ExtractedFields(ExtractedField timeField, List<ExtractedField> allFields) {
        if (!allFields.contains(timeField)) {
            throw new IllegalArgumentException("timeField should also be contained in allFields");
        }
        this.timeField = Objects.requireNonNull(timeField);
        this.allFields = Collections.unmodifiableList(allFields);
    }

    public List<ExtractedField> getAllFields() {
        return allFields;
    }

    public String[] getSourceFields() {
        return filterFields(ExtractedField.ExtractionMethod.SOURCE);
    }

    public String[] getDocValueFields() {
        return filterFields(ExtractedField.ExtractionMethod.DOC_VALUE);
    }

    private String[] filterFields(ExtractedField.ExtractionMethod method) {
        List<String> result = new ArrayList<>();
        for (ExtractedField field : allFields) {
            if (field.getExtractionMethod() == method) {
                result.add(field.getName());
            }
        }
        return result.toArray(new String[result.size()]);
    }

    public String timeField() {
        return timeField.getName();
    }

    public Long timeFieldValue(SearchHit hit) {
        Object[] value = timeField.value(hit);
        if (value.length != 1) {
            throw new RuntimeException("Time field [" + timeField.getName() + "] expected a single value; actual was: "
                    + Arrays.toString(value));
        }
        if (value[0] instanceof Long) {
            return (Long) value[0];
        }
        throw new RuntimeException("Time field [" + timeField.getName() + "] expected a long value; actual was: " + value[0]);
    }

    public static ExtractedFields build(Job job, DatafeedConfig datafeedConfig) {
        Set<String> scriptFields = datafeedConfig.getScriptFields().stream().map(sf -> sf.fieldName()).collect(Collectors.toSet());
        String timeField = job.getDataDescription().getTimeField();
        ExtractedField timeExtractedField = ExtractedField.newTimeField(timeField, scriptFields.contains(timeField) ?
                ExtractedField.ExtractionMethod.SCRIPT_FIELD : ExtractedField.ExtractionMethod.DOC_VALUE);
        List<String> remainingFields = job.allFields().stream().filter(f -> !f.equals(timeField)).collect(Collectors.toList());
        List<ExtractedField> allExtractedFields = new ArrayList<>(remainingFields.size());
        allExtractedFields.add(timeExtractedField);
        for (String field : remainingFields) {
            ExtractedField.ExtractionMethod method = scriptFields.contains(field) ? ExtractedField.ExtractionMethod.SCRIPT_FIELD :
                    datafeedConfig.isSource() ? ExtractedField.ExtractionMethod.SOURCE : ExtractedField.ExtractionMethod.DOC_VALUE;
            allExtractedFields.add(ExtractedField.newField(field, method));
        }
        return new ExtractedFields(timeExtractedField, allExtractedFields);
    }
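
    // Illustrative selection rules implied by build() (hypothetical config): the job's time field
    // is read from doc values unless it is declared as a datafeed script field; any other analysis
    // field declared in script_fields is read as a script field; the remaining fields come from
    // _source when the datafeed is configured for source extraction, and from doc values otherwise.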
}
@ -0,0 +1,164 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.search.ClearScrollAction;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.datafeed.extractor.ExtractorUtils;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.TimeUnit;

/**
 * An implementation that extracts data from elasticsearch using search and scroll on a client.
 * It supports safe and responsive cancellation by continuing the scroll until a new timestamp
 * is seen.
 * Note that this class is NOT thread-safe.
 */
class ScrollDataExtractor implements DataExtractor {

    private static final Logger LOGGER = Loggers.getLogger(ScrollDataExtractor.class);
    private static final TimeValue SCROLL_TIMEOUT = new TimeValue(10, TimeUnit.MINUTES);

    private final Client client;
    private final ScrollDataExtractorContext context;
    private String scrollId;
    private boolean isCancelled;
    private boolean hasNext;
    private Long timestampOnCancel;

    ScrollDataExtractor(Client client, ScrollDataExtractorContext dataExtractorContext) {
        this.client = Objects.requireNonNull(client);
        this.context = Objects.requireNonNull(dataExtractorContext);
        this.hasNext = true;
    }

    @Override
    public boolean hasNext() {
        return hasNext;
    }

    @Override
    public boolean isCancelled() {
        return isCancelled;
    }

    @Override
    public void cancel() {
        LOGGER.trace("[{}] Data extractor received cancel request", context.jobId);
        isCancelled = true;
    }

    @Override
    public Optional<InputStream> next() throws IOException {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }
        Optional<InputStream> stream = scrollId == null ? Optional.ofNullable(initScroll()) : Optional.ofNullable(continueScroll());
        if (!stream.isPresent()) {
            hasNext = false;
        }
        return stream;
    }

    private InputStream initScroll() throws IOException {
        LOGGER.debug("[{}] Initializing scroll", context.jobId);
        SearchResponse searchResponse = executeSearchRequest(buildSearchRequest());
        return processSearchResponse(searchResponse);
    }

    protected SearchResponse executeSearchRequest(SearchRequestBuilder searchRequestBuilder) {
        return searchRequestBuilder.get();
    }

    private SearchRequestBuilder buildSearchRequest() {
        SearchRequestBuilder searchRequestBuilder = SearchAction.INSTANCE.newRequestBuilder(client)
                .setScroll(SCROLL_TIMEOUT)
                .addSort(context.extractedFields.timeField(), SortOrder.ASC)
                .setIndices(context.indexes)
                .setTypes(context.types)
                .setSize(context.scrollSize)
                .setQuery(ExtractorUtils.wrapInTimeRangeQuery(
                        context.query, context.extractedFields.timeField(), context.start, context.end));

        for (String docValueField : context.extractedFields.getDocValueFields()) {
            searchRequestBuilder.addDocValueField(docValueField);
        }
        String[] sourceFields = context.extractedFields.getSourceFields();
        if (sourceFields.length == 0) {
            searchRequestBuilder.setFetchSource(false);
            searchRequestBuilder.storedFields(StoredFieldsContext._NONE_);
        } else {
            searchRequestBuilder.setFetchSource(sourceFields, null);
        }
        context.scriptFields.forEach(f -> searchRequestBuilder.addScriptField(f.fieldName(), f.script()));
        return searchRequestBuilder;
    }

    private InputStream processSearchResponse(SearchResponse searchResponse) throws IOException {
        ExtractorUtils.checkSearchWasSuccessful(context.jobId, searchResponse);
        scrollId = searchResponse.getScrollId();
        if (searchResponse.getHits().getHits().length == 0) {
            hasNext = false;
            clearScroll(scrollId);
            return null;
        }

        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        try (SearchHitToJsonProcessor hitProcessor = new SearchHitToJsonProcessor(context.extractedFields, outputStream)) {
            for (SearchHit hit : searchResponse.getHits().getHits()) {
                if (isCancelled) {
                    Long timestamp = context.extractedFields.timeFieldValue(hit);
                    if (timestamp != null) {
                        if (timestampOnCancel == null) {
                            timestampOnCancel = timestamp;
                        } else if (timestamp.equals(timestampOnCancel) == false) {
                            hasNext = false;
                            clearScroll(scrollId);
                            break;
                        }
                    }
                }
                hitProcessor.process(hit);
            }
        }
        return new ByteArrayInputStream(outputStream.toByteArray());
    }
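
    // Cancellation example (hypothetical timestamps): if cancel() arrives while hits with
    // timestamp 1000 are being streamed, extraction continues through every remaining hit whose
    // timestamp is still 1000 and stops at the first hit with a later timestamp, so the data for
    // a given timestamp is never half-written.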

    private InputStream continueScroll() throws IOException {
        LOGGER.debug("[{}] Continuing scroll with id [{}]", context.jobId, scrollId);
        SearchResponse searchResponse = executeSearchScrollRequest(scrollId);
        return processSearchResponse(searchResponse);
    }

    protected SearchResponse executeSearchScrollRequest(String scrollId) {
        return SearchScrollAction.INSTANCE.newRequestBuilder(client)
                .setScroll(SCROLL_TIMEOUT)
                .setScrollId(scrollId)
                .get();
    }

    void clearScroll(String scrollId) {
        ClearScrollAction.INSTANCE.newRequestBuilder(client).addScrollId(scrollId).get();
    }
}
@ -0,0 +1,39 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;

import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;

import java.util.List;
import java.util.Objects;

class ScrollDataExtractorContext {

    final String jobId;
    final ExtractedFields extractedFields;
    final String[] indexes;
    final String[] types;
    final QueryBuilder query;
    final List<SearchSourceBuilder.ScriptField> scriptFields;
    final int scrollSize;
    final long start;
    final long end;

    ScrollDataExtractorContext(String jobId, ExtractedFields extractedFields, List<String> indexes, List<String> types,
                               QueryBuilder query, List<SearchSourceBuilder.ScriptField> scriptFields, int scrollSize,
                               long start, long end) {
        this.jobId = Objects.requireNonNull(jobId);
        this.extractedFields = Objects.requireNonNull(extractedFields);
        this.indexes = indexes.toArray(new String[indexes.size()]);
        this.types = types.toArray(new String[types.size()]);
        this.query = Objects.requireNonNull(query);
        this.scriptFields = Objects.requireNonNull(scriptFields);
        this.scrollSize = scrollSize;
        this.start = start;
        this.end = end;
    }
}
@ -0,0 +1,44 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;

import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor;
import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory;

import java.util.Objects;

public class ScrollDataExtractorFactory implements DataExtractorFactory {

    private final Client client;
    private final DatafeedConfig datafeedConfig;
    private final Job job;
    private final ExtractedFields extractedFields;

    public ScrollDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job) {
        this.client = Objects.requireNonNull(client);
        this.datafeedConfig = Objects.requireNonNull(datafeedConfig);
        this.job = Objects.requireNonNull(job);
        this.extractedFields = ExtractedFields.build(job, datafeedConfig);
    }

    @Override
    public DataExtractor newExtractor(long start, long end) {
        ScrollDataExtractorContext dataExtractorContext = new ScrollDataExtractorContext(
                job.getId(),
                extractedFields,
                datafeedConfig.getIndexes(),
                datafeedConfig.getTypes(),
                datafeedConfig.getQuery(),
                datafeedConfig.getScriptFields(),
                datafeedConfig.getScrollSize(),
                start,
                end);
        return new ScrollDataExtractor(client, dataExtractorContext);
    }
}
@ -0,0 +1,50 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.datafeed.extractor.scroll;

import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.SearchHit;

import java.io.IOException;
import java.io.OutputStream;
import java.util.Objects;

class SearchHitToJsonProcessor implements Releasable {

    private final ExtractedFields fields;
    private final XContentBuilder jsonBuilder;

    SearchHitToJsonProcessor(ExtractedFields fields, OutputStream outputStream) throws IOException {
        this.fields = Objects.requireNonNull(fields);
        this.jsonBuilder = new XContentBuilder(JsonXContent.jsonXContent, outputStream);
    }

    public void process(SearchHit hit) throws IOException {
        jsonBuilder.startObject();
        for (ExtractedField field : fields.getAllFields()) {
            writeKeyValue(field.getName(), field.value(hit));
        }
        jsonBuilder.endObject();
    }

    private void writeKeyValue(String key, Object... values) throws IOException {
        if (values.length == 0) {
            return;
        }
        if (values.length == 1) {
            jsonBuilder.field(key, values[0]);
        } else {
            jsonBuilder.array(key, values);
        }
    }
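
    // Illustrative output (hypothetical hit): a hit with time=1000, a single-valued field
    // "status":"ok" and a multi-valued field "tags":["a","b"] is written as
    //   {"time":1000,"status":"ok","tags":["a","b"]}
    // while fields with no values are omitted entirely.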

    @Override
    public void close() {
        jsonBuilder.close();
    }
}
@ -0,0 +1,382 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job;

import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ack.AckedRequest;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.ml.action.DeleteJobAction;
import org.elasticsearch.xpack.ml.action.PutJobAction;
import org.elasticsearch.xpack.ml.action.RevertModelSnapshotAction;
import org.elasticsearch.xpack.ml.action.UpdateJobStateAction;
import org.elasticsearch.xpack.ml.action.util.QueryPage;
import org.elasticsearch.xpack.ml.job.config.IgnoreDowntime;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.config.JobUpdate;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.job.metadata.Allocation;
import org.elasticsearch.xpack.ml.job.metadata.MlMetadata;
import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister;
import org.elasticsearch.xpack.ml.job.persistence.JobStorageDeletionTask;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.ml.job.results.AnomalyRecord;
import org.elasticsearch.xpack.ml.notifications.Auditor;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.stream.Collectors;

/**
 * Allows interactions with jobs. The managed interactions include:
 * <ul>
 * <li>creation</li>
 * <li>deletion</li>
 * <li>updating</li>
 * <li>starting/stopping of datafeed jobs</li>
 * </ul>
 */
public class JobManager extends AbstractComponent {

    /**
     * Field name in which to store the API version in the usage info
     */
    public static final String APP_VER_FIELDNAME = "appVer";

    public static final String DEFAULT_RECORD_SORT_FIELD = AnomalyRecord.PROBABILITY.getPreferredName();

    private final JobProvider jobProvider;
    private final ClusterService clusterService;
    private final JobResultsPersister jobResultsPersister;

    /**
     * Create a JobManager
     */
    public JobManager(Settings settings, JobProvider jobProvider, JobResultsPersister jobResultsPersister,
                      ClusterService clusterService) {
        super(settings);
        this.jobProvider = Objects.requireNonNull(jobProvider);
        this.clusterService = clusterService;
        this.jobResultsPersister = jobResultsPersister;
    }

    /**
     * Get the jobs that match the given {@code jobId}.
     * Note that when the {@code jobId} is {@link Job#ALL} all jobs are returned.
     *
     * @param jobId the jobId
     * @return A {@link QueryPage} containing the matching {@code Job}s
     */
    public QueryPage<Job> getJob(String jobId, ClusterState clusterState) {
        if (jobId.equals(Job.ALL)) {
            return getJobs(clusterState);
        }
        MlMetadata mlMetadata = clusterState.getMetaData().custom(MlMetadata.TYPE);
        Job job = mlMetadata.getJobs().get(jobId);
        if (job == null) {
            logger.debug(String.format(Locale.ROOT, "Cannot find job '%s'", jobId));
            throw ExceptionsHelper.missingJobException(jobId);
        }

        logger.debug("Returning job [" + jobId + "]");
        return new QueryPage<>(Collections.singletonList(job), 1, Job.RESULTS_FIELD);
    }

    /**
     * Get details of all Jobs.
     *
     * @return A query page object with hitCount set to the total number of jobs,
     *         not only the number returned here as determined by the
     *         <code>size</code> parameter.
     */
    public QueryPage<Job> getJobs(ClusterState clusterState) {
        MlMetadata mlMetadata = clusterState.getMetaData().custom(MlMetadata.TYPE);
        List<Job> jobs = mlMetadata.getJobs().entrySet().stream()
                .map(Map.Entry::getValue)
                .collect(Collectors.toList());
        return new QueryPage<>(jobs, mlMetadata.getJobs().size(), Job.RESULTS_FIELD);
    }

    /**
     * Returns the non-null {@code Job} object for the given
     * {@code jobId} or throws
     * {@link org.elasticsearch.ResourceNotFoundException}
     *
     * @param jobId the jobId
     * @return the {@code Job} if a job with the given {@code jobId} exists
     * @throws org.elasticsearch.ResourceNotFoundException
     *         if there is no job matching the given {@code jobId}
     */
    public Job getJobOrThrowIfUnknown(String jobId) {
        return getJobOrThrowIfUnknown(clusterService.state(), jobId);
    }

    public Allocation getJobAllocation(String jobId) {
        return getAllocation(clusterService.state(), jobId);
    }

    /**
     * Returns the non-null {@code Job} object for the given
     * {@code jobId} or throws
     * {@link org.elasticsearch.ResourceNotFoundException}
     *
     * @param jobId the jobId
     * @return the {@code Job} if a job with the given {@code jobId} exists
     * @throws org.elasticsearch.ResourceNotFoundException
     *         if there is no job matching the given {@code jobId}
     */
    Job getJobOrThrowIfUnknown(ClusterState clusterState, String jobId) {
        MlMetadata mlMetadata = clusterState.metaData().custom(MlMetadata.TYPE);
        Job job = mlMetadata.getJobs().get(jobId);
        if (job == null) {
            throw ExceptionsHelper.missingJobException(jobId);
        }
        return job;
    }

    /**
     * Stores a job in the cluster state
     */
    public void putJob(PutJobAction.Request request, ActionListener<PutJobAction.Response> actionListener) {
        Job job = request.getJob();

        ActionListener<Boolean> createResultsIndexListener = ActionListener.wrap(jobSaved ->
                jobProvider.createJobResultIndex(job, new ActionListener<Boolean>() {
                    @Override
                    public void onResponse(Boolean indicesCreated) {
                        audit(job.getId()).info(Messages.getMessage(Messages.JOB_AUDIT_CREATED));

                        // Also I wonder if we need audit log infrastructure in ml,
                        // as when we merge into xpack we can use its audit trailing. See:
                        // https://github.com/elastic/prelert-legacy/issues/48
                        actionListener.onResponse(new PutJobAction.Response(jobSaved && indicesCreated, job));
                    }

                    @Override
                    public void onFailure(Exception e) {
                        actionListener.onFailure(e);
                    }
                }), actionListener::onFailure);

        clusterService.submitStateUpdateTask("put-job-" + job.getId(),
                new AckedClusterStateUpdateTask<Boolean>(request, createResultsIndexListener) {
                    @Override
                    protected Boolean newResponse(boolean acknowledged) {
                        return acknowledged;
                    }

                    @Override
                    public ClusterState execute(ClusterState currentState) throws Exception {
                        ClusterState cs = updateClusterState(job, false, currentState);
                        if (currentState.metaData().index(AnomalyDetectorsIndex.jobResultsIndexName(job.getIndexName())) != null) {
                            throw new ResourceAlreadyExistsException(Messages.getMessage(Messages.JOB_INDEX_ALREADY_EXISTS,
                                    AnomalyDetectorsIndex.jobResultsIndexName(job.getIndexName())));
                        }
                        return cs;
                    }
                });
    }

    public void updateJob(String jobId, JobUpdate jobUpdate, AckedRequest request, ActionListener<PutJobAction.Response> actionListener) {
        clusterService.submitStateUpdateTask("update-job-" + jobId,
                new AckedClusterStateUpdateTask<PutJobAction.Response>(request, actionListener) {
                    private Job updatedJob;

                    @Override
                    protected PutJobAction.Response newResponse(boolean acknowledged) {
                        return new PutJobAction.Response(acknowledged, updatedJob);
                    }

                    @Override
                    public ClusterState execute(ClusterState currentState) throws Exception {
                        Job job = getJob(jobId, currentState).results().get(0);
                        updatedJob = jobUpdate.mergeWithJob(job);
                        return updateClusterState(updatedJob, true, currentState);
                    }
                });
    }

    ClusterState updateClusterState(Job job, boolean overwrite, ClusterState currentState) {
        MlMetadata.Builder builder = createMlMetadataBuilder(currentState);
        builder.putJob(job, overwrite);
        return buildNewClusterState(currentState, builder);
    }

    public void deleteJob(DeleteJobAction.Request request, Client client, JobStorageDeletionTask task,
                          ActionListener<DeleteJobAction.Response> actionListener) {

        String jobId = request.getJobId();
        String indexName = AnomalyDetectorsIndex.jobResultsIndexName(jobId);
        logger.debug("Deleting job '" + jobId + "'");

        // Step 3. When the job has been removed from the cluster state, return a response
        // -------
        CheckedConsumer<Boolean, Exception> apiResponseHandler = jobDeleted -> {
            if (jobDeleted) {
                logger.info("Job [" + jobId + "] deleted.");
                actionListener.onResponse(new DeleteJobAction.Response(true));
                audit(jobId).info(Messages.getMessage(Messages.JOB_AUDIT_DELETED));
            } else {
                actionListener.onResponse(new DeleteJobAction.Response(false));
            }
        };

        // Step 2. When the physical storage has been deleted, remove the job from the cluster state
        // -------
        CheckedConsumer<Boolean, Exception> deleteJobStateHandler = response -> clusterService.submitStateUpdateTask("delete-job-" + jobId,
                new AckedClusterStateUpdateTask<Boolean>(request, ActionListener.wrap(apiResponseHandler, actionListener::onFailure)) {

                    @Override
                    protected Boolean newResponse(boolean acknowledged) {
                        return acknowledged && response;
                    }

                    @Override
                    public ClusterState execute(ClusterState currentState) throws Exception {
                        return removeJobFromState(jobId, currentState);
                    }
                });

        // Step 1. When the job's state updates to DELETING, begin deleting the physical storage
        // -------
        CheckedConsumer<UpdateJobStateAction.Response, Exception> updateHandler = response -> {
            // Successfully updated the state to DELETING, begin actually deleting
            if (response.isAcknowledged()) {
                logger.info("Job [" + jobId + "] set to [" + JobState.DELETING + "]");
            } else {
                logger.warn("Job [" + jobId + "] change to [" + JobState.DELETING + "] was not acknowledged.");
            }

            // This task manages the physical deletion of the job (removing the results, then the index)
            task.delete(jobId, indexName, client, deleteJobStateHandler::accept, actionListener::onFailure);
        };

        // Step 0. Kick off the chain of callbacks with the initial UpdateStatus call
        // -------
        UpdateJobStateAction.Request updateStateListener = new UpdateJobStateAction.Request(jobId, JobState.DELETING);
        setJobState(updateStateListener, ActionListener.wrap(updateHandler, actionListener::onFailure));
    }

    ClusterState removeJobFromState(String jobId, ClusterState currentState) {
        MlMetadata.Builder builder = createMlMetadataBuilder(currentState);
        builder.deleteJob(jobId);
        return buildNewClusterState(currentState, builder);
    }

    private Allocation getAllocation(ClusterState state, String jobId) {
        MlMetadata mlMetadata = state.metaData().custom(MlMetadata.TYPE);
        Allocation allocation = mlMetadata.getAllocations().get(jobId);
        if (allocation == null) {
            throw new ResourceNotFoundException("No allocation found for job with id [" + jobId + "]");
        }
        return allocation;
    }

    public Auditor audit(String jobId) {
        return jobProvider.audit(jobId);
    }

    public void revertSnapshot(RevertModelSnapshotAction.Request request, ActionListener<RevertModelSnapshotAction.Response> actionListener,
                               ModelSnapshot modelSnapshot) {

        clusterService.submitStateUpdateTask("revert-snapshot-" + request.getJobId(),
                new AckedClusterStateUpdateTask<RevertModelSnapshotAction.Response>(request, actionListener) {

                    @Override
                    protected RevertModelSnapshotAction.Response newResponse(boolean acknowledged) {
                        if (acknowledged) {
                            audit(request.getJobId())
                                    .info(Messages.getMessage(Messages.JOB_AUDIT_REVERTED, modelSnapshot.getDescription()));
                            return new RevertModelSnapshotAction.Response(modelSnapshot);
                        }
                        throw new IllegalStateException("Could not revert modelSnapshot on job ["
                                + request.getJobId() + "], not acknowledged by master.");
                    }

                    @Override
                    public ClusterState execute(ClusterState currentState) throws Exception {
                        Job job = getJobOrThrowIfUnknown(currentState, request.getJobId());
                        Job.Builder builder = new Job.Builder(job);
                        builder.setModelSnapshotId(modelSnapshot.getSnapshotId());
                        if (request.getDeleteInterveningResults()) {
                            builder.setIgnoreDowntime(IgnoreDowntime.NEVER);
                        } else {
                            builder.setIgnoreDowntime(IgnoreDowntime.ONCE);
                        }

                        return updateClusterState(builder.build(), true, currentState);
                    }
                });
    }

    public void setJobState(UpdateJobStateAction.Request request, ActionListener<UpdateJobStateAction.Response> actionListener) {
        clusterService.submitStateUpdateTask("set-job-state-" + request.getState() + "-" + request.getJobId(),
                new AckedClusterStateUpdateTask<UpdateJobStateAction.Response>(request, actionListener) {

                    @Override
                    public ClusterState execute(ClusterState currentState) throws Exception {
                        MlMetadata.Builder builder = new MlMetadata.Builder(currentState.metaData().custom(MlMetadata.TYPE));
                        builder.updateState(request.getJobId(), request.getState(), request.getReason());
                        return ClusterState.builder(currentState)
                                .metaData(MetaData.builder(currentState.metaData()).putCustom(MlMetadata.TYPE, builder.build()))
                                .build();
                    }

                    @Override
                    protected UpdateJobStateAction.Response newResponse(boolean acknowledged) {
                        return new UpdateJobStateAction.Response(acknowledged);
                    }
                });
    }

    /**
     * Update a persisted model snapshot metadata document to match the
     * argument supplied.
     *
     * @param modelSnapshot the updated model snapshot object to be stored
     */
    public void updateModelSnapshot(ModelSnapshot modelSnapshot, Consumer<Boolean> handler, Consumer<Exception> errorHandler) {
|
||||
jobResultsPersister.updateModelSnapshot(modelSnapshot, handler, errorHandler);
|
||||
}
|
||||
|
||||
private static MlMetadata.Builder createMlMetadataBuilder(ClusterState currentState) {
|
||||
MlMetadata currentMlMetadata = currentState.metaData().custom(MlMetadata.TYPE);
|
||||
return new MlMetadata.Builder(currentMlMetadata);
|
||||
}
|
||||
|
||||
private static ClusterState buildNewClusterState(ClusterState currentState, MlMetadata.Builder builder) {
|
||||
ClusterState.Builder newState = ClusterState.builder(currentState);
|
||||
newState.metaData(MetaData.builder(currentState.getMetaData()).putCustom(MlMetadata.TYPE, builder.build()).build());
|
||||
return newState.build();
|
||||
}
|
||||
|
||||
}
|
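
The deleteJob method in the class above is written bottom-up: the handler for each step is declared before the step that triggers it, and ActionListener.wrap routes each success to the next handler while every failure goes straight to the caller. A minimal self-contained sketch of that chaining pattern follows; only ActionListener.wrap and CheckedConsumer are the real APIs used above, while CallbackChainSketch and doAsyncWork are hypothetical stand-ins.

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.CheckedConsumer;

// Hypothetical sketch of the callback chaining used by deleteJob: handlers are
// declared last-step-first, then the chain is kicked off with the first call.
class CallbackChainSketch {

    void run(ActionListener<Boolean> caller) {
        // Final step: report the overall result to the caller.
        CheckedConsumer<Boolean, Exception> finalStep = caller::onResponse;

        // Middle step: do some async work, then hand its result to the final
        // step; any failure is routed straight to the caller.
        CheckedConsumer<Boolean, Exception> middleStep =
                ok -> doAsyncWork(ActionListener.wrap(finalStep, caller::onFailure));

        // First step: kick off the chain.
        doAsyncWork(ActionListener.wrap(middleStep, caller::onFailure));
    }

    // Stand-in for a real asynchronous call such as a cluster state update.
    private void doAsyncWork(ActionListener<Boolean> listener) {
        listener.onResponse(true);
    }
}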
@ -0,0 +1,713 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.ml.job.messages.Messages;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
import java.util.function.Function;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import java.util.stream.Collectors;


/**
 * Autodetect analysis configuration options describe which fields are
 * analysed and which functions are used.
 * <p>
 * The configuration can contain multiple detectors; a new anomaly detector will
 * be created for each detector configuration. The fields
 * <code>bucketSpan, batchSpan, summaryCountFieldName and categorizationFieldName</code>
 * apply to all detectors.
 * <p>
 * If a value has not been set it will be <code>null</code>.
 * Object wrappers are used around integral types and booleans so they can take
 * <code>null</code> values.
 */
public class AnalysisConfig extends ToXContentToBytes implements Writeable {
    /**
     * Serialisation names
     */
    private static final ParseField ANALYSIS_CONFIG = new ParseField("analysis_config");
    private static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
    private static final ParseField BATCH_SPAN = new ParseField("batch_span");
    private static final ParseField CATEGORIZATION_FIELD_NAME = new ParseField("categorization_field_name");
    public static final ParseField CATEGORIZATION_FILTERS = new ParseField("categorization_filters");
    private static final ParseField LATENCY = new ParseField("latency");
    private static final ParseField PERIOD = new ParseField("period");
    private static final ParseField SUMMARY_COUNT_FIELD_NAME = new ParseField("summary_count_field_name");
    private static final ParseField DETECTORS = new ParseField("detectors");
    private static final ParseField INFLUENCERS = new ParseField("influencers");
    private static final ParseField OVERLAPPING_BUCKETS = new ParseField("overlapping_buckets");
    private static final ParseField RESULT_FINALIZATION_WINDOW = new ParseField("result_finalization_window");
    private static final ParseField MULTIVARIATE_BY_FIELDS = new ParseField("multivariate_by_fields");
    private static final ParseField MULTIPLE_BUCKET_SPANS = new ParseField("multiple_bucket_spans");
    private static final ParseField USE_PER_PARTITION_NORMALIZATION = new ParseField("use_per_partition_normalization");

    private static final String ML_CATEGORY_FIELD = "mlcategory";
    public static final Set<String> AUTO_CREATED_FIELDS = new HashSet<>(Arrays.asList(ML_CATEGORY_FIELD));

    public static final long DEFAULT_RESULT_FINALIZATION_WINDOW = 2L;

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<AnalysisConfig.Builder, Void> PARSER =
            new ConstructingObjectParser<>(ANALYSIS_CONFIG.getPreferredName(), a -> new AnalysisConfig.Builder((List<Detector>) a[0]));

    static {
        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> Detector.PARSER.apply(p, c).build(), DETECTORS);
        PARSER.declareLong(Builder::setBucketSpan, BUCKET_SPAN);
        PARSER.declareLong(Builder::setBatchSpan, BATCH_SPAN);
        PARSER.declareString(Builder::setCategorizationFieldName, CATEGORIZATION_FIELD_NAME);
        PARSER.declareStringArray(Builder::setCategorizationFilters, CATEGORIZATION_FILTERS);
        PARSER.declareLong(Builder::setLatency, LATENCY);
        PARSER.declareLong(Builder::setPeriod, PERIOD);
        PARSER.declareString(Builder::setSummaryCountFieldName, SUMMARY_COUNT_FIELD_NAME);
        PARSER.declareStringArray(Builder::setInfluencers, INFLUENCERS);
        PARSER.declareBoolean(Builder::setOverlappingBuckets, OVERLAPPING_BUCKETS);
        PARSER.declareLong(Builder::setResultFinalizationWindow, RESULT_FINALIZATION_WINDOW);
        PARSER.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS);
        PARSER.declareLongArray(Builder::setMultipleBucketSpans, MULTIPLE_BUCKET_SPANS);
        PARSER.declareBoolean(Builder::setUsePerPartitionNormalization, USE_PER_PARTITION_NORMALIZATION);
    }
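
For reference, a sketch of the document shape the PARSER above accepts, built with XContentBuilder. The field names come from the ParseFields declared above; the single count detector and the client_ip influencer are made-up example values.

import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

// Hypothetical illustration of an analysis_config document PARSER can read.
class AnalysisConfigJsonSketch {
    static XContentBuilder exampleAnalysisConfig() throws IOException {
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        builder.field("bucket_span", 3600);  // BUCKET_SPAN, in seconds
        builder.startArray("detectors");     // DETECTORS is the one constructor argument
        builder.startObject();
        builder.field("function", "count");  // illustrative detector function
        builder.endObject();
        builder.endArray();
        builder.startArray("influencers");   // everything else is declared as an optional setter
        builder.value("client_ip");
        builder.endArray();
        builder.endObject();
        return builder;
    }
}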

    /**
     * These values apply to all detectors
     */
    private final long bucketSpan;
    private final Long batchSpan;
    private final String categorizationFieldName;
    private final List<String> categorizationFilters;
    private final long latency;
    private final Long period;
    private final String summaryCountFieldName;
    private final List<Detector> detectors;
    private final List<String> influencers;
    private final Boolean overlappingBuckets;
    private final Long resultFinalizationWindow;
    private final Boolean multivariateByFields;
    private final List<Long> multipleBucketSpans;
    private final boolean usePerPartitionNormalization;

    private AnalysisConfig(Long bucketSpan, Long batchSpan, String categorizationFieldName, List<String> categorizationFilters,
                           long latency, Long period, String summaryCountFieldName, List<Detector> detectors, List<String> influencers,
                           Boolean overlappingBuckets, Long resultFinalizationWindow, Boolean multivariateByFields,
                           List<Long> multipleBucketSpans, boolean usePerPartitionNormalization) {
        this.detectors = detectors;
        this.bucketSpan = bucketSpan;
        this.batchSpan = batchSpan;
        this.latency = latency;
        this.period = period;
        this.categorizationFieldName = categorizationFieldName;
        this.categorizationFilters = categorizationFilters;
        this.summaryCountFieldName = summaryCountFieldName;
        this.influencers = influencers;
        this.overlappingBuckets = overlappingBuckets;
        this.resultFinalizationWindow = resultFinalizationWindow;
        this.multivariateByFields = multivariateByFields;
        this.multipleBucketSpans = multipleBucketSpans;
        this.usePerPartitionNormalization = usePerPartitionNormalization;
    }

    public AnalysisConfig(StreamInput in) throws IOException {
        bucketSpan = in.readLong();
        batchSpan = in.readOptionalLong();
        categorizationFieldName = in.readOptionalString();
        categorizationFilters = in.readBoolean() ? in.readList(StreamInput::readString) : null;
        latency = in.readLong();
        period = in.readOptionalLong();
        summaryCountFieldName = in.readOptionalString();
        detectors = in.readList(Detector::new);
        influencers = in.readList(StreamInput::readString);
        overlappingBuckets = in.readOptionalBoolean();
        resultFinalizationWindow = in.readOptionalLong();
        multivariateByFields = in.readOptionalBoolean();
        multipleBucketSpans = in.readBoolean() ? in.readList(StreamInput::readLong) : null;
        usePerPartitionNormalization = in.readBoolean();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeLong(bucketSpan);
        out.writeOptionalLong(batchSpan);
        out.writeOptionalString(categorizationFieldName);
        if (categorizationFilters != null) {
            out.writeBoolean(true);
            out.writeStringList(categorizationFilters);
        } else {
            out.writeBoolean(false);
        }
        out.writeLong(latency);
        out.writeOptionalLong(period);
        out.writeOptionalString(summaryCountFieldName);
        out.writeList(detectors);
        out.writeStringList(influencers);
        out.writeOptionalBoolean(overlappingBuckets);
        out.writeOptionalLong(resultFinalizationWindow);
        out.writeOptionalBoolean(multivariateByFields);
        if (multipleBucketSpans != null) {
            out.writeBoolean(true);
            out.writeVInt(multipleBucketSpans.size());
            for (Long bucketSpan : multipleBucketSpans) {
                out.writeLong(bucketSpan);
            }
        } else {
            out.writeBoolean(false);
        }
        out.writeBoolean(usePerPartitionNormalization);
    }

    /**
     * The size of the interval the analysis is aggregated into, measured in
     * seconds
     *
     * @return The bucket span
     */
    public Long getBucketSpan() {
        return bucketSpan;
    }

    public long getBucketSpanOrDefault() {
        return bucketSpan;
    }

    /**
     * Interval into which to batch seasonal data, measured in seconds
     *
     * @return The batch span or <code>null</code> if not set
     */
    public Long getBatchSpan() {
        return batchSpan;
    }

    public String getCategorizationFieldName() {
        return categorizationFieldName;
    }

    public List<String> getCategorizationFilters() {
        return categorizationFilters;
    }

    /**
     * The latency interval (seconds) during which out-of-order records should be handled.
     *
     * @return The latency interval (seconds)
     */
    public Long getLatency() {
        return latency;
    }

    /**
     * The repeat interval for periodic data in multiples of
     * {@linkplain #getBatchSpan()}
     *
     * @return The period or <code>null</code> if not set
     */
    public Long getPeriod() {
        return period;
    }

    /**
     * The name of the field that contains counts for pre-summarised input
     *
     * @return The field name or <code>null</code> if not set
     */
    public String getSummaryCountFieldName() {
        return summaryCountFieldName;
    }

    /**
     * The list of analysis detectors. In a valid configuration the list should
     * contain at least 1 {@link Detector}
     *
     * @return The Detectors used in this job
     */
    public List<Detector> getDetectors() {
        return detectors;
    }

    /**
     * The list of influence field names
     */
    public List<String> getInfluencers() {
        return influencers;
    }

    /**
     * Return the list of term fields.
     * These are the influencer fields, partition field,
     * by field and over field of each detector.
     * <code>null</code> and empty strings are filtered from the
     * config.
     *
     * @return Set of term fields - never <code>null</code>
     */
    public Set<String> termFields() {
        Set<String> termFields = new TreeSet<>();

        for (Detector d : getDetectors()) {
            addIfNotNull(termFields, d.getByFieldName());
            addIfNotNull(termFields, d.getOverFieldName());
            addIfNotNull(termFields, d.getPartitionFieldName());
        }

        for (String i : getInfluencers()) {
            addIfNotNull(termFields, i);
        }

        // remove empty strings
        termFields.remove("");

        return termFields;
    }

    public Set<String> extractReferencedFilters() {
        return detectors.stream().map(Detector::extractReferencedFilters)
                .flatMap(Set::stream).collect(Collectors.toSet());
    }

    public Boolean getOverlappingBuckets() {
        return overlappingBuckets;
    }

    public Long getResultFinalizationWindow() {
        return resultFinalizationWindow;
    }

    public Boolean getMultivariateByFields() {
        return multivariateByFields;
    }

    public List<Long> getMultipleBucketSpans() {
        return multipleBucketSpans;
    }

    public boolean getUsePerPartitionNormalization() {
        return usePerPartitionNormalization;
    }

    /**
     * Return the list of fields required by the analysis.
     * These are the influencer fields, metric field, partition field,
     * by field and over field of each detector, plus the summary count
     * field and the categorization field name of the job.
     * <code>null</code> and empty strings are filtered from the
     * config.
     *
     * @return List of required analysis fields - never <code>null</code>
     */
    public List<String> analysisFields() {
        Set<String> analysisFields = termFields();

        addIfNotNull(analysisFields, categorizationFieldName);
        addIfNotNull(analysisFields, summaryCountFieldName);

        for (Detector d : getDetectors()) {
            addIfNotNull(analysisFields, d.getFieldName());
        }

        // remove empty strings
        analysisFields.remove("");

        return new ArrayList<>(analysisFields);
    }

    private static void addIfNotNull(Set<String> fields, String field) {
        if (field != null) {
            fields.add(field);
        }
    }

    public List<String> fields() {
        return collectNonNullAndNonEmptyDetectorFields(d -> d.getFieldName());
    }

    private List<String> collectNonNullAndNonEmptyDetectorFields(Function<Detector, String> fieldGetter) {
        Set<String> fields = new HashSet<>();

        for (Detector d : getDetectors()) {
            addIfNotNull(fields, fieldGetter.apply(d));
        }

        // remove empty strings
        fields.remove("");

        return new ArrayList<>(fields);
    }

    public List<String> byFields() {
        return collectNonNullAndNonEmptyDetectorFields(d -> d.getByFieldName());
    }

    public List<String> overFields() {
        return collectNonNullAndNonEmptyDetectorFields(d -> d.getOverFieldName());
    }

    public List<String> partitionFields() {
        return collectNonNullAndNonEmptyDetectorFields(d -> d.getPartitionFieldName());
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
        if (batchSpan != null) {
            builder.field(BATCH_SPAN.getPreferredName(), batchSpan);
        }
        if (categorizationFieldName != null) {
            builder.field(CATEGORIZATION_FIELD_NAME.getPreferredName(), categorizationFieldName);
        }
        if (categorizationFilters != null) {
            builder.field(CATEGORIZATION_FILTERS.getPreferredName(), categorizationFilters);
        }
        builder.field(LATENCY.getPreferredName(), latency);
        if (period != null) {
            builder.field(PERIOD.getPreferredName(), period);
        }
        if (summaryCountFieldName != null) {
            builder.field(SUMMARY_COUNT_FIELD_NAME.getPreferredName(), summaryCountFieldName);
        }
        builder.field(DETECTORS.getPreferredName(), detectors);
        builder.field(INFLUENCERS.getPreferredName(), influencers);
        if (overlappingBuckets != null) {
            builder.field(OVERLAPPING_BUCKETS.getPreferredName(), overlappingBuckets);
        }
        if (resultFinalizationWindow != null) {
            builder.field(RESULT_FINALIZATION_WINDOW.getPreferredName(), resultFinalizationWindow);
        }
        if (multivariateByFields != null) {
            builder.field(MULTIVARIATE_BY_FIELDS.getPreferredName(), multivariateByFields);
        }
        if (multipleBucketSpans != null) {
            builder.field(MULTIPLE_BUCKET_SPANS.getPreferredName(), multipleBucketSpans);
        }
        builder.field(USE_PER_PARTITION_NORMALIZATION.getPreferredName(), usePerPartitionNormalization);
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        AnalysisConfig that = (AnalysisConfig) o;
        return latency == that.latency &&
                usePerPartitionNormalization == that.usePerPartitionNormalization &&
                Objects.equals(bucketSpan, that.bucketSpan) &&
                Objects.equals(batchSpan, that.batchSpan) &&
                Objects.equals(categorizationFieldName, that.categorizationFieldName) &&
                Objects.equals(categorizationFilters, that.categorizationFilters) &&
                Objects.equals(period, that.period) &&
                Objects.equals(summaryCountFieldName, that.summaryCountFieldName) &&
                Objects.equals(detectors, that.detectors) &&
                Objects.equals(influencers, that.influencers) &&
                Objects.equals(overlappingBuckets, that.overlappingBuckets) &&
                Objects.equals(resultFinalizationWindow, that.resultFinalizationWindow) &&
                Objects.equals(multivariateByFields, that.multivariateByFields) &&
                Objects.equals(multipleBucketSpans, that.multipleBucketSpans);
    }

    @Override
    public int hashCode() {
        return Objects.hash(
                bucketSpan, batchSpan, categorizationFieldName, categorizationFilters, latency, period,
                summaryCountFieldName, detectors, influencers, overlappingBuckets, resultFinalizationWindow,
                multivariateByFields, multipleBucketSpans, usePerPartitionNormalization
        );
    }

    public static class Builder {

        public static final long DEFAULT_BUCKET_SPAN = 300L;

        private List<Detector> detectors;
        private long bucketSpan = DEFAULT_BUCKET_SPAN;
        private Long batchSpan;
        private long latency = 0L;
        private Long period;
        private String categorizationFieldName;
        private List<String> categorizationFilters;
        private String summaryCountFieldName;
        private List<String> influencers = new ArrayList<>();
        private Boolean overlappingBuckets;
        private Long resultFinalizationWindow;
        private Boolean multivariateByFields;
        private List<Long> multipleBucketSpans;
        private boolean usePerPartitionNormalization = false;

        public Builder(List<Detector> detectors) {
            this.detectors = detectors;
        }

        public Builder(AnalysisConfig analysisConfig) {
            this.detectors = analysisConfig.detectors;
            this.bucketSpan = analysisConfig.bucketSpan;
            this.batchSpan = analysisConfig.batchSpan;
            this.latency = analysisConfig.latency;
            this.period = analysisConfig.period;
            this.categorizationFieldName = analysisConfig.categorizationFieldName;
            this.categorizationFilters = analysisConfig.categorizationFilters;
            this.summaryCountFieldName = analysisConfig.summaryCountFieldName;
            this.influencers = analysisConfig.influencers;
            this.overlappingBuckets = analysisConfig.overlappingBuckets;
            this.resultFinalizationWindow = analysisConfig.resultFinalizationWindow;
            this.multivariateByFields = analysisConfig.multivariateByFields;
            this.multipleBucketSpans = analysisConfig.multipleBucketSpans;
            this.usePerPartitionNormalization = analysisConfig.usePerPartitionNormalization;
        }

        public void setDetectors(List<Detector> detectors) {
            this.detectors = detectors;
        }

        public void setBucketSpan(long bucketSpan) {
            this.bucketSpan = bucketSpan;
        }

        public void setBatchSpan(long batchSpan) {
            this.batchSpan = batchSpan;
        }

        public void setLatency(long latency) {
            this.latency = latency;
        }

        public void setPeriod(long period) {
            this.period = period;
        }

        public void setCategorizationFieldName(String categorizationFieldName) {
            this.categorizationFieldName = categorizationFieldName;
        }

        public void setCategorizationFilters(List<String> categorizationFilters) {
            this.categorizationFilters = categorizationFilters;
        }

        public void setSummaryCountFieldName(String summaryCountFieldName) {
            this.summaryCountFieldName = summaryCountFieldName;
        }

        public void setInfluencers(List<String> influencers) {
            this.influencers = influencers;
        }

        public void setOverlappingBuckets(Boolean overlappingBuckets) {
            this.overlappingBuckets = overlappingBuckets;
        }

        public void setResultFinalizationWindow(Long resultFinalizationWindow) {
            this.resultFinalizationWindow = resultFinalizationWindow;
        }

        public void setMultivariateByFields(Boolean multivariateByFields) {
            this.multivariateByFields = multivariateByFields;
        }

        public void setMultipleBucketSpans(List<Long> multipleBucketSpans) {
            this.multipleBucketSpans = multipleBucketSpans;
        }

        public void setUsePerPartitionNormalization(boolean usePerPartitionNormalization) {
            this.usePerPartitionNormalization = usePerPartitionNormalization;
        }

        /**
         * Checks the configuration is valid
         * <ol>
         * <li>Check that if non-null, BucketSpan, BatchSpan, Latency and Period
         * are non-negative</li>
         * <li>Check that if non-null, Latency is no greater than MAX_LATENCY</li>
         * <li>Check there is at least one detector configured</li>
         * <li>Check all the detectors are configured correctly</li>
         * <li>Check that OVERLAPPING_BUCKETS is set appropriately</li>
         * <li>Check that MULTIPLE_BUCKET_SPANS are set appropriately</li>
         * <li>If per-partition normalization is configured, at least one detector
         * must have a partition field and no influencers can be used</li>
         * </ol>
         */
        public AnalysisConfig build() {
            checkFieldIsNotNegativeIfSpecified(BUCKET_SPAN.getPreferredName(), bucketSpan);
            checkFieldIsNotNegativeIfSpecified(BATCH_SPAN.getPreferredName(), batchSpan);
            checkFieldIsNotNegativeIfSpecified(LATENCY.getPreferredName(), latency);
            checkFieldIsNotNegativeIfSpecified(PERIOD.getPreferredName(), period);

            verifyDetectorsAreDefined(detectors);
            verifyFieldName(summaryCountFieldName);
            verifyFieldName(categorizationFieldName);

            verifyCategorizationFilters(categorizationFilters, categorizationFieldName);
            verifyMultipleBucketSpans(multipleBucketSpans, bucketSpan);

            overlappingBuckets = verifyOverlappingBucketsConfig(overlappingBuckets, detectors);

            if (usePerPartitionNormalization) {
                checkDetectorsHavePartitionFields(detectors);
                checkNoInfluencersAreSet(influencers);
            }

            return new AnalysisConfig(bucketSpan, batchSpan, categorizationFieldName, categorizationFilters,
                    latency, period, summaryCountFieldName, detectors, influencers, overlappingBuckets,
                    resultFinalizationWindow, multivariateByFields, multipleBucketSpans, usePerPartitionNormalization);
        }
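
build() runs every validation step listed in its javadoc before constructing the immutable AnalysisConfig. A small usage sketch follows; it assumes the caller already has a parsed List<Detector> (Detector construction is outside this excerpt), and all field values are illustrative.

import java.util.Arrays;
import java.util.List;

// Sketch of programmatic construction; build() throws IllegalArgumentException
// if any of the checks listed above fail.
class AnalysisConfigBuilderSketch {
    static AnalysisConfig exampleConfig(List<Detector> detectors) {
        AnalysisConfig.Builder builder = new AnalysisConfig.Builder(detectors);
        builder.setBucketSpan(900L);                                 // seconds
        builder.setInfluencers(Arrays.asList("client_ip"));
        builder.setMultipleBucketSpans(Arrays.asList(1800L, 2700L)); // strict multiples of 900
        return builder.build();
    }
}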

        private static void checkFieldIsNotNegativeIfSpecified(String fieldName, Long value) {
            if (value != null && value < 0) {
                String msg = Messages.getMessage(Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW, fieldName, 0, value);
                throw new IllegalArgumentException(msg);
            }
        }

        private static void verifyDetectorsAreDefined(List<Detector> detectors) {
            if (detectors == null || detectors.isEmpty()) {
                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_NO_DETECTORS));
            }
        }

        private static void verifyCategorizationFilters(List<String> filters, String categorizationFieldName) {
            if (filters == null || filters.isEmpty()) {
                return;
            }

            verifyCategorizationFieldNameSetIfFiltersAreSet(categorizationFieldName);
            verifyCategorizationFiltersAreDistinct(filters);
            verifyCategorizationFiltersContainNoneEmpty(filters);
            verifyCategorizationFiltersAreValidRegex(filters);
        }

        private static void verifyCategorizationFieldNameSetIfFiltersAreSet(String categorizationFieldName) {
            if (categorizationFieldName == null) {
                throw new IllegalArgumentException(Messages.getMessage(
                        Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_REQUIRE_CATEGORIZATION_FIELD_NAME));
            }
        }

        private static void verifyCategorizationFiltersAreDistinct(List<String> filters) {
            if (filters.stream().distinct().count() != filters.size()) {
                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_DUPLICATES));
            }
        }

        private static void verifyCategorizationFiltersContainNoneEmpty(List<String> filters) {
            if (filters.stream().anyMatch(String::isEmpty)) {
                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_EMPTY));
            }
        }

        private static void verifyCategorizationFiltersAreValidRegex(List<String> filters) {
            for (String filter : filters) {
                if (!isValidRegex(filter)) {
                    throw new IllegalArgumentException(
                            Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_INVALID_REGEX, filter));
                }
            }
        }

        private static void verifyMultipleBucketSpans(List<Long> multipleBucketSpans, Long bucketSpan) {
            if (multipleBucketSpans == null) {
                return;
            }

            if (bucketSpan == null) {
                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_MULTIPLE_BUCKETSPANS_REQUIRE_BUCKETSPAN));
            }
            for (Long span : multipleBucketSpans) {
                if ((span % bucketSpan != 0L) || (span <= bucketSpan)) {
                    throw new IllegalArgumentException(
                            Messages.getMessage(Messages.JOB_CONFIG_MULTIPLE_BUCKETSPANS_MUST_BE_MULTIPLE, span, bucketSpan));
                }
            }
        }

        private static boolean checkDetectorsHavePartitionFields(List<Detector> detectors) {
            for (Detector detector : detectors) {
                if (!Strings.isNullOrEmpty(detector.getPartitionFieldName())) {
                    return true;
                }
            }
            throw new IllegalArgumentException(Messages.getMessage(
                    Messages.JOB_CONFIG_PER_PARTITION_NORMALIZATION_REQUIRES_PARTITION_FIELD));
        }

        private static boolean checkNoInfluencersAreSet(List<String> influencers) {
            if (!influencers.isEmpty()) {
                throw new IllegalArgumentException(Messages.getMessage(
                        Messages.JOB_CONFIG_PER_PARTITION_NORMALIZATION_CANNOT_USE_INFLUENCERS));
            }

            return true;
        }

        /**
         * Check that the characters used in a field name will not cause problems.
         *
         * @param field The field name to be validated
         * @return true
         */
        public static boolean verifyFieldName(String field) throws ElasticsearchParseException {
            if (field != null && containsInvalidChar(field)) {
                throw new IllegalArgumentException(
                        Messages.getMessage(Messages.JOB_CONFIG_INVALID_FIELDNAME_CHARS, field, Detector.PROHIBITED));
            }
            return true;
        }

        private static boolean containsInvalidChar(String field) {
            for (Character ch : Detector.PROHIBITED_FIELDNAME_CHARACTERS) {
                if (field.indexOf(ch) >= 0) {
                    return true;
                }
            }
            return field.chars().anyMatch(Character::isISOControl);
        }

        private static boolean isValidRegex(String exp) {
            try {
                Pattern.compile(exp);
                return true;
            } catch (PatternSyntaxException e) {
                return false;
            }
        }

        private static Boolean verifyOverlappingBucketsConfig(Boolean overlappingBuckets, List<Detector> detectors) {
            // If any detector function is rare/freq_rare, overlapping buckets must not be used
            boolean mustNotUse = false;

            List<String> illegalFunctions = new ArrayList<>();
            for (Detector d : detectors) {
                if (Detector.NO_OVERLAPPING_BUCKETS_FUNCTIONS.contains(d.getFunction())) {
                    illegalFunctions.add(d.getFunction());
                    mustNotUse = true;
                }
            }

            if (Boolean.TRUE.equals(overlappingBuckets) && mustNotUse) {
                throw new IllegalArgumentException(
                        Messages.getMessage(Messages.JOB_CONFIG_OVERLAPPING_BUCKETS_INCOMPATIBLE_FUNCTION, illegalFunctions.toString()));
            }

            return overlappingBuckets;
        }
    }
}
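
The strictness of verifyMultipleBucketSpans above is easy to miss: a span equal to the bucket span is rejected even though it divides evenly. A tiny standalone sketch of the same condition; the class and the values are illustrative only.

// Mirrors the condition in verifyMultipleBucketSpans: a strict multiple.
class MultipleBucketSpansSketch {
    static boolean isValidMultiple(long span, long bucketSpan) {
        return span % bucketSpan == 0L && span > bucketSpan;
    }

    public static void main(String[] args) {
        System.out.println(isValidMultiple(1800L, 900L)); // true
        System.out.println(isValidMultiple(900L, 900L));  // false: must be strictly greater
        System.out.println(isValidMultiple(1350L, 900L)); // false: not a multiple
    }
}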
@ -0,0 +1,130 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.ml.job.messages.Messages;

import java.io.IOException;
import java.util.Objects;

/**
 * Analysis limits for autodetect
 * <p>
 * If an option has not been set it should not be used, so the default value is picked up instead.
 */
public class AnalysisLimits extends ToXContentToBytes implements Writeable {
    /**
     * Serialisation field names
     */
    public static final ParseField MODEL_MEMORY_LIMIT = new ParseField("model_memory_limit");
    public static final ParseField CATEGORIZATION_EXAMPLES_LIMIT = new ParseField("categorization_examples_limit");

    public static final ConstructingObjectParser<AnalysisLimits, Void> PARSER = new ConstructingObjectParser<>(
            "analysis_limits", a -> new AnalysisLimits((Long) a[0], (Long) a[1]));

    static {
        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), MODEL_MEMORY_LIMIT);
        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), CATEGORIZATION_EXAMPLES_LIMIT);
    }

    /**
     * It is initialised to <code>null</code>.
     * A value of <code>null</code> or <code>0</code> will result in the default being used.
     */
    private final Long modelMemoryLimit;

    /**
     * It is initialised to <code>null</code>.
     * A value of <code>null</code> will result in the default being used.
     */
    private final Long categorizationExamplesLimit;

    public AnalysisLimits(Long modelMemoryLimit, Long categorizationExamplesLimit) {
        this.modelMemoryLimit = modelMemoryLimit;
        if (categorizationExamplesLimit != null && categorizationExamplesLimit < 0) {
            String msg = Messages.getMessage(Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW, CATEGORIZATION_EXAMPLES_LIMIT.getPreferredName(), 0,
                    categorizationExamplesLimit);
            throw new IllegalArgumentException(msg);
        }
        this.categorizationExamplesLimit = categorizationExamplesLimit;
    }

    public AnalysisLimits(StreamInput in) throws IOException {
        this(in.readOptionalLong(), in.readOptionalLong());
    }

    /**
     * Maximum size of the model in MB before the anomaly detector
     * will drop new samples to prevent the model using any more
     * memory
     *
     * @return The set memory limit or <code>null</code> if not set
     */
    @Nullable
    public Long getModelMemoryLimit() {
        return modelMemoryLimit;
    }

    /**
     * Gets the limit to the number of examples that are stored per category
     *
     * @return the limit or <code>null</code> if not set
     */
    @Nullable
    public Long getCategorizationExamplesLimit() {
        return categorizationExamplesLimit;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeOptionalLong(modelMemoryLimit);
        out.writeOptionalLong(categorizationExamplesLimit);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (modelMemoryLimit != null) {
            builder.field(MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit);
        }
        if (categorizationExamplesLimit != null) {
            builder.field(CATEGORIZATION_EXAMPLES_LIMIT.getPreferredName(), categorizationExamplesLimit);
        }
        builder.endObject();
        return builder;
    }

    /**
     * Overridden equality test
     */
    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other instanceof AnalysisLimits == false) {
            return false;
        }

        AnalysisLimits that = (AnalysisLimits) other;
        return Objects.equals(this.modelMemoryLimit, that.modelMemoryLimit) &&
                Objects.equals(this.categorizationExamplesLimit, that.categorizationExamplesLimit);
    }

    @Override
    public int hashCode() {
        return Objects.hash(modelMemoryLimit, categorizationExamplesLimit);
    }
}
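
All of these config classes follow the same Writeable contract: writeTo and the StreamInput constructor must mirror each other exactly. A quick round-trip sketch for AnalysisLimits; the 4096 MB and 4-example values are arbitrary.

import java.io.IOException;

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;

// Serialize to an in-memory stream and read the object back.
class AnalysisLimitsRoundTripSketch {
    static void roundTrip() throws IOException {
        AnalysisLimits original = new AnalysisLimits(4096L, 4L);
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            original.writeTo(out);
            try (StreamInput in = out.bytes().streamInput()) {
                assert original.equals(new AnalysisLimits(in));
            }
        }
    }
}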
@ -0,0 +1,131 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.job.messages.Messages;

import java.io.IOException;
import java.util.Objects;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;

/**
 * A class that describes a condition.
 * The {@linkplain Operator} enum defines the available
 * comparisons a condition can use.
 */
public class Condition extends ToXContentToBytes implements Writeable {
    public static final ParseField CONDITION_FIELD = new ParseField("condition");
    public static final ParseField FILTER_VALUE_FIELD = new ParseField("value");

    public static final ConstructingObjectParser<Condition, Void> PARSER = new ConstructingObjectParser<>(
            CONDITION_FIELD.getPreferredName(), a -> new Condition((Operator) a[0], (String) a[1]));

    static {
        PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
            if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
                return Operator.fromString(p.text());
            }
            throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
        }, Operator.OPERATOR_FIELD, ValueType.STRING);
        PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
            if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
                return p.text();
            }
            if (p.currentToken() == XContentParser.Token.VALUE_NULL) {
                return null;
            }
            throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
        }, FILTER_VALUE_FIELD, ValueType.STRING_OR_NULL);
    }

    private final Operator op;
    private final String filterValue;

    public Condition(StreamInput in) throws IOException {
        op = Operator.readFromStream(in);
        filterValue = in.readOptionalString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        op.writeTo(out);
        out.writeOptionalString(filterValue);
    }

    public Condition(Operator op, String filterValue) {
        if (filterValue == null) {
            throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_CONDITION_INVALID_VALUE_NULL));
        }

        if (op.expectsANumericArgument()) {
            try {
                Double.parseDouble(filterValue);
            } catch (NumberFormatException nfe) {
                String msg = Messages.getMessage(Messages.JOB_CONFIG_CONDITION_INVALID_VALUE_NUMBER, filterValue);
                throw new IllegalArgumentException(msg);
            }
        } else {
            try {
                Pattern.compile(filterValue);
            } catch (PatternSyntaxException e) {
                String msg = Messages.getMessage(Messages.JOB_CONFIG_CONDITION_INVALID_VALUE_REGEX, filterValue);
                throw new IllegalArgumentException(msg);
            }
        }
        this.op = op;
        this.filterValue = filterValue;
    }

    public Operator getOperator() {
        return op;
    }

    public String getValue() {
        return filterValue;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Operator.OPERATOR_FIELD.getPreferredName(), op);
        builder.field(FILTER_VALUE_FIELD.getPreferredName(), filterValue);
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(op, filterValue);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }

        if (getClass() != obj.getClass()) {
            return false;
        }

        Condition other = (Condition) obj;
        return Objects.equals(this.op, other.op) &&
                Objects.equals(this.filterValue, other.filterValue);
    }
}
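
The constructor above takes one of two validation paths depending on Operator.expectsANumericArgument. A sketch follows; the Operator enum is not part of this excerpt, so the "gt" and "matches" names fed to Operator.fromString are assumptions, while the validation behaviour shown in the comments is taken from the constructor itself.

// Hypothetical usage; operator names are assumed, validation paths are real.
class ConditionSketch {
    static void examples() {
        // Numeric operator: the value must parse as a double.
        Condition numeric = new Condition(Operator.fromString("gt"), "5.0");

        // Non-numeric operator: the value must compile as a regular expression.
        Condition regex = new Condition(Operator.fromString("matches"), "host-\\d+");

        // A null value throws IllegalArgumentException for any operator.
    }
}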
@ -0,0 +1,46 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import java.io.IOException;
import java.util.Locale;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

public enum Connective implements Writeable {
    OR, AND;

    /**
     * Case-insensitive from string method.
     *
     * @param value String representation
     * @return The connective type
     */
    public static Connective fromString(String value) {
        return Connective.valueOf(value.toUpperCase(Locale.ROOT));
    }

    public static Connective readFromStream(StreamInput in) throws IOException {
        int ordinal = in.readVInt();
        if (ordinal < 0 || ordinal >= values().length) {
            throw new IOException("Unknown Connective ordinal [" + ordinal + "]");
        }
        return values()[ordinal];
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(ordinal());
    }

    @Override
    public String toString() {
        return name().toLowerCase(Locale.ROOT);
    }
}
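
A quick sketch of the string round trip defined above. Note that writeTo serializes the enum ordinal, so the declared order of OR and AND is effectively part of the wire format (an observation about the code above, not a rule stated in it).

class ConnectiveSketch {
    public static void main(String[] args) {
        Connective c = Connective.fromString("Or"); // fromString is case-insensitive
        assert c == Connective.OR;
        assert "or".equals(c.toString());           // toString lower-cases the name
    }
}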
@ -0,0 +1,352 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.utils.time.DateTimeFormatterTimestampConverter;

import java.io.IOException;
import java.time.ZoneOffset;
import java.util.Locale;
import java.util.Objects;

/**
 * Describes the format of the data used in the job and how it should
 * be interpreted by autodetect.
 * <p>
 * Data must either be in a textual delimited format (e.g. CSV, TSV) or JSON;
 * the {@linkplain DataFormat} enum indicates which. {@link #getTimeField()}
 * is the name of the field containing the timestamp and {@link #getTimeFormat()}
 * is the format code for the date string, as described by
 * {@link java.time.format.DateTimeFormatter}. The default quote character for
 * delimited formats is {@value #DEFAULT_QUOTE_CHAR} but any other character can be
 * used.
 */
public class DataDescription extends ToXContentToBytes implements Writeable {
    /**
     * Enum of the acceptable data formats.
     */
    public enum DataFormat implements Writeable {
        JSON,
        DELIMITED;

        /**
         * Delimited used to be called delineated. We keep supporting that for backwards
         * compatibility.
         */
        private static final String DEPRECATED_DELINEATED = "DELINEATED";

        /**
         * Case-insensitive from string method.
         * Works with either JSON, json, etc.
         *
         * @param value String representation
         * @return The data format
         */
        public static DataFormat forString(String value) {
            String valueUpperCase = value.toUpperCase(Locale.ROOT);
            return DEPRECATED_DELINEATED.equals(valueUpperCase) ? DELIMITED : DataFormat.valueOf(valueUpperCase);
        }

        public static DataFormat readFromStream(StreamInput in) throws IOException {
            int ordinal = in.readVInt();
            if (ordinal < 0 || ordinal >= values().length) {
                throw new IOException("Unknown DataFormat ordinal [" + ordinal + "]");
            }
            return values()[ordinal];
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVInt(ordinal());
        }

        @Override
        public String toString() {
            return name().toLowerCase(Locale.ROOT);
        }
    }
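
The backwards-compatibility mapping above in two lines; both assertions follow directly from forString as written.

class DataFormatSketch {
    public static void main(String[] args) {
        // The deprecated "delineated" spelling maps onto DELIMITED, case-insensitively.
        assert DataDescription.DataFormat.forString("delineated") == DataDescription.DataFormat.DELIMITED;
        assert DataDescription.DataFormat.forString("json") == DataDescription.DataFormat.JSON;
    }
}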
|
||||
|
||||
private static final ParseField DATA_DESCRIPTION_FIELD = new ParseField("data_description");
|
||||
private static final ParseField FORMAT_FIELD = new ParseField("format");
|
||||
private static final ParseField TIME_FIELD_NAME_FIELD = new ParseField("time_field");
|
||||
private static final ParseField TIME_FORMAT_FIELD = new ParseField("time_format");
|
||||
private static final ParseField FIELD_DELIMITER_FIELD = new ParseField("field_delimiter");
|
||||
private static final ParseField QUOTE_CHARACTER_FIELD = new ParseField("quote_character");
|
||||
|
||||
/**
|
||||
* Special time format string for epoch times (seconds)
|
||||
*/
|
||||
public static final String EPOCH = "epoch";
|
||||
|
||||
/**
|
||||
* Special time format string for epoch times (milli-seconds)
|
||||
*/
|
||||
public static final String EPOCH_MS = "epoch_ms";
|
||||
|
||||
/**
|
||||
* By default autodetect expects the timestamp in a field with this name
|
||||
*/
|
||||
public static final String DEFAULT_TIME_FIELD = "time";
|
||||
|
||||
/**
|
||||
* The default field delimiter expected by the native autodetect
|
||||
* program.
|
||||
*/
|
||||
public static final char DEFAULT_DELIMITER = '\t';
|
||||
|
||||
/**
|
||||
* Csv data must have this line ending
|
||||
*/
|
||||
public static final char LINE_ENDING = '\n';
|
||||
|
||||
/**
|
||||
* The default quote character used to escape text in
|
||||
* delineated data formats
|
||||
*/
|
||||
public static final char DEFAULT_QUOTE_CHAR = '"';
|
||||
|
||||
private final DataFormat dataFormat;
|
||||
private final String timeFieldName;
|
||||
private final String timeFormat;
|
||||
private final char fieldDelimiter;
|
||||
private final char quoteCharacter;
|
||||
|
||||
public static final ObjectParser<Builder, Void> PARSER =
|
||||
new ObjectParser<>(DATA_DESCRIPTION_FIELD.getPreferredName(), Builder::new);
|
||||
|
||||
static {
|
||||
PARSER.declareString(Builder::setFormat, FORMAT_FIELD);
|
||||
PARSER.declareString(Builder::setTimeField, TIME_FIELD_NAME_FIELD);
|
||||
PARSER.declareString(Builder::setTimeFormat, TIME_FORMAT_FIELD);
|
||||
PARSER.declareField(Builder::setFieldDelimiter, DataDescription::extractChar, FIELD_DELIMITER_FIELD, ValueType.STRING);
|
||||
PARSER.declareField(Builder::setQuoteCharacter, DataDescription::extractChar, QUOTE_CHARACTER_FIELD, ValueType.STRING);
|
||||
}
|
||||
|
||||
public DataDescription(DataFormat dataFormat, String timeFieldName, String timeFormat, char fieldDelimiter, char quoteCharacter) {
|
||||
this.dataFormat = dataFormat;
|
||||
this.timeFieldName = timeFieldName;
|
||||
this.timeFormat = timeFormat;
|
||||
this.fieldDelimiter = fieldDelimiter;
|
||||
this.quoteCharacter = quoteCharacter;
|
||||
}
|
||||
|
||||
public DataDescription(StreamInput in) throws IOException {
|
||||
dataFormat = DataFormat.readFromStream(in);
|
||||
timeFieldName = in.readString();
|
||||
timeFormat = in.readString();
|
||||
fieldDelimiter = (char) in.read();
|
||||
quoteCharacter = (char) in.read();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
dataFormat.writeTo(out);
|
||||
out.writeString(timeFieldName);
|
||||
out.writeString(timeFormat);
|
||||
out.write(fieldDelimiter);
|
||||
out.write(quoteCharacter);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(FORMAT_FIELD.getPreferredName(), dataFormat);
|
||||
builder.field(TIME_FIELD_NAME_FIELD.getPreferredName(), timeFieldName);
|
||||
builder.field(TIME_FORMAT_FIELD.getPreferredName(), timeFormat);
|
||||
builder.field(FIELD_DELIMITER_FIELD.getPreferredName(), String.valueOf(fieldDelimiter));
|
||||
builder.field(QUOTE_CHARACTER_FIELD.getPreferredName(), String.valueOf(quoteCharacter));
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* The format of the data to be processed.
|
||||
* Defaults to {@link DataDescription.DataFormat#DELIMITED}
|
||||
*
|
||||
* @return The data format
|
||||
*/
|
||||
public DataFormat getFormat() {
|
||||
return dataFormat;
|
||||
}
|
||||
|
||||
/**
|
||||
* The name of the field containing the timestamp
|
||||
*
|
||||
* @return A String if set or <code>null</code>
|
||||
*/
|
||||
public String getTimeField() {
|
||||
return timeFieldName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Either {@value #EPOCH}, {@value #EPOCH_MS} or a SimpleDateTime format string.
|
||||
* If not set (is <code>null</code> or an empty string) or set to
|
||||
     * {@value #EPOCH} (the default) then the date is assumed to be in
     * seconds from the epoch.
     *
     * @return A String if set or <code>null</code>
     */
    public String getTimeFormat() {
        return timeFormat;
    }

    /**
     * If the data is in a delimited format with a header, e.g. CSV or TSV,
     * this is the delimiter character used. This is only applicable if
     * {@linkplain #getFormat()} is {@link DataDescription.DataFormat#DELIMITED}.
     * The default value is {@value #DEFAULT_DELIMITER}
     *
     * @return A char
     */
    public char getFieldDelimiter() {
        return fieldDelimiter;
    }

    /**
     * The quote character used in delimited formats.
     * Defaults to {@value #DEFAULT_QUOTE_CHAR}
     *
     * @return The delimited format quote character
     */
    public char getQuoteCharacter() {
        return quoteCharacter;
    }

    /**
     * Returns true if the data described by this object needs
     * transforming before processing by autodetect.
     * A transformation must be applied if either the time format is
     * not seconds since the epoch or the data is in JSON format.
     *
     * @return True if the data should be transformed.
     */
    public boolean transform() {
        return dataFormat == DataFormat.JSON ||
                isTransformTime();
    }

    /**
     * Return true if the time is in a format that needs transforming.
     * Any time format that isn't {@value #EPOCH} or <code>null</code>
     * needs transforming.
     *
     * @return True if the time field needs to be transformed.
     */
    public boolean isTransformTime() {
        return timeFormat != null && !EPOCH.equals(timeFormat);
    }

    /**
     * Return true if the time format is {@value #EPOCH_MS}
     *
     * @return True if the date is in milliseconds since the epoch.
     */
    public boolean isEpochMs() {
        return EPOCH_MS.equals(timeFormat);
    }

    private static char extractChar(XContentParser parser) throws IOException {
        if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
            String charStr = parser.text();
            if (charStr.length() != 1) {
                throw new IllegalArgumentException("String must be a single character, found [" + charStr + "]");
            }
            return charStr.charAt(0);
        }
        throw new IllegalArgumentException("Unsupported token [" + parser.currentToken() + "]");
    }

    /**
     * Overridden equality test
     */
    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other instanceof DataDescription == false) {
            return false;
        }

        DataDescription that = (DataDescription) other;

        return this.dataFormat == that.dataFormat &&
                this.quoteCharacter == that.quoteCharacter &&
                Objects.equals(this.timeFieldName, that.timeFieldName) &&
                Objects.equals(this.timeFormat, that.timeFormat) &&
                Objects.equals(this.fieldDelimiter, that.fieldDelimiter);
    }

    @Override
    public int hashCode() {
        return Objects.hash(dataFormat, quoteCharacter, timeFieldName,
                timeFormat, fieldDelimiter);
    }

    public static class Builder {

        private DataFormat dataFormat = DataFormat.DELIMITED;
        private String timeFieldName = DEFAULT_TIME_FIELD;
        private String timeFormat = EPOCH;
        private char fieldDelimiter = DEFAULT_DELIMITER;
        private char quoteCharacter = DEFAULT_QUOTE_CHAR;

        public void setFormat(DataFormat format) {
            dataFormat = ExceptionsHelper.requireNonNull(format, FORMAT_FIELD.getPreferredName() + " must not be null");
        }

        private void setFormat(String format) {
            setFormat(DataFormat.forString(format));
        }

        public void setTimeField(String fieldName) {
            timeFieldName = ExceptionsHelper.requireNonNull(fieldName, TIME_FIELD_NAME_FIELD.getPreferredName() + " must not be null");
        }

        public void setTimeFormat(String format) {
            ExceptionsHelper.requireNonNull(format, TIME_FORMAT_FIELD.getPreferredName() + " must not be null");
            switch (format) {
            case EPOCH:
            case EPOCH_MS:
                break;
            default:
                try {
                    DateTimeFormatterTimestampConverter.ofPattern(format, ZoneOffset.UTC);
                } catch (IllegalArgumentException e) {
                    throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, format));
                }
            }
            timeFormat = format;
        }

        public void setFieldDelimiter(char delimiter) {
            fieldDelimiter = delimiter;
        }

        public void setQuoteCharacter(char value) {
            quoteCharacter = value;
        }

        public DataDescription build() {
            return new DataDescription(dataFormat, timeFieldName, timeFormat, fieldDelimiter, quoteCharacter);
        }

    }

}
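A minimal sketch of how the accessors above drive the transform decision, assuming the Builder defaults in this file and that EPOCH_MS is the string "epoch_ms":

    DataDescription.Builder builder = new DataDescription.Builder();
    builder.setTimeFormat("epoch_ms");       // one of the two formats accepted without pattern validation
    DataDescription description = builder.build();
    // description.isEpochMs() is true: EPOCH_MS.equals(timeFormat)
    // description.isTransformTime() is true: the format is set and is not "epoch"
    // description.transform() is true: the time field must be converted before autodetect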
@ -0,0 +1,82 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.common.Strings;
import org.elasticsearch.xpack.ml.utils.MlStrings;


public final class DefaultDetectorDescription {
    private static final String BY_TOKEN = " by ";
    private static final String OVER_TOKEN = " over ";

    private static final String USE_NULL_OPTION = " usenull=";
    private static final String PARTITION_FIELD_OPTION = " partitionfield=";
    private static final String EXCLUDE_FREQUENT_OPTION = " excludefrequent=";

    private DefaultDetectorDescription() {
        // do nothing
    }

    /**
     * Returns the default description for the given {@code detector}
     *
     * @param detector the {@code Detector} for which a default description is requested
     * @return the default description
     */
    public static String of(Detector detector) {
        StringBuilder sb = new StringBuilder();
        appendOn(detector, sb);
        return sb.toString();
    }

    /**
     * Appends to the given {@code StringBuilder} the default description
     * for the given {@code detector}
     *
     * @param detector the {@code Detector} for which a default description is requested
     * @param sb the {@code StringBuilder} to append to
     */
    public static void appendOn(Detector detector, StringBuilder sb) {
        if (isNotNullOrEmpty(detector.getFunction())) {
            sb.append(detector.getFunction());
            if (isNotNullOrEmpty(detector.getFieldName())) {
                sb.append('(').append(quoteField(detector.getFieldName()))
                        .append(')');
            }
        } else if (isNotNullOrEmpty(detector.getFieldName())) {
            sb.append(quoteField(detector.getFieldName()));
        }

        if (isNotNullOrEmpty(detector.getByFieldName())) {
            sb.append(BY_TOKEN).append(quoteField(detector.getByFieldName()));
        }

        if (isNotNullOrEmpty(detector.getOverFieldName())) {
            sb.append(OVER_TOKEN).append(quoteField(detector.getOverFieldName()));
        }

        if (detector.isUseNull()) {
            sb.append(USE_NULL_OPTION).append(detector.isUseNull());
        }

        if (isNotNullOrEmpty(detector.getPartitionFieldName())) {
            sb.append(PARTITION_FIELD_OPTION).append(quoteField(detector.getPartitionFieldName()));
        }

        if (detector.getExcludeFrequent() != null) {
            sb.append(EXCLUDE_FREQUENT_OPTION).append(detector.getExcludeFrequent());
        }
    }

    private static String quoteField(String field) {
        return MlStrings.doubleQuoteIfNotAlphaNumeric(field);
    }

    private static boolean isNotNullOrEmpty(String arg) {
        return !Strings.isNullOrEmpty(arg);
    }
}
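A short sketch of the string this class produces, assuming the Detector.Builder defined later in this commit (the field names are illustrative):

    Detector.Builder builder = new Detector.Builder("sum", "bytes");
    builder.setByFieldName("host name");
    String description = DefaultDetectorDescription.of(builder.build());
    // description is: sum(bytes) by "host name"
    // "host name" is double-quoted because it is not purely alphanumeric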
@ -0,0 +1,55 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import java.time.Duration;

/**
 * Factory methods for a sensible default for the datafeed frequency
 */
public final class DefaultFrequency {
    private static final int SECONDS_IN_MINUTE = 60;
    private static final int TWO_MINS_SECONDS = 2 * SECONDS_IN_MINUTE;
    private static final int TWENTY_MINS_SECONDS = 20 * SECONDS_IN_MINUTE;
    private static final int HALF_DAY_SECONDS = 12 * 60 * SECONDS_IN_MINUTE;
    private static final Duration TEN_MINUTES = Duration.ofMinutes(10);
    private static final Duration ONE_HOUR = Duration.ofHours(1);

    private DefaultFrequency() {
        // Do nothing
    }

    /**
     * Creates a sensible default frequency for a given bucket span.
     * <p>
     * The default depends on the bucket span:
     * <ul>
     * <li> <= 2 mins -> 1 min</li>
     * <li> <= 20 mins -> bucket span / 2</li>
     * <li> <= 12 hours -> 10 mins</li>
     * <li> > 12 hours -> 1 hour</li>
     * </ul>
     *
     * @param bucketSpanSeconds the bucket span in seconds
     * @return the default frequency
     */
    public static Duration ofBucketSpan(long bucketSpanSeconds) {
        if (bucketSpanSeconds <= 0) {
            throw new IllegalArgumentException("Bucket span has to be > 0");
        }

        if (bucketSpanSeconds <= TWO_MINS_SECONDS) {
            return Duration.ofSeconds(SECONDS_IN_MINUTE);
        }
        if (bucketSpanSeconds <= TWENTY_MINS_SECONDS) {
            return Duration.ofSeconds(bucketSpanSeconds / 2);
        }
        if (bucketSpanSeconds <= HALF_DAY_SECONDS) {
            return TEN_MINUTES;
        }
        return ONE_HOUR;
    }
}
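The tiers above in action, as a sketch (results shown as ISO-8601 duration strings):

    DefaultFrequency.ofBucketSpan(60);      // <= 2 mins  -> PT1M  (1 minute)
    DefaultFrequency.ofBucketSpan(600);     // <= 20 mins -> PT5M  (bucket span / 2)
    DefaultFrequency.ofBucketSpan(3600);    // <= 12 hrs  -> PT10M (10 minutes)
    DefaultFrequency.ofBucketSpan(86400);   // >  12 hrs  -> PT1H  (1 hour)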
@ -0,0 +1,167 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.job.messages.Messages;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

public class DetectionRule extends ToXContentToBytes implements Writeable {
    public static final ParseField DETECTION_RULE_FIELD = new ParseField("detection_rule");
    public static final ParseField TARGET_FIELD_NAME_FIELD = new ParseField("target_field_name");
    public static final ParseField TARGET_FIELD_VALUE_FIELD = new ParseField("target_field_value");
    public static final ParseField CONDITIONS_CONNECTIVE_FIELD = new ParseField("conditions_connective");
    public static final ParseField RULE_CONDITIONS_FIELD = new ParseField("rule_conditions");

    public static final ConstructingObjectParser<DetectionRule, Void> PARSER = new ConstructingObjectParser<>(
            DETECTION_RULE_FIELD.getPreferredName(),
            arr -> {
                @SuppressWarnings("unchecked")
                List<RuleCondition> rules = (List<RuleCondition>) arr[3];
                return new DetectionRule((String) arr[0], (String) arr[1], (Connective) arr[2], rules);
            }
    );

    static {
        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TARGET_FIELD_NAME_FIELD);
        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TARGET_FIELD_VALUE_FIELD);
        PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> {
            if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
                return Connective.fromString(p.text());
            }
            throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
        }, CONDITIONS_CONNECTIVE_FIELD, ValueType.STRING);
        PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(),
                (parser, parseFieldMatcher) -> RuleCondition.PARSER.apply(parser, parseFieldMatcher), RULE_CONDITIONS_FIELD);
    }

    private final RuleAction ruleAction = RuleAction.FILTER_RESULTS;
    private final String targetFieldName;
    private final String targetFieldValue;
    private final Connective conditionsConnective;
    private final List<RuleCondition> ruleConditions;

    public DetectionRule(StreamInput in) throws IOException {
        conditionsConnective = Connective.readFromStream(in);
        int size = in.readVInt();
        ruleConditions = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            ruleConditions.add(new RuleCondition(in));
        }
        targetFieldName = in.readOptionalString();
        targetFieldValue = in.readOptionalString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        conditionsConnective.writeTo(out);
        out.writeVInt(ruleConditions.size());
        for (RuleCondition condition : ruleConditions) {
            condition.writeTo(out);
        }
        out.writeOptionalString(targetFieldName);
        out.writeOptionalString(targetFieldValue);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(CONDITIONS_CONNECTIVE_FIELD.getPreferredName(), conditionsConnective);
        builder.field(RULE_CONDITIONS_FIELD.getPreferredName(), ruleConditions);
        if (targetFieldName != null) {
            builder.field(TARGET_FIELD_NAME_FIELD.getPreferredName(), targetFieldName);
        }
        if (targetFieldValue != null) {
            builder.field(TARGET_FIELD_VALUE_FIELD.getPreferredName(), targetFieldValue);
        }
        builder.endObject();
        return builder;
    }

    public DetectionRule(String targetFieldName, String targetFieldValue, Connective conditionsConnective,
            List<RuleCondition> ruleConditions) {
        if (targetFieldValue != null && targetFieldName == null) {
            String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_MISSING_TARGET_FIELD_NAME, targetFieldValue);
            throw new IllegalArgumentException(msg);
        }
        if (ruleConditions == null || ruleConditions.isEmpty()) {
            String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_REQUIRES_AT_LEAST_ONE_CONDITION);
            throw new IllegalArgumentException(msg);
        }
        for (RuleCondition condition : ruleConditions) {
            if (condition.getConditionType() == RuleConditionType.CATEGORICAL && targetFieldName != null) {
                String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_CATEGORICAL_INVALID_OPTION,
                        DetectionRule.TARGET_FIELD_NAME_FIELD.getPreferredName());
                throw new IllegalArgumentException(msg);
            }
        }

        this.targetFieldName = targetFieldName;
        this.targetFieldValue = targetFieldValue;
        this.conditionsConnective = conditionsConnective != null ? conditionsConnective : Connective.OR;
        this.ruleConditions = Collections.unmodifiableList(ruleConditions);
    }

    public RuleAction getRuleAction() {
        return ruleAction;
    }

    public String getTargetFieldName() {
        return targetFieldName;
    }

    public String getTargetFieldValue() {
        return targetFieldValue;
    }

    public Connective getConditionsConnective() {
        return conditionsConnective;
    }

    public List<RuleCondition> getRuleConditions() {
        return ruleConditions;
    }

    public Set<String> extractReferencedFilters() {
        return ruleConditions.stream().map(RuleCondition::getValueFilter).filter(Objects::nonNull).collect(Collectors.toSet());
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj instanceof DetectionRule == false) {
            return false;
        }

        DetectionRule other = (DetectionRule) obj;
        return Objects.equals(ruleAction, other.ruleAction) && Objects.equals(targetFieldName, other.targetFieldName)
                && Objects.equals(targetFieldValue, other.targetFieldValue)
                && Objects.equals(conditionsConnective, other.conditionsConnective) && Objects.equals(ruleConditions, other.ruleConditions);
    }

    @Override
    public int hashCode() {
        return Objects.hash(ruleAction, targetFieldName, targetFieldValue, conditionsConnective, ruleConditions);
    }
}
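A sketch of the constructor guards above; the RuleCondition list and its variable name are illustrative, built elsewhere in this commit:

    List<RuleCondition> conditions = parsedConditions;   // must contain at least one condition
    DetectionRule rule = new DetectionRule("instance", "host-1", null, conditions);
    rule.getConditionsConnective();   // Connective.OR, the default when null is passed
    // new DetectionRule(null, "host-1", null, conditions) would throw:
    // a target_field_value requires a target_field_name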
@ -0,0 +1,770 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.job.messages.Messages;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;


/**
 * Defines the fields to be used in the analysis.
 * <code>fieldname</code> must be set and only one of <code>byFieldName</code>
 * and <code>overFieldName</code> should be set.
 */
public class Detector extends ToXContentToBytes implements Writeable {

    public enum ExcludeFrequent implements Writeable {
        ALL,
        NONE,
        BY,
        OVER;

        /**
         * Case-insensitive from string method.
         * Works with any casing, e.g. ALL, all, All.
         *
         * @param value String representation
         * @return The exclude frequent mode
         */
        public static ExcludeFrequent forString(String value) {
            return valueOf(value.toUpperCase(Locale.ROOT));
        }

        public static ExcludeFrequent readFromStream(StreamInput in) throws IOException {
            int ordinal = in.readVInt();
            if (ordinal < 0 || ordinal >= values().length) {
                throw new IOException("Unknown ExcludeFrequent ordinal [" + ordinal + "]");
            }
            return values()[ordinal];
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVInt(ordinal());
        }

        @Override
        public String toString() {
            return name().toLowerCase(Locale.ROOT);
        }
    }

    public static final ParseField DETECTOR_DESCRIPTION_FIELD = new ParseField("detector_description");
    public static final ParseField FUNCTION_FIELD = new ParseField("function");
    public static final ParseField FIELD_NAME_FIELD = new ParseField("field_name");
    public static final ParseField BY_FIELD_NAME_FIELD = new ParseField("by_field_name");
    public static final ParseField OVER_FIELD_NAME_FIELD = new ParseField("over_field_name");
    public static final ParseField PARTITION_FIELD_NAME_FIELD = new ParseField("partition_field_name");
    public static final ParseField USE_NULL_FIELD = new ParseField("use_null");
    public static final ParseField EXCLUDE_FREQUENT_FIELD = new ParseField("exclude_frequent");
    public static final ParseField DETECTOR_RULES_FIELD = new ParseField("detector_rules");

    public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("detector", Builder::new);

    static {
        PARSER.declareString(Builder::setDetectorDescription, DETECTOR_DESCRIPTION_FIELD);
        PARSER.declareString(Builder::setFunction, FUNCTION_FIELD);
        PARSER.declareString(Builder::setFieldName, FIELD_NAME_FIELD);
        PARSER.declareString(Builder::setByFieldName, BY_FIELD_NAME_FIELD);
        PARSER.declareString(Builder::setOverFieldName, OVER_FIELD_NAME_FIELD);
        PARSER.declareString(Builder::setPartitionFieldName, PARTITION_FIELD_NAME_FIELD);
        PARSER.declareBoolean(Builder::setUseNull, USE_NULL_FIELD);
        PARSER.declareField(Builder::setExcludeFrequent, p -> {
            if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
                return ExcludeFrequent.forString(p.text());
            }
            throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
        }, EXCLUDE_FREQUENT_FIELD, ObjectParser.ValueType.STRING);
        PARSER.declareObjectArray(Builder::setDetectorRules, DetectionRule.PARSER, DETECTOR_RULES_FIELD);
    }

    public static final String COUNT = "count";
    public static final String HIGH_COUNT = "high_count";
    public static final String LOW_COUNT = "low_count";
    public static final String NON_ZERO_COUNT = "non_zero_count";
    public static final String LOW_NON_ZERO_COUNT = "low_non_zero_count";
    public static final String HIGH_NON_ZERO_COUNT = "high_non_zero_count";
    public static final String NZC = "nzc";
    public static final String LOW_NZC = "low_nzc";
    public static final String HIGH_NZC = "high_nzc";
    public static final String DISTINCT_COUNT = "distinct_count";
    public static final String LOW_DISTINCT_COUNT = "low_distinct_count";
    public static final String HIGH_DISTINCT_COUNT = "high_distinct_count";
    public static final String DC = "dc";
    public static final String LOW_DC = "low_dc";
    public static final String HIGH_DC = "high_dc";
    public static final String RARE = "rare";
    public static final String FREQ_RARE = "freq_rare";
    public static final String INFO_CONTENT = "info_content";
    public static final String LOW_INFO_CONTENT = "low_info_content";
    public static final String HIGH_INFO_CONTENT = "high_info_content";
    public static final String METRIC = "metric";
    public static final String MEAN = "mean";
    public static final String MEDIAN = "median";
    public static final String HIGH_MEAN = "high_mean";
    public static final String LOW_MEAN = "low_mean";
    public static final String AVG = "avg";
    public static final String HIGH_AVG = "high_avg";
    public static final String LOW_AVG = "low_avg";
    public static final String MIN = "min";
    public static final String MAX = "max";
    public static final String SUM = "sum";
    public static final String LOW_SUM = "low_sum";
    public static final String HIGH_SUM = "high_sum";
    public static final String NON_NULL_SUM = "non_null_sum";
    public static final String LOW_NON_NULL_SUM = "low_non_null_sum";
    public static final String HIGH_NON_NULL_SUM = "high_non_null_sum";
    /**
     * Population variance is called varp to match Splunk
     */
    public static final String POPULATION_VARIANCE = "varp";
    public static final String LOW_POPULATION_VARIANCE = "low_varp";
    public static final String HIGH_POPULATION_VARIANCE = "high_varp";
    public static final String TIME_OF_DAY = "time_of_day";
    public static final String TIME_OF_WEEK = "time_of_week";
    public static final String LAT_LONG = "lat_long";


    /**
     * The set of valid function names.
     */
    public static final Set<String> ANALYSIS_FUNCTIONS =
            new HashSet<>(Arrays.asList(
                    // The convention here is that synonyms (only) go on the same line
                    COUNT,
                    HIGH_COUNT,
                    LOW_COUNT,
                    NON_ZERO_COUNT, NZC,
                    LOW_NON_ZERO_COUNT, LOW_NZC,
                    HIGH_NON_ZERO_COUNT, HIGH_NZC,
                    DISTINCT_COUNT, DC,
                    LOW_DISTINCT_COUNT, LOW_DC,
                    HIGH_DISTINCT_COUNT, HIGH_DC,
                    RARE,
                    FREQ_RARE,
                    INFO_CONTENT,
                    LOW_INFO_CONTENT,
                    HIGH_INFO_CONTENT,
                    METRIC,
                    MEAN, AVG,
                    HIGH_MEAN, HIGH_AVG,
                    LOW_MEAN, LOW_AVG,
                    MEDIAN,
                    MIN,
                    MAX,
                    SUM,
                    LOW_SUM,
                    HIGH_SUM,
                    NON_NULL_SUM,
                    LOW_NON_NULL_SUM,
                    HIGH_NON_NULL_SUM,
                    POPULATION_VARIANCE,
                    LOW_POPULATION_VARIANCE,
                    HIGH_POPULATION_VARIANCE,
                    TIME_OF_DAY,
                    TIME_OF_WEEK,
                    LAT_LONG
            ));

    /**
     * The set of functions that do not require a field, by field or over field
     */
    public static final Set<String> COUNT_WITHOUT_FIELD_FUNCTIONS =
            new HashSet<>(Arrays.asList(
                    COUNT,
                    HIGH_COUNT,
                    LOW_COUNT,
                    NON_ZERO_COUNT, NZC,
                    LOW_NON_ZERO_COUNT, LOW_NZC,
                    HIGH_NON_ZERO_COUNT, HIGH_NZC,
                    TIME_OF_DAY,
                    TIME_OF_WEEK
            ));

    /**
     * The set of functions that require a field name
     */
    public static final Set<String> FIELD_NAME_FUNCTIONS =
            new HashSet<>(Arrays.asList(
                    DISTINCT_COUNT, DC,
                    LOW_DISTINCT_COUNT, LOW_DC,
                    HIGH_DISTINCT_COUNT, HIGH_DC,
                    INFO_CONTENT,
                    LOW_INFO_CONTENT,
                    HIGH_INFO_CONTENT,
                    METRIC,
                    MEAN, AVG,
                    HIGH_MEAN, HIGH_AVG,
                    LOW_MEAN, LOW_AVG,
                    MEDIAN,
                    MIN,
                    MAX,
                    SUM,
                    LOW_SUM,
                    HIGH_SUM,
                    NON_NULL_SUM,
                    LOW_NON_NULL_SUM,
                    HIGH_NON_NULL_SUM,
                    POPULATION_VARIANCE,
                    LOW_POPULATION_VARIANCE,
                    HIGH_POPULATION_VARIANCE,
                    LAT_LONG
            ));

    /**
     * The set of functions that require a by field name
     */
    public static final Set<String> BY_FIELD_NAME_FUNCTIONS =
            new HashSet<>(Arrays.asList(
                    RARE,
                    FREQ_RARE
            ));

    /**
     * The set of functions that require an over field name
     */
    public static final Set<String> OVER_FIELD_NAME_FUNCTIONS =
            new HashSet<>(Arrays.asList(
                    FREQ_RARE
            ));

    /**
     * The set of functions that cannot have a by field name
     */
    public static final Set<String> NO_BY_FIELD_NAME_FUNCTIONS =
            new HashSet<>();

    /**
     * The set of functions that cannot have an over field name
     */
    public static final Set<String> NO_OVER_FIELD_NAME_FUNCTIONS =
            new HashSet<>(Arrays.asList(
                    NON_ZERO_COUNT, NZC,
                    LOW_NON_ZERO_COUNT, LOW_NZC,
                    HIGH_NON_ZERO_COUNT, HIGH_NZC
            ));

    /**
     * The set of functions that must not be used with overlapping buckets
     */
    public static final Set<String> NO_OVERLAPPING_BUCKETS_FUNCTIONS =
            new HashSet<>(Arrays.asList(
                    RARE,
                    FREQ_RARE
            ));

    /**
     * The set of functions that should not be used with overlapping buckets
     * as they gain no benefit but have overhead
     */
    public static final Set<String> OVERLAPPING_BUCKETS_FUNCTIONS_NOT_NEEDED =
            new HashSet<>(Arrays.asList(
                    MIN,
                    MAX,
                    TIME_OF_DAY,
                    TIME_OF_WEEK
            ));

    /**
     * Field names cannot contain any of these characters:
     * ", \
     */
    public static final Character[] PROHIBITED_FIELDNAME_CHARACTERS = {'"', '\\'};
    public static final String PROHIBITED = String.join(",",
            Arrays.stream(PROHIBITED_FIELDNAME_CHARACTERS).map(
                    c -> Character.toString(c)).collect(Collectors.toList()));
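
    // Illustrative note: PROHIBITED joins the characters above with a comma, so it
    // evaluates to the three-character string ",\ (double quote, comma, backslash),
    // which is interpolated into the invalid-fieldname message used by
    // Builder.verifyFieldName below.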

    private final String detectorDescription;
    private final String function;
    private final String fieldName;
    private final String byFieldName;
    private final String overFieldName;
    private final String partitionFieldName;
    private final boolean useNull;
    private final ExcludeFrequent excludeFrequent;
    private final List<DetectionRule> detectorRules;

    public Detector(StreamInput in) throws IOException {
        detectorDescription = in.readString();
        function = in.readString();
        fieldName = in.readOptionalString();
        byFieldName = in.readOptionalString();
        overFieldName = in.readOptionalString();
        partitionFieldName = in.readOptionalString();
        useNull = in.readBoolean();
        excludeFrequent = in.readBoolean() ? ExcludeFrequent.readFromStream(in) : null;
        detectorRules = in.readList(DetectionRule::new);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(detectorDescription);
        out.writeString(function);
        out.writeOptionalString(fieldName);
        out.writeOptionalString(byFieldName);
        out.writeOptionalString(overFieldName);
        out.writeOptionalString(partitionFieldName);
        out.writeBoolean(useNull);
        if (excludeFrequent != null) {
            out.writeBoolean(true);
            excludeFrequent.writeTo(out);
        } else {
            out.writeBoolean(false);
        }
        out.writeList(detectorRules);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(DETECTOR_DESCRIPTION_FIELD.getPreferredName(), detectorDescription);
        builder.field(FUNCTION_FIELD.getPreferredName(), function);
        if (fieldName != null) {
            builder.field(FIELD_NAME_FIELD.getPreferredName(), fieldName);
        }
        if (byFieldName != null) {
            builder.field(BY_FIELD_NAME_FIELD.getPreferredName(), byFieldName);
        }
        if (overFieldName != null) {
            builder.field(OVER_FIELD_NAME_FIELD.getPreferredName(), overFieldName);
        }
        if (partitionFieldName != null) {
            builder.field(PARTITION_FIELD_NAME_FIELD.getPreferredName(), partitionFieldName);
        }
        if (useNull) {
            builder.field(USE_NULL_FIELD.getPreferredName(), useNull);
        }
        if (excludeFrequent != null) {
            builder.field(EXCLUDE_FREQUENT_FIELD.getPreferredName(), excludeFrequent);
        }
        builder.field(DETECTOR_RULES_FIELD.getPreferredName(), detectorRules);
        builder.endObject();
        return builder;
    }

    private Detector(String detectorDescription, String function, String fieldName, String byFieldName, String overFieldName,
            String partitionFieldName, boolean useNull, ExcludeFrequent excludeFrequent, List<DetectionRule> detectorRules) {
        this.function = function;
        this.fieldName = fieldName;
        this.byFieldName = byFieldName;
        this.overFieldName = overFieldName;
        this.partitionFieldName = partitionFieldName;
        this.useNull = useNull;
        this.excludeFrequent = excludeFrequent;
        // REMOVE THIS LINE WHEN REMOVING JACKSON_DATABIND:
        detectorRules = detectorRules != null ? detectorRules : Collections.emptyList();
        this.detectorRules = Collections.unmodifiableList(detectorRules);
        this.detectorDescription = detectorDescription != null ? detectorDescription : DefaultDetectorDescription.of(this);
    }

    public String getDetectorDescription() {
        return detectorDescription;
    }

    /**
     * The analysis function used e.g. count, rare, min etc. There is no
     * validation to check this value is one of a predefined set
     *
     * @return The function or <code>null</code> if not set
     */
    public String getFunction() {
        return function;
    }

    /**
     * The analysis field
     *
     * @return The field to analyse
     */
    public String getFieldName() {
        return fieldName;
    }

    /**
     * The 'by' field or <code>null</code> if not set.
     *
     * @return The 'by' field
     */
    public String getByFieldName() {
        return byFieldName;
    }

    /**
     * The 'over' field or <code>null</code> if not set.
     *
     * @return The 'over' field
     */
    public String getOverFieldName() {
        return overFieldName;
    }

    /**
     * Segments the analysis along another field to have completely
     * independent baselines for each instance of the partition field
     *
     * @return The partition field
     */
    public String getPartitionFieldName() {
        return partitionFieldName;
    }

    /**
     * Whether a new series should be used as the 'null' series when
     * there isn't a value for the 'by' or 'over' field.
     *
     * @return true if the 'null' series should be created
     */
    public boolean isUseNull() {
        return useNull;
    }

    /**
     * Excludes frequently-occurring metrics from the analysis;
     * can apply to 'by' field, 'over' field, or both
     *
     * @return the value that the user set
     */
    public ExcludeFrequent getExcludeFrequent() {
        return excludeFrequent;
    }

    public List<DetectionRule> getDetectorRules() {
        return detectorRules;
    }

    /**
     * Returns a list with the byFieldName, overFieldName and partitionFieldName that are not null
     *
     * @return a list with the byFieldName, overFieldName and partitionFieldName that are not null
     */
    public List<String> extractAnalysisFields() {
        List<String> analysisFields = Arrays.asList(getByFieldName(),
                getOverFieldName(), getPartitionFieldName());
        return analysisFields.stream().filter(item -> item != null).collect(Collectors.toList());
    }

    public Set<String> extractReferencedFilters() {
        return detectorRules == null ? Collections.emptySet()
                : detectorRules.stream().map(DetectionRule::extractReferencedFilters)
                        .flatMap(Set::stream).collect(Collectors.toSet());
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other instanceof Detector == false) {
            return false;
        }

        Detector that = (Detector) other;

        return Objects.equals(this.detectorDescription, that.detectorDescription) &&
                Objects.equals(this.function, that.function) &&
                Objects.equals(this.fieldName, that.fieldName) &&
                Objects.equals(this.byFieldName, that.byFieldName) &&
                Objects.equals(this.overFieldName, that.overFieldName) &&
                Objects.equals(this.partitionFieldName, that.partitionFieldName) &&
                Objects.equals(this.useNull, that.useNull) &&
                Objects.equals(this.excludeFrequent, that.excludeFrequent) &&
                Objects.equals(this.detectorRules, that.detectorRules);
    }

    @Override
    public int hashCode() {
        return Objects.hash(detectorDescription, function, fieldName, byFieldName,
                overFieldName, partitionFieldName, useNull, excludeFrequent,
                detectorRules);
    }

    public static class Builder {

        /**
         * Functions that do not support rules:
         * <ul>
         * <li>lat_long - because it is a multivariate feature
         * <li>metric - because having the same conditions on min,max,mean is
         * error-prone
         * </ul>
         */
        static final Set<String> FUNCTIONS_WITHOUT_RULE_SUPPORT = new HashSet<>(Arrays.asList(Detector.LAT_LONG, Detector.METRIC));

        private String detectorDescription;
        private String function;
        private String fieldName;
        private String byFieldName;
        private String overFieldName;
        private String partitionFieldName;
        private boolean useNull = false;
        private ExcludeFrequent excludeFrequent;
        private List<DetectionRule> detectorRules = Collections.emptyList();

        public Builder() {
        }

        public Builder(Detector detector) {
            detectorDescription = detector.detectorDescription;
            function = detector.function;
            fieldName = detector.fieldName;
            byFieldName = detector.byFieldName;
            overFieldName = detector.overFieldName;
            partitionFieldName = detector.partitionFieldName;
            useNull = detector.useNull;
            excludeFrequent = detector.excludeFrequent;
            detectorRules = new ArrayList<>(detector.detectorRules.size());
            for (DetectionRule rule : detector.getDetectorRules()) {
                detectorRules.add(rule);
            }
        }

        public Builder(String function, String fieldName) {
            this.function = function;
            this.fieldName = fieldName;
        }

        public void setDetectorDescription(String detectorDescription) {
            this.detectorDescription = detectorDescription;
        }

        public void setFunction(String function) {
            this.function = function;
        }

        public void setFieldName(String fieldName) {
            this.fieldName = fieldName;
        }

        public void setByFieldName(String byFieldName) {
            this.byFieldName = byFieldName;
        }

        public void setOverFieldName(String overFieldName) {
            this.overFieldName = overFieldName;
        }

        public void setPartitionFieldName(String partitionFieldName) {
            this.partitionFieldName = partitionFieldName;
        }

        public void setUseNull(boolean useNull) {
            this.useNull = useNull;
        }

        public void setExcludeFrequent(ExcludeFrequent excludeFrequent) {
            this.excludeFrequent = excludeFrequent;
        }

        public void setDetectorRules(List<DetectionRule> detectorRules) {
            this.detectorRules = detectorRules;
        }

        public List<DetectionRule> getDetectorRules() {
            return detectorRules;
        }

        public Detector build() {
            return build(false);
        }

        public Detector build(boolean isSummarised) {
            boolean emptyField = Strings.isEmpty(fieldName);
            boolean emptyByField = Strings.isEmpty(byFieldName);
            boolean emptyOverField = Strings.isEmpty(overFieldName);
            if (Detector.ANALYSIS_FUNCTIONS.contains(function) == false) {
                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_UNKNOWN_FUNCTION, function));
            }

            if (emptyField && emptyByField && emptyOverField) {
                if (!Detector.COUNT_WITHOUT_FIELD_FUNCTIONS.contains(function)) {
                    throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_NO_ANALYSIS_FIELD_NOT_COUNT));
                }
            }

            if (isSummarised && Detector.METRIC.equals(function)) {
                throw new IllegalArgumentException(
                        Messages.getMessage(Messages.JOB_CONFIG_FUNCTION_INCOMPATIBLE_PRESUMMARIZED, Detector.METRIC));
            }

            // check functions have required fields

            if (emptyField && Detector.FIELD_NAME_FUNCTIONS.contains(function)) {
                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_FUNCTION_REQUIRES_FIELDNAME, function));
            }

            if (!emptyField && (Detector.FIELD_NAME_FUNCTIONS.contains(function) == false)) {
                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_FIELDNAME_INCOMPATIBLE_FUNCTION, function));
            }

            if (emptyByField && Detector.BY_FIELD_NAME_FUNCTIONS.contains(function)) {
                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_FUNCTION_REQUIRES_BYFIELD, function));
            }

            if (!emptyByField && Detector.NO_BY_FIELD_NAME_FUNCTIONS.contains(function)) {
                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_BYFIELD_INCOMPATIBLE_FUNCTION, function));
            }

            if (emptyOverField && Detector.OVER_FIELD_NAME_FUNCTIONS.contains(function)) {
                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_FUNCTION_REQUIRES_OVERFIELD, function));
            }

            if (!emptyOverField && Detector.NO_OVER_FIELD_NAME_FUNCTIONS.contains(function)) {
                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_OVERFIELD_INCOMPATIBLE_FUNCTION, function));
            }

            // field names cannot contain certain characters
            String[] fields = { fieldName, byFieldName, overFieldName, partitionFieldName };
            for (String field : fields) {
                verifyFieldName(field);
            }

            String function = this.function == null ? Detector.METRIC : this.function;
            if (detectorRules.isEmpty() == false) {
                if (FUNCTIONS_WITHOUT_RULE_SUPPORT.contains(function)) {
                    String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_NOT_SUPPORTED_BY_FUNCTION, function);
                    throw new IllegalArgumentException(msg);
                }
                for (DetectionRule rule : detectorRules) {
                    checkScoping(rule);
                }
            }

            return new Detector(detectorDescription, function, fieldName, byFieldName, overFieldName, partitionFieldName,
                    useNull, excludeFrequent, detectorRules);
        }

        public List<String> extractAnalysisFields() {
            List<String> analysisFields = Arrays.asList(byFieldName,
                    overFieldName, partitionFieldName);
            return analysisFields.stream().filter(item -> item != null).collect(Collectors.toList());
        }

        /**
         * Check that the characters used in a field name will not cause problems.
         *
         * @param field The field name to be validated
         * @return true if the field name is valid
         */
        public static boolean verifyFieldName(String field) throws ElasticsearchParseException {
            if (field != null && containsInvalidChar(field)) {
                throw new IllegalArgumentException(
                        Messages.getMessage(Messages.JOB_CONFIG_INVALID_FIELDNAME_CHARS, field, Detector.PROHIBITED));
            }
            return true;
        }

        private static boolean containsInvalidChar(String field) {
            for (Character ch : Detector.PROHIBITED_FIELDNAME_CHARACTERS) {
                if (field.indexOf(ch) >= 0) {
                    return true;
                }
            }
            return field.chars().anyMatch(ch -> Character.isISOControl(ch));
        }

        private void checkScoping(DetectionRule rule) throws ElasticsearchParseException {
            String targetFieldName = rule.getTargetFieldName();
            checkTargetFieldNameIsValid(extractAnalysisFields(), targetFieldName);
            List<String> validOptions = getValidFieldNameOptions(rule);
            for (RuleCondition condition : rule.getRuleConditions()) {
                if (!validOptions.contains(condition.getFieldName())) {
                    String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_INVALID_FIELD_NAME, validOptions,
                            condition.getFieldName());
                    throw new IllegalArgumentException(msg);
                }
            }
        }

        private void checkTargetFieldNameIsValid(List<String> analysisFields, String targetFieldName)
                throws ElasticsearchParseException {
            if (targetFieldName != null && !analysisFields.contains(targetFieldName)) {
                String msg =
                        Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_INVALID_TARGET_FIELD_NAME, analysisFields, targetFieldName);
                throw new IllegalArgumentException(msg);
            }
        }

        private List<String> getValidFieldNameOptions(DetectionRule rule) {
            List<String> result = new ArrayList<>();
            if (overFieldName != null) {
                result.add(byFieldName == null ? overFieldName : byFieldName);
            } else if (byFieldName != null) {
                result.add(byFieldName);
            }

            if (rule.getTargetFieldName() != null) {
                ScopingLevel targetLevel = ScopingLevel.from(this, rule.getTargetFieldName());
                result = result.stream().filter(field -> targetLevel.isHigherThan(ScopingLevel.from(this, field)))
                        .collect(Collectors.toList());
            }

            if (isEmptyFieldNameAllowed(rule)) {
                result.add(null);
            }
            return result;
        }

        private boolean isEmptyFieldNameAllowed(DetectionRule rule) {
            List<String> analysisFields = extractAnalysisFields();
            return analysisFields.isEmpty() || (rule.getTargetFieldName() != null && analysisFields.size() == 1);
        }

        enum ScopingLevel {
            PARTITION(3),
            OVER(2),
            BY(1);

            int level;

            ScopingLevel(int level) {
                this.level = level;
            }

            boolean isHigherThan(ScopingLevel other) {
                return level > other.level;
            }

            static ScopingLevel from(Detector.Builder detector, String fieldName) {
                if (fieldName.equals(detector.partitionFieldName)) {
                    return ScopingLevel.PARTITION;
                }
                if (fieldName.equals(detector.overFieldName)) {
                    return ScopingLevel.OVER;
                }
                if (fieldName.equals(detector.byFieldName)) {
                    return ScopingLevel.BY;
                }
                throw new IllegalArgumentException(
                        "fieldName '" + fieldName + "' does not match an analysis field");
            }
        }

    }
}
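A sketch of the Builder validation above; "rare" fails the no-analysis-field check until a by field is supplied (the field names are hypothetical):

    Detector.Builder builder = new Detector.Builder("rare", null);
    // builder.build() here would throw: no analysis field is set
    builder.setByFieldName("status");
    Detector detector = builder.build();    // passes validation
    detector.getDetectorDescription();      // defaults to: rare by status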
@ -0,0 +1,56 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

import java.io.IOException;
import java.util.Locale;

public enum IgnoreDowntime implements Writeable {

    NEVER, ONCE, ALWAYS;

    /**
     * <p>
     * Parses a string and returns the corresponding enum value.
     * </p>
     * <p>
     * The method differs from {@link #valueOf(String)} by being
     * able to handle leading/trailing whitespace and being case
     * insensitive.
     * </p>
     * <p>
     * If there is no match {@link IllegalArgumentException} is thrown.
     * </p>
     *
     * @param value A String that should match one of the enum values
     * @return the matching enum value
     */
    public static IgnoreDowntime fromString(String value) {
        return valueOf(value.trim().toUpperCase(Locale.ROOT));
    }

    public static IgnoreDowntime fromStream(StreamInput in) throws IOException {
        int ordinal = in.readVInt();
        if (ordinal < 0 || ordinal >= values().length) {
            throw new IOException("Unknown IgnoreDowntime ordinal [" + ordinal + "]");
        }
        return values()[ordinal];
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(ordinal());
    }

    @Override
    public String toString() {
        return name().toLowerCase(Locale.ROOT);
    }
}
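The ordinal round trip above, sketched with Elasticsearch's in-memory stream classes (an assumption; any StreamOutput/StreamInput pair works):

    // writeTo stores the ordinal as a vint; fromStream reads it back and
    // rejects out-of-range values with an IOException.
    BytesStreamOutput out = new BytesStreamOutput();
    IgnoreDowntime.ONCE.writeTo(out);
    IgnoreDowntime read = IgnoreDowntime.fromStream(out.bytes().streamInput());
    // read == IgnoreDowntime.ONCE; fromString(" Once ") yields the same value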
@ -0,0 +1,672 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.utils.MlStrings;
import org.elasticsearch.xpack.ml.utils.time.TimeUtils;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;

/**
 * This class represents a configured and created Job. The creation time is set
 * to the time the object was constructed, state is set to
 * {@link JobState#OPENING} and the finished time and last data time fields are
 * {@code null} until the job is finished or has seen some data, respectively.
 * If the job was created to read data from a list of files, FileUrls will be a
 * non-empty list; otherwise the job expects data to be streamed to it.
 */
public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent {

    public static final String TYPE = "job";

    /*
     * Field names used in serialization
     */
    public static final ParseField ID = new ParseField("job_id");
    public static final ParseField ANALYSIS_CONFIG = new ParseField("analysis_config");
    public static final ParseField ANALYSIS_LIMITS = new ParseField("analysis_limits");
    public static final ParseField CREATE_TIME = new ParseField("create_time");
    public static final ParseField CUSTOM_SETTINGS = new ParseField("custom_settings");
    public static final ParseField DATA_DESCRIPTION = new ParseField("data_description");
    public static final ParseField DESCRIPTION = new ParseField("description");
    public static final ParseField FINISHED_TIME = new ParseField("finished_time");
    public static final ParseField IGNORE_DOWNTIME = new ParseField("ignore_downtime");
    public static final ParseField LAST_DATA_TIME = new ParseField("last_data_time");
    public static final ParseField MODEL_DEBUG_CONFIG = new ParseField("model_debug_config");
    public static final ParseField RENORMALIZATION_WINDOW_DAYS = new ParseField("renormalization_window_days");
    public static final ParseField BACKGROUND_PERSIST_INTERVAL = new ParseField("background_persist_interval");
    public static final ParseField MODEL_SNAPSHOT_RETENTION_DAYS = new ParseField("model_snapshot_retention_days");
    public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days");
    public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id");
    public static final ParseField INDEX_NAME = new ParseField("index_name");

    // Used for QueryPage
    public static final ParseField RESULTS_FIELD = new ParseField("jobs");

    public static final String ALL = "_all";

    public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("job_details", Builder::new);

    public static final int MAX_JOB_ID_LENGTH = 64;
    public static final long MIN_BACKGROUND_PERSIST_INTERVAL = 3600;

    static {
        PARSER.declareString(Builder::setId, ID);
        PARSER.declareStringOrNull(Builder::setDescription, DESCRIPTION);
        PARSER.declareField(Builder::setCreateTime, p -> {
            if (p.currentToken() == Token.VALUE_NUMBER) {
                return new Date(p.longValue());
            } else if (p.currentToken() == Token.VALUE_STRING) {
                return new Date(TimeUtils.dateStringToEpoch(p.text()));
            }
            throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" + CREATE_TIME.getPreferredName() + "]");
        }, CREATE_TIME, ValueType.VALUE);
        PARSER.declareField(Builder::setFinishedTime, p -> {
            if (p.currentToken() == Token.VALUE_NUMBER) {
                return new Date(p.longValue());
            } else if (p.currentToken() == Token.VALUE_STRING) {
                return new Date(TimeUtils.dateStringToEpoch(p.text()));
            }
            throw new IllegalArgumentException(
                    "unexpected token [" + p.currentToken() + "] for [" + FINISHED_TIME.getPreferredName() + "]");
        }, FINISHED_TIME, ValueType.VALUE);
        PARSER.declareField(Builder::setLastDataTime, p -> {
            if (p.currentToken() == Token.VALUE_NUMBER) {
                return new Date(p.longValue());
            } else if (p.currentToken() == Token.VALUE_STRING) {
                return new Date(TimeUtils.dateStringToEpoch(p.text()));
            }
            throw new IllegalArgumentException(
                    "unexpected token [" + p.currentToken() + "] for [" + LAST_DATA_TIME.getPreferredName() + "]");
        }, LAST_DATA_TIME, ValueType.VALUE);
        PARSER.declareObject(Builder::setAnalysisConfig, AnalysisConfig.PARSER, ANALYSIS_CONFIG);
        PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, ANALYSIS_LIMITS);
        PARSER.declareObject(Builder::setDataDescription, DataDescription.PARSER, DATA_DESCRIPTION);
        PARSER.declareObject(Builder::setModelDebugConfig, ModelDebugConfig.PARSER, MODEL_DEBUG_CONFIG);
        PARSER.declareField(Builder::setIgnoreDowntime, (p, c) -> IgnoreDowntime.fromString(p.text()), IGNORE_DOWNTIME, ValueType.STRING);
        PARSER.declareLong(Builder::setRenormalizationWindowDays, RENORMALIZATION_WINDOW_DAYS);
        PARSER.declareLong(Builder::setBackgroundPersistInterval, BACKGROUND_PERSIST_INTERVAL);
        PARSER.declareLong(Builder::setResultsRetentionDays, RESULTS_RETENTION_DAYS);
        PARSER.declareLong(Builder::setModelSnapshotRetentionDays, MODEL_SNAPSHOT_RETENTION_DAYS);
        PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT);
        PARSER.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID);
        PARSER.declareString(Builder::setIndexName, INDEX_NAME);
    }
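
    // Illustrative note on the parser above (assuming TimeUtils.dateStringToEpoch
    // accepts ISO 8601): the three date fields take either an epoch-milliseconds
    // number or a date string, so these two documents yield the same create_time:
    //   { "job_id": "my-job", "create_time": 1485000000000 }
    //   { "job_id": "my-job", "create_time": "2017-01-21T12:00:00Z" }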

    private final String jobId;
    private final String description;
    // NORELEASE: Use Jodatime instead
    private final Date createTime;
    private final Date finishedTime;
    private final Date lastDataTime;
    private final AnalysisConfig analysisConfig;
    private final AnalysisLimits analysisLimits;
    private final DataDescription dataDescription;
    private final ModelDebugConfig modelDebugConfig;
    private final IgnoreDowntime ignoreDowntime;
    private final Long renormalizationWindowDays;
    private final Long backgroundPersistInterval;
    private final Long modelSnapshotRetentionDays;
    private final Long resultsRetentionDays;
    private final Map<String, Object> customSettings;
    private final String modelSnapshotId;
    private final String indexName;

    public Job(String jobId, String description, Date createTime, Date finishedTime, Date lastDataTime,
            AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription,
            ModelDebugConfig modelDebugConfig, IgnoreDowntime ignoreDowntime,
            Long renormalizationWindowDays, Long backgroundPersistInterval, Long modelSnapshotRetentionDays, Long resultsRetentionDays,
            Map<String, Object> customSettings, String modelSnapshotId, String indexName) {

        if (analysisConfig == null) {
            throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_MISSING_ANALYSISCONFIG));
        }

        checkValueNotLessThan(0, "renormalizationWindowDays", renormalizationWindowDays);
        checkValueNotLessThan(MIN_BACKGROUND_PERSIST_INTERVAL, "backgroundPersistInterval", backgroundPersistInterval);
        checkValueNotLessThan(0, "modelSnapshotRetentionDays", modelSnapshotRetentionDays);
        checkValueNotLessThan(0, "resultsRetentionDays", resultsRetentionDays);

        if (!MlStrings.isValidId(jobId)) {
            throw new IllegalArgumentException(Messages.getMessage(Messages.INVALID_ID, ID.getPreferredName(), jobId));
        }
        if (jobId.length() > MAX_JOB_ID_LENGTH) {
            throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_ID_TOO_LONG, MAX_JOB_ID_LENGTH));
        }

        if (Strings.isNullOrEmpty(indexName)) {
            indexName = jobId;
        } else if (!MlStrings.isValidId(indexName)) {
            throw new IllegalArgumentException(Messages.getMessage(Messages.INVALID_ID, INDEX_NAME.getPreferredName(), indexName));
        }

        this.jobId = jobId;
        this.description = description;
        this.createTime = createTime;
        this.finishedTime = finishedTime;
        this.lastDataTime = lastDataTime;
        this.analysisConfig = analysisConfig;
        this.analysisLimits = analysisLimits;
        this.dataDescription = dataDescription;
        this.modelDebugConfig = modelDebugConfig;
        this.ignoreDowntime = ignoreDowntime;
        this.renormalizationWindowDays = renormalizationWindowDays;
        this.backgroundPersistInterval = backgroundPersistInterval;
        this.modelSnapshotRetentionDays = modelSnapshotRetentionDays;
        this.resultsRetentionDays = resultsRetentionDays;
        this.customSettings = customSettings;
        this.modelSnapshotId = modelSnapshotId;
        this.indexName = indexName;
    }
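
    // Illustrative note on the guards above (hypothetical values, and assuming
    // MlStrings.isValidId rejects punctuation): a jobId such as "my-job!" or a
    // backgroundPersistInterval of 600L (below MIN_BACKGROUND_PERSIST_INTERVAL,
    // 3600) throws IllegalArgumentException, while a null or empty indexName
    // silently defaults to the job id.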
||||
|
||||
public Job(StreamInput in) throws IOException {
|
||||
jobId = in.readString();
|
||||
description = in.readOptionalString();
|
||||
createTime = new Date(in.readVLong());
|
||||
finishedTime = in.readBoolean() ? new Date(in.readVLong()) : null;
|
||||
lastDataTime = in.readBoolean() ? new Date(in.readVLong()) : null;
|
||||
analysisConfig = new AnalysisConfig(in);
|
||||
analysisLimits = in.readOptionalWriteable(AnalysisLimits::new);
|
||||
dataDescription = in.readOptionalWriteable(DataDescription::new);
|
||||
modelDebugConfig = in.readOptionalWriteable(ModelDebugConfig::new);
|
||||
ignoreDowntime = in.readOptionalWriteable(IgnoreDowntime::fromStream);
|
||||
renormalizationWindowDays = in.readOptionalLong();
|
||||
backgroundPersistInterval = in.readOptionalLong();
|
||||
modelSnapshotRetentionDays = in.readOptionalLong();
|
||||
resultsRetentionDays = in.readOptionalLong();
|
||||
customSettings = in.readMap();
|
||||
modelSnapshotId = in.readOptionalString();
|
||||
indexName = in.readString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the Job Id.
|
||||
*
|
||||
* @return The job Id string
|
||||
*/
|
||||
public String getId() {
|
||||
return jobId;
|
||||
}
|
||||
|
||||
/**
|
||||
* The name of the index storing the job's results and state.
|
||||
* This defaults to {@link #getId()} if a specific index name is not set.
|
||||
* @return The job's index name
|
||||
*/
|
||||
public String getIndexName() {
|
||||
return indexName;
|
||||
}
|
||||
|
||||
/**
|
||||
* The job description
|
||||
*
|
||||
* @return job description
|
||||
*/
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
/**
|
||||
* The Job creation time. This name is preferred when serialising to the
|
||||
* REST API.
|
||||
*
|
||||
* @return The date the job was created
|
||||
*/
|
||||
public Date getCreateTime() {
|
||||
return createTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* The Job creation time. This name is preferred when serialising to the
|
||||
* data store.
|
||||
*
|
||||
* @return The date the job was created
|
||||
*/
|
||||
public Date getAtTimestamp() {
|
||||
return createTime;
|
||||
}
|
||||
    /**
     * The time the job finished, or <code>null</code> if it has not finished.
     *
     * @return The date the job finished or <code>null</code>
     */
    public Date getFinishedTime() {
        return finishedTime;
    }
    /**
     * The last time data was uploaded to the job or <code>null</code> if no
     * data has been seen.
     *
     * @return The date at which the last data was processed
     */
    public Date getLastDataTime() {
        return lastDataTime;
    }

    /**
     * The analysis configuration object
     *
     * @return The AnalysisConfig
     */
    public AnalysisConfig getAnalysisConfig() {
        return analysisConfig;
    }

    /**
     * The analysis options object
     *
     * @return The AnalysisLimits
     */
    public AnalysisLimits getAnalysisLimits() {
        return analysisLimits;
    }

    public IgnoreDowntime getIgnoreDowntime() {
        return ignoreDowntime;
    }

    public ModelDebugConfig getModelDebugConfig() {
        return modelDebugConfig;
    }
    /**
     * If not set, the input data is assumed to be CSV with a '_time' field in
     * epoch format.
     *
     * @return A DataDescription or <code>null</code>
     * @see DataDescription
     */
    public DataDescription getDataDescription() {
        return dataDescription;
    }
    /**
     * The duration of the renormalization window in days
     *
     * @return renormalization window in days
     */
    public Long getRenormalizationWindowDays() {
        return renormalizationWindowDays;
    }

    /**
     * The background persistence interval in seconds
     *
     * @return background persistence interval in seconds
     */
    public Long getBackgroundPersistInterval() {
        return backgroundPersistInterval;
    }

    public Long getModelSnapshotRetentionDays() {
        return modelSnapshotRetentionDays;
    }

    public Long getResultsRetentionDays() {
        return resultsRetentionDays;
    }

    public Map<String, Object> getCustomSettings() {
        return customSettings;
    }

    public String getModelSnapshotId() {
        return modelSnapshotId;
    }

    /**
     * Get a list of all input data fields mentioned in the job configuration,
     * namely analysis fields and the time field.
     *
     * @return the list of fields - never <code>null</code>
     */
    public List<String> allFields() {
        Set<String> allFields = new TreeSet<>();

        // analysis fields
        if (analysisConfig != null) {
            allFields.addAll(analysisConfig.analysisFields());
        }

        // time field
        if (dataDescription != null) {
            String timeField = dataDescription.getTimeField();
            if (timeField != null) {
                allFields.add(timeField);
            }
        }

        // remove empty strings
        allFields.remove("");

        return new ArrayList<>(allFields);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(jobId);
        out.writeOptionalString(description);
        out.writeVLong(createTime.getTime());
        if (finishedTime != null) {
            out.writeBoolean(true);
            out.writeVLong(finishedTime.getTime());
        } else {
            out.writeBoolean(false);
        }
        if (lastDataTime != null) {
            out.writeBoolean(true);
            out.writeVLong(lastDataTime.getTime());
        } else {
            out.writeBoolean(false);
        }
        analysisConfig.writeTo(out);
        out.writeOptionalWriteable(analysisLimits);
        out.writeOptionalWriteable(dataDescription);
        out.writeOptionalWriteable(modelDebugConfig);
        out.writeOptionalWriteable(ignoreDowntime);
        out.writeOptionalLong(renormalizationWindowDays);
        out.writeOptionalLong(backgroundPersistInterval);
        out.writeOptionalLong(modelSnapshotRetentionDays);
        out.writeOptionalLong(resultsRetentionDays);
        out.writeMap(customSettings);
        out.writeOptionalString(modelSnapshotId);
        out.writeString(indexName);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        doXContentBody(builder, params);
        builder.endObject();
        return builder;
    }

    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        final String humanReadableSuffix = "_string";

        builder.field(ID.getPreferredName(), jobId);
        if (description != null) {
            builder.field(DESCRIPTION.getPreferredName(), description);
        }
        builder.dateField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + humanReadableSuffix, createTime.getTime());
        if (finishedTime != null) {
            builder.dateField(FINISHED_TIME.getPreferredName(), FINISHED_TIME.getPreferredName() + humanReadableSuffix,
                    finishedTime.getTime());
        }
        if (lastDataTime != null) {
            builder.dateField(LAST_DATA_TIME.getPreferredName(), LAST_DATA_TIME.getPreferredName() + humanReadableSuffix,
                    lastDataTime.getTime());
        }
        builder.field(ANALYSIS_CONFIG.getPreferredName(), analysisConfig, params);
        if (analysisLimits != null) {
            builder.field(ANALYSIS_LIMITS.getPreferredName(), analysisLimits, params);
        }
        if (dataDescription != null) {
            builder.field(DATA_DESCRIPTION.getPreferredName(), dataDescription, params);
        }
        if (modelDebugConfig != null) {
            builder.field(MODEL_DEBUG_CONFIG.getPreferredName(), modelDebugConfig, params);
        }
        if (ignoreDowntime != null) {
            builder.field(IGNORE_DOWNTIME.getPreferredName(), ignoreDowntime);
        }
        if (renormalizationWindowDays != null) {
            builder.field(RENORMALIZATION_WINDOW_DAYS.getPreferredName(), renormalizationWindowDays);
        }
        if (backgroundPersistInterval != null) {
            builder.field(BACKGROUND_PERSIST_INTERVAL.getPreferredName(), backgroundPersistInterval);
        }
        if (modelSnapshotRetentionDays != null) {
            builder.field(MODEL_SNAPSHOT_RETENTION_DAYS.getPreferredName(), modelSnapshotRetentionDays);
        }
        if (resultsRetentionDays != null) {
            builder.field(RESULTS_RETENTION_DAYS.getPreferredName(), resultsRetentionDays);
        }
        if (customSettings != null) {
            builder.field(CUSTOM_SETTINGS.getPreferredName(), customSettings);
        }
        if (modelSnapshotId != null) {
            builder.field(MODEL_SNAPSHOT_ID.getPreferredName(), modelSnapshotId);
        }
        builder.field(INDEX_NAME.getPreferredName(), indexName);
        return builder;
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other instanceof Job == false) {
            return false;
        }

        Job that = (Job) other;
        return Objects.equals(this.jobId, that.jobId) && Objects.equals(this.description, that.description)
                && Objects.equals(this.createTime, that.createTime)
                && Objects.equals(this.finishedTime, that.finishedTime)
                && Objects.equals(this.lastDataTime, that.lastDataTime)
                && Objects.equals(this.analysisConfig, that.analysisConfig)
                && Objects.equals(this.analysisLimits, that.analysisLimits) && Objects.equals(this.dataDescription, that.dataDescription)
                && Objects.equals(this.modelDebugConfig, that.modelDebugConfig)
                && Objects.equals(this.ignoreDowntime, that.ignoreDowntime)
                && Objects.equals(this.renormalizationWindowDays, that.renormalizationWindowDays)
                && Objects.equals(this.backgroundPersistInterval, that.backgroundPersistInterval)
                && Objects.equals(this.modelSnapshotRetentionDays, that.modelSnapshotRetentionDays)
                && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays)
                && Objects.equals(this.customSettings, that.customSettings)
                && Objects.equals(this.modelSnapshotId, that.modelSnapshotId)
                && Objects.equals(this.indexName, that.indexName);
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId, description, createTime, finishedTime, lastDataTime, analysisConfig,
                analysisLimits, dataDescription, modelDebugConfig, renormalizationWindowDays,
                backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, ignoreDowntime, customSettings,
                modelSnapshotId, indexName);
    }

    // Class already extends AbstractDiffable, so this is copied from ToXContentToBytes#toString()
    @Override
    public final String toString() {
        return Strings.toString(this);
    }

    private static void checkValueNotLessThan(long minVal, String name, Long value) {
        if (value != null && value < minVal) {
            throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW, name, minVal, value));
        }
    }

    public static class Builder {

        private String id;
        private String description;

        private AnalysisConfig analysisConfig;
        private AnalysisLimits analysisLimits;
        private DataDescription dataDescription;
        private Date createTime;
        private Date finishedTime;
        private Date lastDataTime;
        private ModelDebugConfig modelDebugConfig;
        private Long renormalizationWindowDays;
        private Long backgroundPersistInterval;
        private Long modelSnapshotRetentionDays;
        private Long resultsRetentionDays;
        private IgnoreDowntime ignoreDowntime;
        private Map<String, Object> customSettings;
        private String modelSnapshotId;
        private String indexName;

        public Builder() {
        }

        public Builder(String id) {
            this.id = id;
        }

        public Builder(Job job) {
            this.id = job.getId();
            this.description = job.getDescription();
            this.analysisConfig = job.getAnalysisConfig();
            this.analysisLimits = job.getAnalysisLimits();
            this.dataDescription = job.getDataDescription();
            this.createTime = job.getCreateTime();
            this.finishedTime = job.getFinishedTime();
            this.lastDataTime = job.getLastDataTime();
            this.modelDebugConfig = job.getModelDebugConfig();
            this.renormalizationWindowDays = job.getRenormalizationWindowDays();
            this.backgroundPersistInterval = job.getBackgroundPersistInterval();
            this.modelSnapshotRetentionDays = job.getModelSnapshotRetentionDays();
            this.resultsRetentionDays = job.getResultsRetentionDays();
            this.ignoreDowntime = job.getIgnoreDowntime();
            this.customSettings = job.getCustomSettings();
            this.modelSnapshotId = job.getModelSnapshotId();
            this.indexName = job.getIndexName();
        }

        public void setId(String id) {
            this.id = id;
        }

        public String getId() {
            return id;
        }

        public void setCustomSettings(Map<String, Object> customSettings) {
            this.customSettings = customSettings;
        }

        public void setDescription(String description) {
            this.description = description;
        }

        public void setAnalysisConfig(AnalysisConfig.Builder configBuilder) {
            analysisConfig = configBuilder.build();
        }

        public void setAnalysisLimits(AnalysisLimits analysisLimits) {
            if (this.analysisLimits != null) {
                long oldMemoryLimit = this.analysisLimits.getModelMemoryLimit();
                long newMemoryLimit = analysisLimits.getModelMemoryLimit();
                if (newMemoryLimit < oldMemoryLimit) {
                    throw new IllegalArgumentException(
                            Messages.getMessage(Messages.JOB_CONFIG_UPDATE_ANALYSIS_LIMITS_MODEL_MEMORY_LIMIT_CANNOT_BE_DECREASED,
                                    oldMemoryLimit, newMemoryLimit));
                }
            }
            this.analysisLimits = analysisLimits;
        }

        public void setCreateTime(Date createTime) {
            this.createTime = createTime;
        }

        public void setFinishedTime(Date finishedTime) {
            this.finishedTime = finishedTime;
        }

        /**
         * Set the wall clock time of the last data upload
         * @param lastDataTime Wall clock time
         */
        public void setLastDataTime(Date lastDataTime) {
            this.lastDataTime = lastDataTime;
        }

        public void setDataDescription(DataDescription.Builder description) {
            dataDescription = description.build();
        }

        public void setModelDebugConfig(ModelDebugConfig modelDebugConfig) {
            this.modelDebugConfig = modelDebugConfig;
        }

        public void setBackgroundPersistInterval(Long backgroundPersistInterval) {
            this.backgroundPersistInterval = backgroundPersistInterval;
        }

        public void setRenormalizationWindowDays(Long renormalizationWindowDays) {
            this.renormalizationWindowDays = renormalizationWindowDays;
        }

        public void setModelSnapshotRetentionDays(Long modelSnapshotRetentionDays) {
            this.modelSnapshotRetentionDays = modelSnapshotRetentionDays;
        }

        public void setResultsRetentionDays(Long resultsRetentionDays) {
            this.resultsRetentionDays = resultsRetentionDays;
        }

        public void setIgnoreDowntime(IgnoreDowntime ignoreDowntime) {
            this.ignoreDowntime = ignoreDowntime;
        }

        public void setModelSnapshotId(String modelSnapshotId) {
            this.modelSnapshotId = modelSnapshotId;
        }

        public void setIndexName(String indexName) {
            this.indexName = indexName;
        }

        public Job build() {
            return build(false, null);
        }

        public Job build(boolean fromApi, String urlJobId) {

            Date createTime;
            Date finishedTime;
            Date lastDataTime;
            String modelSnapshotId;
            if (fromApi) {
                if (id == null) {
                    id = urlJobId;
                } else if (!id.equals(urlJobId)) {
                    throw new IllegalArgumentException(Messages.getMessage(Messages.INCONSISTENT_ID, ID.getPreferredName(), id, urlJobId));
                }
                createTime = this.createTime == null ? new Date() : this.createTime;
                finishedTime = null;
                lastDataTime = null;
                modelSnapshotId = null;
            } else {
                createTime = this.createTime;
                finishedTime = this.finishedTime;
                lastDataTime = this.lastDataTime;
                modelSnapshotId = this.modelSnapshotId;
            }

            return new Job(
                    id, description, createTime, finishedTime, lastDataTime, analysisConfig, analysisLimits,
                    dataDescription, modelDebugConfig, ignoreDowntime, renormalizationWindowDays,
                    backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, modelSnapshotId,
                    indexName
            );
        }
    }
}
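To make the two build(...) paths concrete, here is a minimal illustrative sketch, not part of the commit, of how a REST handler might drive the builder; the AnalysisConfig.Builder set-up is assumed and left abstract since its API is defined elsewhere in this migration.

// Illustrative sketch only; 'analysisConfigBuilder' is a prepared
// AnalysisConfig.Builder whose construction is out of scope here.
Job.Builder builder = new Job.Builder();
builder.setDescription("example job");
builder.setAnalysisConfig(analysisConfigBuilder);
// fromApi == true: the id may come from the URL, createTime defaults to now,
// and finishedTime/lastDataTime/modelSnapshotId are forced to null.
Job job = builder.build(true, "job-from-url");
// fromApi == false (e.g. rebuilding from cluster state): all fields are taken as-is.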
@ -0,0 +1,53 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

import java.io.IOException;
import java.util.Arrays;
import java.util.Locale;

/**
 * Jobs, whether running or complete, are in one of these states.
 * When a job is created it is initialised to the CLOSED state,
 * i.e. it is not running.
 */
public enum JobState implements Writeable {

    CLOSING, CLOSED, OPENING, OPENED, FAILED, DELETING;

    public static JobState fromString(String name) {
        return valueOf(name.trim().toUpperCase(Locale.ROOT));
    }

    public static JobState fromStream(StreamInput in) throws IOException {
        int ordinal = in.readVInt();
        if (ordinal < 0 || ordinal >= values().length) {
            throw new IOException("Unknown JobState ordinal [" + ordinal + "]");
        }
        return values()[ordinal];
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(ordinal());
    }

    /**
     * @return {@code true} if this state matches any of the given {@code candidates}
     */
    public boolean isAnyOf(JobState... candidates) {
        return Arrays.stream(candidates).anyMatch(candidate -> this == candidate);
    }

    @Override
    public String toString() {
        return name().toLowerCase(Locale.ROOT);
    }
}
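A short illustrative sketch, using only the enum above, of the case-insensitive parsing and the state queries it supports:

JobState state = JobState.fromString("Opened");          // trims and upper-cases the input
assert state == JobState.OPENED;
assert state.isAnyOf(JobState.OPENED, JobState.OPENING); // varargs membership test
assert state.toString().equals("opened");                // lowercase for REST output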
@ -0,0 +1,380 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Objects;

public class JobUpdate implements Writeable, ToXContent {
    public static final ParseField DETECTORS = new ParseField("detectors");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<JobUpdate, Void> PARSER =
            new ConstructingObjectParser<>("job_update", a -> new JobUpdate((String) a[0], (List<DetectorUpdate>) a[1],
                    (ModelDebugConfig) a[2], (AnalysisLimits) a[3], (Long) a[4], (Long) a[5], (Long) a[6], (Long) a[7],
                    (List<String>) a[8], (Map<String, Object>) a[9]));

    static {
        PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), Job.DESCRIPTION);
        PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), DetectorUpdate.PARSER, DETECTORS);
        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ModelDebugConfig.PARSER, Job.MODEL_DEBUG_CONFIG);
        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), AnalysisLimits.PARSER, Job.ANALYSIS_LIMITS);
        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), Job.BACKGROUND_PERSIST_INTERVAL);
        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), Job.RENORMALIZATION_WINDOW_DAYS);
        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), Job.RESULTS_RETENTION_DAYS);
        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), Job.MODEL_SNAPSHOT_RETENTION_DAYS);
        PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), AnalysisConfig.CATEGORIZATION_FILTERS);
        PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> p.map(), Job.CUSTOM_SETTINGS,
                ObjectParser.ValueType.OBJECT);
    }

    private final String description;
    private final List<DetectorUpdate> detectorUpdates;
    private final ModelDebugConfig modelDebugConfig;
    private final AnalysisLimits analysisLimits;
    private final Long renormalizationWindowDays;
    private final Long backgroundPersistInterval;
    private final Long modelSnapshotRetentionDays;
    private final Long resultsRetentionDays;
    private final List<String> categorizationFilters;
    private final Map<String, Object> customSettings;

    public JobUpdate(@Nullable String description, @Nullable List<DetectorUpdate> detectorUpdates,
                     @Nullable ModelDebugConfig modelDebugConfig, @Nullable AnalysisLimits analysisLimits,
                     @Nullable Long backgroundPersistInterval, @Nullable Long renormalizationWindowDays,
                     @Nullable Long resultsRetentionDays, @Nullable Long modelSnapshotRetentionDays,
                     @Nullable List<String> categorizationFilters, @Nullable Map<String, Object> customSettings) {
        this.description = description;
        this.detectorUpdates = detectorUpdates;
        this.modelDebugConfig = modelDebugConfig;
        this.analysisLimits = analysisLimits;
        this.renormalizationWindowDays = renormalizationWindowDays;
        this.backgroundPersistInterval = backgroundPersistInterval;
        this.modelSnapshotRetentionDays = modelSnapshotRetentionDays;
        this.resultsRetentionDays = resultsRetentionDays;
        this.categorizationFilters = categorizationFilters;
        this.customSettings = customSettings;
    }

    public JobUpdate(StreamInput in) throws IOException {
        description = in.readOptionalString();
        if (in.readBoolean()) {
            detectorUpdates = in.readList(DetectorUpdate::new);
        } else {
            detectorUpdates = null;
        }
        modelDebugConfig = in.readOptionalWriteable(ModelDebugConfig::new);
        analysisLimits = in.readOptionalWriteable(AnalysisLimits::new);
        renormalizationWindowDays = in.readOptionalLong();
        backgroundPersistInterval = in.readOptionalLong();
        modelSnapshotRetentionDays = in.readOptionalLong();
        resultsRetentionDays = in.readOptionalLong();
        if (in.readBoolean()) {
            categorizationFilters = in.readList(StreamInput::readString);
        } else {
            categorizationFilters = null;
        }
        customSettings = in.readMap();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeOptionalString(description);
        out.writeBoolean(detectorUpdates != null);
        if (detectorUpdates != null) {
            out.writeList(detectorUpdates);
        }
        out.writeOptionalWriteable(modelDebugConfig);
        out.writeOptionalWriteable(analysisLimits);
        out.writeOptionalLong(renormalizationWindowDays);
        out.writeOptionalLong(backgroundPersistInterval);
        out.writeOptionalLong(modelSnapshotRetentionDays);
        out.writeOptionalLong(resultsRetentionDays);
        out.writeBoolean(categorizationFilters != null);
        if (categorizationFilters != null) {
            out.writeStringList(categorizationFilters);
        }
        out.writeMap(customSettings);
    }

    public String getDescription() {
        return description;
    }

    public List<DetectorUpdate> getDetectorUpdates() {
        return detectorUpdates;
    }

    public ModelDebugConfig getModelDebugConfig() {
        return modelDebugConfig;
    }

    public AnalysisLimits getAnalysisLimits() {
        return analysisLimits;
    }

    public Long getRenormalizationWindowDays() {
        return renormalizationWindowDays;
    }

    public Long getBackgroundPersistInterval() {
        return backgroundPersistInterval;
    }

    public Long getModelSnapshotRetentionDays() {
        return modelSnapshotRetentionDays;
    }

    public Long getResultsRetentionDays() {
        return resultsRetentionDays;
    }

    public List<String> getCategorizationFilters() {
        return categorizationFilters;
    }

    public Map<String, Object> getCustomSettings() {
        return customSettings;
    }

    public boolean isAutodetectProcessUpdate() {
        return modelDebugConfig != null || detectorUpdates != null;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (description != null) {
            builder.field(Job.DESCRIPTION.getPreferredName(), description);
        }
        if (detectorUpdates != null) {
            builder.field(DETECTORS.getPreferredName(), detectorUpdates);
        }
        if (modelDebugConfig != null) {
            builder.field(Job.MODEL_DEBUG_CONFIG.getPreferredName(), modelDebugConfig);
        }
        if (analysisLimits != null) {
            builder.field(Job.ANALYSIS_LIMITS.getPreferredName(), analysisLimits);
        }
        if (renormalizationWindowDays != null) {
            builder.field(Job.RENORMALIZATION_WINDOW_DAYS.getPreferredName(), renormalizationWindowDays);
        }
        if (backgroundPersistInterval != null) {
            builder.field(Job.BACKGROUND_PERSIST_INTERVAL.getPreferredName(), backgroundPersistInterval);
        }
        if (modelSnapshotRetentionDays != null) {
            builder.field(Job.MODEL_SNAPSHOT_RETENTION_DAYS.getPreferredName(), modelSnapshotRetentionDays);
        }
        if (resultsRetentionDays != null) {
            builder.field(Job.RESULTS_RETENTION_DAYS.getPreferredName(), resultsRetentionDays);
        }
        if (categorizationFilters != null) {
            builder.field(AnalysisConfig.CATEGORIZATION_FILTERS.getPreferredName(), categorizationFilters);
        }
        if (customSettings != null) {
            builder.field(Job.CUSTOM_SETTINGS.getPreferredName(), customSettings);
        }
        builder.endObject();
        return builder;
    }

    /**
     * Updates {@code source} with the new values in this object, returning a new {@link Job}.
     *
     * @param source Source job to be updated
     * @return A new job equal to {@code source} with the non-null updates applied
     */
    public Job mergeWithJob(Job source) {
        Job.Builder builder = new Job.Builder(source);
        if (description != null) {
            builder.setDescription(description);
        }
        if (detectorUpdates != null && detectorUpdates.isEmpty() == false) {
            AnalysisConfig ac = source.getAnalysisConfig();
            int numDetectors = ac.getDetectors().size();
            for (DetectorUpdate dd : detectorUpdates) {
                if (dd.getIndex() >= numDetectors) {
                    throw new IllegalArgumentException("Detector index is >= the number of detectors");
                }

                Detector.Builder detectorBuilder = new Detector.Builder(ac.getDetectors().get(dd.getIndex()));
                if (dd.getDescription() != null) {
                    detectorBuilder.setDetectorDescription(dd.getDescription());
                }
                if (dd.getRules() != null) {
                    detectorBuilder.setDetectorRules(dd.getRules());
                }
                ac.getDetectors().set(dd.getIndex(), detectorBuilder.build());
            }

            AnalysisConfig.Builder acBuilder = new AnalysisConfig.Builder(ac);
            builder.setAnalysisConfig(acBuilder);
        }
        if (modelDebugConfig != null) {
            builder.setModelDebugConfig(modelDebugConfig);
        }
        if (analysisLimits != null) {
            builder.setAnalysisLimits(analysisLimits);
        }
        if (renormalizationWindowDays != null) {
            builder.setRenormalizationWindowDays(renormalizationWindowDays);
        }
        if (backgroundPersistInterval != null) {
            builder.setBackgroundPersistInterval(backgroundPersistInterval);
        }
        if (modelSnapshotRetentionDays != null) {
            builder.setModelSnapshotRetentionDays(modelSnapshotRetentionDays);
        }
        if (resultsRetentionDays != null) {
            builder.setResultsRetentionDays(resultsRetentionDays);
        }
        if (categorizationFilters != null) {
            AnalysisConfig.Builder analysisConfigBuilder = new AnalysisConfig.Builder(source.getAnalysisConfig());
            analysisConfigBuilder.setCategorizationFilters(categorizationFilters);
            builder.setAnalysisConfig(analysisConfigBuilder);
        }
        if (customSettings != null) {
            builder.setCustomSettings(customSettings);
        }

        return builder.build();
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other instanceof JobUpdate == false) {
            return false;
        }

        JobUpdate that = (JobUpdate) other;

        return Objects.equals(this.description, that.description)
                && Objects.equals(this.detectorUpdates, that.detectorUpdates)
                && Objects.equals(this.modelDebugConfig, that.modelDebugConfig)
                && Objects.equals(this.analysisLimits, that.analysisLimits)
                && Objects.equals(this.renormalizationWindowDays, that.renormalizationWindowDays)
                && Objects.equals(this.backgroundPersistInterval, that.backgroundPersistInterval)
                && Objects.equals(this.modelSnapshotRetentionDays, that.modelSnapshotRetentionDays)
                && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays)
                && Objects.equals(this.categorizationFilters, that.categorizationFilters)
                && Objects.equals(this.customSettings, that.customSettings);
    }

    @Override
    public int hashCode() {
        return Objects.hash(description, detectorUpdates, modelDebugConfig, analysisLimits, renormalizationWindowDays,
                backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, categorizationFilters, customSettings);
    }

    public static class DetectorUpdate implements Writeable, ToXContent {
        @SuppressWarnings("unchecked")
        public static final ConstructingObjectParser<DetectorUpdate, Void> PARSER =
                new ConstructingObjectParser<>("detector_update", a -> new DetectorUpdate((int) a[0], (String) a[1],
                        (List<DetectionRule>) a[2]));

        public static final ParseField INDEX = new ParseField("index");
        public static final ParseField RULES = new ParseField("rules");

        static {
            PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), INDEX);
            PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), Job.DESCRIPTION);
            PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), DetectionRule.PARSER, RULES);
        }

        private int index;
        private String description;
        private List<DetectionRule> rules;

        public DetectorUpdate(int index, String description, List<DetectionRule> rules) {
            this.index = index;
            this.description = description;
            this.rules = rules;
        }

        public DetectorUpdate(StreamInput in) throws IOException {
            index = in.readInt();
            description = in.readOptionalString();
            if (in.readBoolean()) {
                rules = in.readList(DetectionRule::new);
            } else {
                rules = null;
            }
        }

        public int getIndex() {
            return index;
        }

        public String getDescription() {
            return description;
        }

        public List<DetectionRule> getRules() {
            return rules;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeInt(index);
            out.writeOptionalString(description);
            out.writeBoolean(rules != null);
            if (rules != null) {
                out.writeList(rules);
            }
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();

            builder.field(INDEX.getPreferredName(), index);
            if (description != null) {
                builder.field(Job.DESCRIPTION.getPreferredName(), description);
            }
            if (rules != null) {
                builder.field(RULES.getPreferredName(), rules);
            }
            builder.endObject();

            return builder;
        }

        @Override
        public int hashCode() {
            return Objects.hash(index, description, rules);
        }

        @Override
        public boolean equals(Object other) {
            if (this == other) {
                return true;
            }
            if (other instanceof DetectorUpdate == false) {
                return false;
            }

            DetectorUpdate that = (DetectorUpdate) other;
            return this.index == that.index && Objects.equals(this.description, that.description)
                    && Objects.equals(this.rules, that.rules);
        }
    }
}
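The merge semantics above are null-means-keep: only non-null update fields replace the source job's values. A minimal sketch, assuming an existing Job instance named existingJob (hypothetical):

JobUpdate update = new JobUpdate("new description", null, null, null,
        null, null, null, null, null, null);
Job updated = update.mergeWithJob(existingJob);
// Only the description changes; every other setting is copied from existingJob.
// This update touches neither detectors nor model debug config:
assert update.isAutodetectProcessUpdate() == false;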
@ -0,0 +1,93 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

public class MlFilter extends ToXContentToBytes implements Writeable {
    public static final ParseField TYPE = new ParseField("filter");
    public static final ParseField ID = new ParseField("id");
    public static final ParseField ITEMS = new ParseField("items");

    // For QueryPage
    public static final ParseField RESULTS_FIELD = new ParseField("filters");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<MlFilter, Void> PARSER = new ConstructingObjectParser<>(
            TYPE.getPreferredName(), a -> new MlFilter((String) a[0], (List<String>) a[1]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), ID);
        PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), ITEMS);
    }

    private final String id;
    private final List<String> items;

    public MlFilter(String id, List<String> items) {
        this.id = Objects.requireNonNull(id, ID.getPreferredName() + " must not be null");
        this.items = Objects.requireNonNull(items, ITEMS.getPreferredName() + " must not be null");
    }

    public MlFilter(StreamInput in) throws IOException {
        id = in.readString();
        items = Arrays.asList(in.readStringArray());
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(id);
        out.writeStringArray(items.toArray(new String[items.size()]));
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(ID.getPreferredName(), id);
        builder.field(ITEMS.getPreferredName(), items);
        builder.endObject();
        return builder;
    }

    public String getId() {
        return id;
    }

    public List<String> getItems() {
        return new ArrayList<>(items);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }

        if (!(obj instanceof MlFilter)) {
            return false;
        }

        MlFilter other = (MlFilter) obj;
        return id.equals(other.id) && items.equals(other.items);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, items);
    }
}
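Note that getItems() above returns a defensive copy, so callers cannot mutate a filter through it. A minimal sketch using only the class as defined (java.util imports elided):

MlFilter filter = new MlFilter("safe_domains", Arrays.asList("elastic.co", "example.com"));
List<String> copy = filter.getItems();
copy.add("mutated");                      // modifies only the returned copy
assert filter.getItems().size() == 2;     // the filter itself is unchanged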
@ -0,0 +1,98 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.ml.job.messages.Messages;

import java.io.IOException;
import java.util.Objects;

public class ModelDebugConfig extends ToXContentToBytes implements Writeable {

    private static final double MAX_PERCENTILE = 100.0;

    private static final ParseField TYPE_FIELD = new ParseField("model_debug_config");
    public static final ParseField BOUNDS_PERCENTILE_FIELD = new ParseField("bounds_percentile");
    public static final ParseField TERMS_FIELD = new ParseField("terms");

    public static final ConstructingObjectParser<ModelDebugConfig, Void> PARSER = new ConstructingObjectParser<>(
            TYPE_FIELD.getPreferredName(), a -> new ModelDebugConfig((Double) a[0], (String) a[1]));

    static {
        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), BOUNDS_PERCENTILE_FIELD);
        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TERMS_FIELD);
    }

    private final double boundsPercentile;
    private final String terms;

    public ModelDebugConfig(double boundsPercentile, String terms) {
        if (boundsPercentile < 0.0 || boundsPercentile > MAX_PERCENTILE) {
            String msg = Messages.getMessage(Messages.JOB_CONFIG_MODEL_DEBUG_CONFIG_INVALID_BOUNDS_PERCENTILE);
            throw new IllegalArgumentException(msg);
        }
        this.boundsPercentile = boundsPercentile;
        this.terms = terms;
    }

    public ModelDebugConfig(StreamInput in) throws IOException {
        boundsPercentile = in.readDouble();
        terms = in.readOptionalString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeDouble(boundsPercentile);
        out.writeOptionalString(terms);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(BOUNDS_PERCENTILE_FIELD.getPreferredName(), boundsPercentile);
        if (terms != null) {
            builder.field(TERMS_FIELD.getPreferredName(), terms);
        }
        builder.endObject();
        return builder;
    }

    public double getBoundsPercentile() {
        return this.boundsPercentile;
    }

    public String getTerms() {
        return this.terms;
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other instanceof ModelDebugConfig == false) {
            return false;
        }

        ModelDebugConfig that = (ModelDebugConfig) other;
        return Objects.equals(this.boundsPercentile, that.boundsPercentile) && Objects.equals(this.terms, that.terms);
    }

    @Override
    public int hashCode() {
        return Objects.hash(boundsPercentile, terms);
    }
}
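The constructor above rejects a bounds percentile outside [0, 100]; terms is optional. A minimal sketch of both outcomes:

ModelDebugConfig ok = new ModelDebugConfig(95.0, null);   // terms may be null
try {
    new ModelDebugConfig(150.0, null);                    // out of [0, 100]
} catch (IllegalArgumentException e) {
    // message key: JOB_CONFIG_MODEL_DEBUG_CONFIG_INVALID_BOUNDS_PERCENTILE
}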
@ -0,0 +1,103 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

import java.io.IOException;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Enum representing logical comparisons on doubles
 */
public enum Operator implements Writeable {
    EQ {
        @Override
        public boolean test(double lhs, double rhs) {
            return Double.compare(lhs, rhs) == 0;
        }
    },
    GT {
        @Override
        public boolean test(double lhs, double rhs) {
            return Double.compare(lhs, rhs) > 0;
        }
    },
    GTE {
        @Override
        public boolean test(double lhs, double rhs) {
            return Double.compare(lhs, rhs) >= 0;
        }
    },
    LT {
        @Override
        public boolean test(double lhs, double rhs) {
            return Double.compare(lhs, rhs) < 0;
        }
    },
    LTE {
        @Override
        public boolean test(double lhs, double rhs) {
            return Double.compare(lhs, rhs) <= 0;
        }
    },
    MATCH {
        @Override
        public boolean match(Pattern pattern, String field) {
            Matcher match = pattern.matcher(field);
            return match.matches();
        }

        @Override
        public boolean expectsANumericArgument() {
            return false;
        }
    };

    public static final ParseField OPERATOR_FIELD = new ParseField("operator");

    public boolean test(double lhs, double rhs) {
        return false;
    }

    public boolean match(Pattern pattern, String field) {
        return false;
    }

    public boolean expectsANumericArgument() {
        return true;
    }

    public static Operator fromString(String name) {
        return valueOf(name.trim().toUpperCase(Locale.ROOT));
    }

    public static Operator readFromStream(StreamInput in) throws IOException {
        int ordinal = in.readVInt();
        if (ordinal < 0 || ordinal >= values().length) {
            throw new IOException("Unknown Operator ordinal [" + ordinal + "]");
        }
        return values()[ordinal];
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(ordinal());
    }

    @Override
    public String toString() {
        return name().toLowerCase(Locale.ROOT);
    }
}
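A small sketch of the enum's behaviour, using only what is defined above; note the base defaults (test and match return false) mean each constant answers only the calls it overrides:

assert Operator.GTE.test(5.0, 5.0);                                // numeric comparison
assert Operator.LT.test(1.0, 2.0);
assert Operator.MATCH.match(Pattern.compile("cpu.*"), "cpu_user"); // regex comparison
assert Operator.MATCH.test(1.0, 2.0) == false;                     // default, not overridden
assert Operator.MATCH.expectsANumericArgument() == false;
assert Operator.fromString(" gte ") == Operator.GTE;               // trimmed, case-insensitive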
@ -0,0 +1,27 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import java.util.Locale;

public enum RuleAction {
    FILTER_RESULTS;

    /**
     * Case-insensitive from string method.
     *
     * @param value String representation
     * @return The rule action
     */
    public static RuleAction fromString(String value) {
        return RuleAction.valueOf(value.toUpperCase(Locale.ROOT));
    }

    @Override
    public String toString() {
        return name().toLowerCase(Locale.ROOT);
    }
}
@ -0,0 +1,245 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.job.messages.Messages;

import java.io.IOException;
import java.util.EnumSet;
import java.util.Objects;

public class RuleCondition extends ToXContentToBytes implements Writeable {
    public static final ParseField CONDITION_TYPE_FIELD = new ParseField("condition_type");
    public static final ParseField RULE_CONDITION_FIELD = new ParseField("rule_condition");
    public static final ParseField FIELD_NAME_FIELD = new ParseField("field_name");
    public static final ParseField FIELD_VALUE_FIELD = new ParseField("field_value");
    public static final ParseField VALUE_FILTER_FIELD = new ParseField("value_filter");

    public static final ConstructingObjectParser<RuleCondition, Void> PARSER =
            new ConstructingObjectParser<>(RULE_CONDITION_FIELD.getPreferredName(),
                    a -> new RuleCondition((RuleConditionType) a[0], (String) a[1], (String) a[2], (Condition) a[3], (String) a[4]));

    static {
        PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
            if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
                return RuleConditionType.fromString(p.text());
            }
            throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
        }, CONDITION_TYPE_FIELD, ValueType.STRING);
        PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), FIELD_NAME_FIELD);
        PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), FIELD_VALUE_FIELD);
        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), Condition.PARSER, Condition.CONDITION_FIELD);
        PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), VALUE_FILTER_FIELD);
    }

    private final RuleConditionType conditionType;
    private final String fieldName;
    private final String fieldValue;
    private final Condition condition;
    private final String valueFilter;

    public RuleCondition(StreamInput in) throws IOException {
        conditionType = RuleConditionType.readFromStream(in);
        condition = in.readOptionalWriteable(Condition::new);
        fieldName = in.readOptionalString();
        fieldValue = in.readOptionalString();
        valueFilter = in.readOptionalString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        conditionType.writeTo(out);
        out.writeOptionalWriteable(condition);
        out.writeOptionalString(fieldName);
        out.writeOptionalString(fieldValue);
        out.writeOptionalString(valueFilter);
    }

    public RuleCondition(RuleConditionType conditionType, String fieldName, String fieldValue, Condition condition, String valueFilter) {
        this.conditionType = conditionType;
        this.fieldName = fieldName;
        this.fieldValue = fieldValue;
        this.condition = condition;
        this.valueFilter = valueFilter;

        verifyFieldsBoundToType(this);
        verifyFieldValueRequiresFieldName(this);
    }

    public RuleCondition(RuleCondition ruleCondition) {
        this.conditionType = ruleCondition.conditionType;
        this.fieldName = ruleCondition.fieldName;
        this.fieldValue = ruleCondition.fieldValue;
        this.condition = ruleCondition.condition;
        this.valueFilter = ruleCondition.valueFilter;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(CONDITION_TYPE_FIELD.getPreferredName(), conditionType);
        if (condition != null) {
            builder.field(Condition.CONDITION_FIELD.getPreferredName(), condition);
        }
        if (fieldName != null) {
            builder.field(FIELD_NAME_FIELD.getPreferredName(), fieldName);
        }
        if (fieldValue != null) {
            builder.field(FIELD_VALUE_FIELD.getPreferredName(), fieldValue);
        }
        if (valueFilter != null) {
            builder.field(VALUE_FILTER_FIELD.getPreferredName(), valueFilter);
        }
        builder.endObject();
        return builder;
    }

    public RuleConditionType getConditionType() {
        return conditionType;
    }

    /**
     * The field name for which the rule applies. Can be null, meaning the rule
     * applies to all results.
     */
    public String getFieldName() {
        return fieldName;
    }

    /**
     * The value of the field name for which the rule applies. When set, the
     * rule applies only to the results that have the fieldName/fieldValue pair.
     * When null, the rule applies to all values of the specified field name.
     * Only applicable when fieldName is not null.
     */
    public String getFieldValue() {
        return fieldValue;
    }

    public Condition getCondition() {
        return condition;
    }

    /**
     * The unique identifier of a filter. Required when the rule type is
     * categorical. Should be null for all other types.
     */
    public String getValueFilter() {
        return valueFilter;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj instanceof RuleCondition == false) {
            return false;
        }

        RuleCondition other = (RuleCondition) obj;
        return Objects.equals(conditionType, other.conditionType) && Objects.equals(fieldName, other.fieldName)
                && Objects.equals(fieldValue, other.fieldValue) && Objects.equals(condition, other.condition)
                && Objects.equals(valueFilter, other.valueFilter);
    }

    @Override
    public int hashCode() {
        return Objects.hash(conditionType, fieldName, fieldValue, condition, valueFilter);
    }

    public static RuleCondition createCategorical(String fieldName, String valueFilter) {
        return new RuleCondition(RuleConditionType.CATEGORICAL, fieldName, null, null, valueFilter);
    }

    private static void verifyFieldsBoundToType(RuleCondition ruleCondition) throws ElasticsearchParseException {
        switch (ruleCondition.getConditionType()) {
        case CATEGORICAL:
            verifyCategorical(ruleCondition);
            break;
        case NUMERICAL_ACTUAL:
        case NUMERICAL_TYPICAL:
        case NUMERICAL_DIFF_ABS:
            verifyNumerical(ruleCondition);
            break;
        default:
            throw new IllegalStateException();
        }
    }

    private static void verifyCategorical(RuleCondition ruleCondition) throws ElasticsearchParseException {
        checkCategoricalHasNoField(Condition.CONDITION_FIELD.getPreferredName(), ruleCondition.getCondition());
        checkCategoricalHasNoField(RuleCondition.FIELD_VALUE_FIELD.getPreferredName(), ruleCondition.getFieldValue());
        checkCategoricalHasField(RuleCondition.VALUE_FILTER_FIELD.getPreferredName(), ruleCondition.getValueFilter());
    }

    private static void checkCategoricalHasNoField(String fieldName, Object fieldValue) throws ElasticsearchParseException {
        if (fieldValue != null) {
            String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_CATEGORICAL_INVALID_OPTION, fieldName);
            throw new IllegalArgumentException(msg);
        }
    }

    private static void checkCategoricalHasField(String fieldName, Object fieldValue) throws ElasticsearchParseException {
        if (fieldValue == null) {
            String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_CATEGORICAL_MISSING_OPTION, fieldName);
            throw new IllegalArgumentException(msg);
        }
    }

    private static void verifyNumerical(RuleCondition ruleCondition) throws ElasticsearchParseException {
        checkNumericalHasNoField(RuleCondition.VALUE_FILTER_FIELD.getPreferredName(), ruleCondition.getValueFilter());
        checkNumericalHasField(Condition.CONDITION_FIELD.getPreferredName(), ruleCondition.getCondition());
        if (ruleCondition.getFieldName() != null && ruleCondition.getFieldValue() == null) {
            String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_NUMERICAL_WITH_FIELD_NAME_REQUIRES_FIELD_VALUE);
            throw new IllegalArgumentException(msg);
        }
        checkNumericalConditionOperatorsAreValid(ruleCondition);
    }

    private static void checkNumericalHasNoField(String fieldName, Object fieldValue) throws ElasticsearchParseException {
        if (fieldValue != null) {
            String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_NUMERICAL_INVALID_OPTION, fieldName);
            throw new IllegalArgumentException(msg);
        }
    }

    private static void checkNumericalHasField(String fieldName, Object fieldValue) throws ElasticsearchParseException {
        if (fieldValue == null) {
            String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_NUMERICAL_MISSING_OPTION, fieldName);
            throw new IllegalArgumentException(msg);
        }
    }

    private static void verifyFieldValueRequiresFieldName(RuleCondition ruleCondition) throws ElasticsearchParseException {
        if (ruleCondition.getFieldValue() != null && ruleCondition.getFieldName() == null) {
            String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_MISSING_FIELD_NAME,
                    ruleCondition.getFieldValue());
            throw new IllegalArgumentException(msg);
        }
    }

    static final EnumSet<Operator> VALID_CONDITION_OPERATORS = EnumSet.of(Operator.LT, Operator.LTE, Operator.GT, Operator.GTE);

    private static void checkNumericalConditionOperatorsAreValid(RuleCondition ruleCondition) throws ElasticsearchParseException {
        Operator operator = ruleCondition.getCondition().getOperator();
        if (!VALID_CONDITION_OPERATORS.contains(operator)) {
            String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_NUMERICAL_INVALID_OPERATOR, operator);
            throw new IllegalArgumentException(msg);
        }
    }
}
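The verify* checks above partition the fields by condition type: categorical conditions must carry a value_filter and nothing numeric, while numerical ones need a Condition whose operator is one of LT, LTE, GT, GTE. A minimal sketch (the Condition class is defined elsewhere in this commit):

// Valid: a categorical condition pairs a field name with a filter id.
RuleCondition categorical = RuleCondition.createCategorical("airline", "safe_airlines");

// Invalid: a categorical condition must not carry a field_value,
// so this constructor call throws IllegalArgumentException.
new RuleCondition(RuleConditionType.CATEGORICAL, "airline", "AAL", null, "safe_airlines");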
@ -0,0 +1,50 @@
|
||||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.ml.job.config;
|
||||
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Locale;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
|
||||
public enum RuleConditionType implements Writeable {
|
||||
CATEGORICAL,
|
||||
NUMERICAL_ACTUAL,
|
||||
NUMERICAL_TYPICAL,
|
||||
NUMERICAL_DIFF_ABS;
|
||||
|
||||
/**
|
||||
* Case-insensitive from string method.
|
||||
*
|
||||
* @param value
|
||||
* String representation
|
||||
* @return The condition type
|
||||
*/
|
||||
public static RuleConditionType fromString(String value) {
|
||||
return RuleConditionType.valueOf(value.toUpperCase(Locale.ROOT));
|
||||
}
|
||||
|
||||
public static RuleConditionType readFromStream(StreamInput in) throws IOException {
|
||||
int ordinal = in.readVInt();
|
||||
if (ordinal < 0 || ordinal >= values().length) {
|
||||
throw new IOException("Unknown RuleConditionType ordinal [" + ordinal + "]");
|
||||
}
|
||||
return values()[ordinal];
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeVInt(ordinal());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return name().toLowerCase(Locale.ROOT);
|
||||
}
|
||||
}
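
Two serialization paths coexist above: the wire format writes the ordinal, while JSON and REST use the lowercase name, so fromString must invert toString. A quick self-contained round-trip check (an illustrative sketch, not part of the commit; it assumes access to the enum from the same package):

public class RuleConditionTypeRoundTrip {
    public static void main(String[] args) {
        // toString() lowercases the constant name and fromString() uppercases
        // it back, so the two must round-trip for every constant.
        for (RuleConditionType type : RuleConditionType.values()) {
            if (RuleConditionType.fromString(type.toString()) != type) {
                throw new AssertionError("round trip failed for " + type);
            }
        }
        System.out.println("all " + RuleConditionType.values().length + " constants round-trip");
    }
}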
@ -0,0 +1,242 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.messages;

import java.text.MessageFormat;
import java.util.Locale;
import java.util.ResourceBundle;

/**
 * Defines the keys for all the message strings
 */
public final class Messages {
    /**
     * The base name of the bundle without the .properties extension
     * or locale
     */
    private static final String BUNDLE_NAME = "org.elasticsearch.xpack.ml.job.messages.ml_messages";

    public static final String AUTODETECT_FLUSH_UNEXPECTED_DEATH = "autodetect.flush.failed.unexpected.death";

    public static final String CPU_LIMIT_JOB = "cpu.limit.jobs";

    public static final String DATASTORE_ERROR_DELETING = "datastore.error.deleting";
    public static final String DATASTORE_ERROR_DELETING_MISSING_INDEX = "datastore.error.deleting.missing.index";
    public static final String DATASTORE_ERROR_EXECUTING_SCRIPT = "datastore.error.executing.script";

    public static final String INVALID_ID = "invalid.id";
    public static final String INCONSISTENT_ID = "inconsistent.id";

    public static final String LICENSE_LIMIT_DETECTORS = "license.limit.detectors";
    public static final String LICENSE_LIMIT_JOBS = "license.limit.jobs";
    public static final String LICENSE_LIMIT_DETECTORS_REACTIVATE = "license.limit.detectors.reactivate";
    public static final String LICENSE_LIMIT_JOBS_REACTIVATE = "license.limit.jobs.reactivate";
    public static final String LICENSE_LIMIT_PARTITIONS = "license.limit.partitions";

    public static final String JOB_AUDIT_CREATED = "job.audit.created";
    public static final String JOB_AUDIT_DELETED = "job.audit.deleted";
    public static final String JOB_AUDIT_PAUSED = "job.audit.paused";
    public static final String JOB_AUDIT_RESUMED = "job.audit.resumed";
    public static final String JOB_AUDIT_UPDATED = "job.audit.updated";
    public static final String JOB_AUDIT_REVERTED = "job.audit.reverted";
    public static final String JOB_AUDIT_OLD_RESULTS_DELETED = "job.audit.old.results.deleted";
    public static final String JOB_AUDIT_SNAPSHOT_DELETED = "job.audit.snapshot.deleted";
    public static final String JOB_AUDIT_DATAFEED_STARTED_FROM_TO = "job.audit.datafeed.started.from.to";
    public static final String JOB_AUDIT_DATAFEED_CONTINUED_REALTIME = "job.audit.datafeed.continued.realtime";
    public static final String JOB_AUDIT_DATAFEED_STARTED_REALTIME = "job.audit.datafeed.started.realtime";
    public static final String JOB_AUDIT_DATAFEED_LOOKBACK_COMPLETED = "job.audit.datafeed.lookback.completed";
    public static final String JOB_AUDIT_DATAFEED_STOPPED = "job.audit.datafeed.stopped";
    public static final String JOB_AUDIT_DATAFEED_NO_DATA = "job.audit.datafeed.no.data";
    public static final String JOB_AUDIT_DATAFEED_DATA_SEEN_AGAIN = "job.audit.datafeed.data.seen.again";
    public static final String JOB_AUDIT_DATAFEED_DATA_ANALYSIS_ERROR = "job.audit.datafeed.data.analysis.error";
    public static final String JOB_AUDIT_DATAFEED_DATA_EXTRACTION_ERROR = "job.audit.datafeed.data.extraction.error";
    public static final String JOB_AUDIT_DATAFEED_RECOVERED = "job.audit.datafeed.recovered";

    public static final String SYSTEM_AUDIT_STARTED = "system.audit.started";
    public static final String SYSTEM_AUDIT_SHUTDOWN = "system.audit.shutdown";

    public static final String JOB_CANNOT_DELETE_WHILE_RUNNING = "job.cannot.delete.while.running";
    public static final String JOB_CANNOT_PAUSE = "job.cannot.pause";
    public static final String JOB_CANNOT_RESUME = "job.cannot.resume";

    public static final String JOB_CONFIG_BYFIELD_INCOMPATIBLE_FUNCTION = "job.config.byField.incompatible.function";
    public static final String JOB_CONFIG_BYFIELD_NEEDS_ANOTHER = "job.config.byField.needs.another";
    public static final String JOB_CONFIG_CATEGORIZATION_FILTERS_REQUIRE_CATEGORIZATION_FIELD_NAME = "job.config.categorization.filters."
            + "require.categorization.field.name";
    public static final String JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_DUPLICATES = "job.config.categorization.filters.contains"
            + ".duplicates";
    public static final String JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_EMPTY = "job.config.categorization.filter.contains.empty";
    public static final String JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_INVALID_REGEX = "job.config.categorization.filter.contains."
            + "invalid.regex";
    public static final String JOB_CONFIG_CONDITION_INVALID_OPERATOR = "job.config.condition.invalid.operator";
    public static final String JOB_CONFIG_CONDITION_INVALID_VALUE_NULL = "job.config.condition.invalid.value.null";
    public static final String JOB_CONFIG_CONDITION_INVALID_VALUE_NUMBER = "job.config.condition.invalid.value.numeric";
    public static final String JOB_CONFIG_CONDITION_INVALID_VALUE_REGEX = "job.config.condition.invalid.value.regex";
    public static final String JOB_CONFIG_DETECTION_RULE_CONDITION_CATEGORICAL_INVALID_OPTION = "job.config.detectionrule.condition."
            + "categorical.invalid.option";
    public static final String JOB_CONFIG_DETECTION_RULE_CONDITION_CATEGORICAL_MISSING_OPTION = "job.config.detectionrule.condition."
            + "categorical.missing.option";
    public static final String JOB_CONFIG_DETECTION_RULE_CONDITION_INVALID_FIELD_NAME = "job.config.detectionrule.condition.invalid."
            + "fieldname";
    public static final String JOB_CONFIG_DETECTION_RULE_CONDITION_MISSING_FIELD_NAME = "job.config.detectionrule.condition.missing."
            + "fieldname";
    public static final String JOB_CONFIG_DETECTION_RULE_CONDITION_NUMERICAL_INVALID_OPERATOR = "job.config.detectionrule.condition."
            + "numerical.invalid.operator";
    public static final String JOB_CONFIG_DETECTION_RULE_CONDITION_NUMERICAL_INVALID_OPTION = "job.config.detectionrule.condition."
            + "numerical.invalid.option";
    public static final String JOB_CONFIG_DETECTION_RULE_CONDITION_NUMERICAL_MISSING_OPTION = "job.config.detectionrule.condition."
            + "numerical.missing.option";
    public static final String JOB_CONFIG_DETECTION_RULE_CONDITION_NUMERICAL_WITH_FIELD_NAME_REQUIRES_FIELD_VALUE = "job.config."
            + "detectionrule.condition.numerical.with.fieldname.requires.fieldvalue";
    public static final String JOB_CONFIG_DETECTION_RULE_INVALID_TARGET_FIELD_NAME = "job.config.detectionrule.invalid.targetfieldname";
    public static final String JOB_CONFIG_DETECTION_RULE_MISSING_TARGET_FIELD_NAME = "job.config.detectionrule.missing.targetfieldname";
    public static final String JOB_CONFIG_DETECTION_RULE_NOT_SUPPORTED_BY_FUNCTION = "job.config.detectionrule.not.supported.by.function";
    public static final String JOB_CONFIG_DETECTION_RULE_REQUIRES_AT_LEAST_ONE_CONDITION = "job.config.detectionrule.requires.at."
            + "least.one.condition";
    public static final String JOB_CONFIG_FIELDNAME_INCOMPATIBLE_FUNCTION = "job.config.fieldname.incompatible.function";
    public static final String JOB_CONFIG_FUNCTION_REQUIRES_BYFIELD = "job.config.function.requires.byfield";
    public static final String JOB_CONFIG_FUNCTION_REQUIRES_FIELDNAME = "job.config.function.requires.fieldname";
    public static final String JOB_CONFIG_FUNCTION_REQUIRES_OVERFIELD = "job.config.function.requires.overfield";
    public static final String JOB_CONFIG_ID_TOO_LONG = "job.config.id.too.long";
    public static final String JOB_CONFIG_ID_ALREADY_TAKEN = "job.config.id.already.taken";
    public static final String JOB_CONFIG_INVALID_FIELDNAME_CHARS = "job.config.invalid.fieldname.chars";
    public static final String JOB_CONFIG_INVALID_TIMEFORMAT = "job.config.invalid.timeformat";
    public static final String JOB_CONFIG_FUNCTION_INCOMPATIBLE_PRESUMMARIZED = "job.config.function.incompatible.presummarized";
    public static final String JOB_CONFIG_MISSING_ANALYSISCONFIG = "job.config.missing.analysisconfig";
    public static final String JOB_CONFIG_MODEL_DEBUG_CONFIG_INVALID_BOUNDS_PERCENTILE = "job.config.model.debug.config.invalid.bounds."
            + "percentile";
    public static final String JOB_CONFIG_FIELD_VALUE_TOO_LOW = "job.config.field.value.too.low";
    public static final String JOB_CONFIG_NO_ANALYSIS_FIELD = "job.config.no.analysis.field";
    public static final String JOB_CONFIG_NO_ANALYSIS_FIELD_NOT_COUNT = "job.config.no.analysis.field.not.count";
    public static final String JOB_CONFIG_NO_DETECTORS = "job.config.no.detectors";
    public static final String JOB_CONFIG_OVERFIELD_INCOMPATIBLE_FUNCTION = "job.config.overField.incompatible.function";
    public static final String JOB_CONFIG_OVERLAPPING_BUCKETS_INCOMPATIBLE_FUNCTION = "job.config.overlapping.buckets.incompatible."
            + "function";
    public static final String JOB_CONFIG_OVERFIELD_NEEDS_ANOTHER = "job.config.overField.needs.another";
    public static final String JOB_CONFIG_MULTIPLE_BUCKETSPANS_REQUIRE_BUCKETSPAN = "job.config.multiple.bucketspans.require.bucket_span";
    public static final String JOB_CONFIG_MULTIPLE_BUCKETSPANS_MUST_BE_MULTIPLE = "job.config.multiple.bucketspans.must.be.multiple";
    public static final String JOB_CONFIG_PER_PARTITION_NORMALIZATION_REQUIRES_PARTITION_FIELD = "job.config.per.partition.normalization."
            + "requires.partition.field";
    public static final String JOB_CONFIG_PER_PARTITION_NORMALIZATION_CANNOT_USE_INFLUENCERS = "job.config.per.partition.normalization."
            + "cannot.use.influencers";

    public static final String JOB_CONFIG_UPDATE_ANALYSIS_LIMITS_PARSE_ERROR = "job.config.update.analysis.limits.parse.error";
    public static final String JOB_CONFIG_UPDATE_ANALYSIS_LIMITS_CANNOT_BE_NULL = "job.config.update.analysis.limits.cannot.be.null";
    public static final String JOB_CONFIG_UPDATE_ANALYSIS_LIMITS_MODEL_MEMORY_LIMIT_CANNOT_BE_DECREASED = "job.config.update.analysis."
            + "limits.model.memory.limit.cannot.be.decreased";
    public static final String JOB_CONFIG_UPDATE_CATEGORIZATION_FILTERS_INVALID = "job.config.update.categorization.filters.invalid";
    public static final String JOB_CONFIG_UPDATE_CUSTOM_SETTINGS_INVALID = "job.config.update.custom.settings.invalid";
    public static final String JOB_CONFIG_UPDATE_DESCRIPTION_INVALID = "job.config.update.description.invalid";
    public static final String JOB_CONFIG_UPDATE_DETECTORS_INVALID = "job.config.update.detectors.invalid";
    public static final String JOB_CONFIG_UPDATE_DETECTORS_INVALID_DETECTOR_INDEX = "job.config.update.detectors.invalid.detector.index";
    public static final String JOB_CONFIG_UPDATE_DETECTORS_DETECTOR_INDEX_SHOULD_BE_INTEGER = "job.config.update.detectors.detector.index."
            + "should.be.integer";
    public static final String JOB_CONFIG_UPDATE_DETECTORS_MISSING_PARAMS = "job.config.update.detectors.missing.params";
    public static final String JOB_CONFIG_UPDATE_DETECTORS_DESCRIPTION_SHOULD_BE_STRING = "job.config.update.detectors.description.should"
            + ".be.string";
    public static final String JOB_CONFIG_UPDATE_DETECTOR_RULES_PARSE_ERROR = "job.config.update.detectors.rules.parse.error";
    public static final String JOB_CONFIG_UPDATE_FAILED = "job.config.update.failed";
    public static final String JOB_CONFIG_UPDATE_INVALID_KEY = "job.config.update.invalid.key";
    public static final String JOB_CONFIG_UPDATE_IGNORE_DOWNTIME_PARSE_ERROR = "job.config.update.ignore.downtime.parse.error";
    public static final String JOB_CONFIG_UPDATE_JOB_IS_NOT_CLOSED = "job.config.update.job.is.not.closed";
    public static final String JOB_CONFIG_UPDATE_MODEL_DEBUG_CONFIG_PARSE_ERROR = "job.config.update.model.debug.config.parse.error";
    public static final String JOB_CONFIG_UPDATE_REQUIRES_NON_EMPTY_OBJECT = "job.config.update.requires.non.empty.object";
    public static final String JOB_CONFIG_UPDATE_PARSE_ERROR = "job.config.update.parse.error";
    public static final String JOB_CONFIG_UPDATE_BACKGROUND_PERSIST_INTERVAL_INVALID = "job.config.update.background.persist.interval."
            + "invalid";
    public static final String JOB_CONFIG_UPDATE_RENORMALIZATION_WINDOW_DAYS_INVALID = "job.config.update.renormalization.window.days."
            + "invalid";
    public static final String JOB_CONFIG_UPDATE_MODEL_SNAPSHOT_RETENTION_DAYS_INVALID = "job.config.update.model.snapshot.retention.days."
            + "invalid";
    public static final String JOB_CONFIG_UPDATE_RESULTS_RETENTION_DAYS_INVALID = "job.config.update.results.retention.days.invalid";
    public static final String JOB_CONFIG_UPDATE_DATAFEED_CONFIG_PARSE_ERROR = "job.config.update.datafeed.config.parse.error";
    public static final String JOB_CONFIG_UPDATE_DATAFEED_CONFIG_CANNOT_BE_NULL = "job.config.update.datafeed.config.cannot.be.null";

    public static final String JOB_CONFIG_UNKNOWN_FUNCTION = "job.config.unknown.function";

    public static final String JOB_INDEX_ALREADY_EXISTS = "job.index.already.exists";

    public static final String JOB_DATA_CONCURRENT_USE_CLOSE = "job.data.concurrent.use.close";
    public static final String JOB_DATA_CONCURRENT_USE_FLUSH = "job.data.concurrent.use.flush";
    public static final String JOB_DATA_CONCURRENT_USE_UPDATE = "job.data.concurrent.use.update";
    public static final String JOB_DATA_CONCURRENT_USE_UPLOAD = "job.data.concurrent.use.upload";

    public static final String DATAFEED_CONFIG_INVALID_OPTION_VALUE = "datafeed.config.invalid.option.value";
    public static final String DATAFEED_CONFIG_CANNOT_USE_SCRIPT_FIELDS_WITH_AGGS = "datafeed.config.cannot.use.script.fields.with.aggs";

    public static final String DATAFEED_DOES_NOT_SUPPORT_JOB_WITH_LATENCY = "datafeed.does.not.support.job.with.latency";
    public static final String DATAFEED_AGGREGATIONS_REQUIRES_JOB_WITH_SUMMARY_COUNT_FIELD =
            "datafeed.aggregations.requires.job.with.summary.count.field";

    public static final String DATAFEED_CANNOT_START = "datafeed.cannot.start";
    public static final String DATAFEED_CANNOT_STOP_IN_CURRENT_STATE = "datafeed.cannot.stop.in.current.state";
    public static final String DATAFEED_CANNOT_UPDATE_IN_CURRENT_STATE = "datafeed.cannot.update.in.current.state";
    public static final String DATAFEED_CANNOT_DELETE_IN_CURRENT_STATE = "datafeed.cannot.delete.in.current.state";
    public static final String DATAFEED_FAILED_TO_STOP = "datafeed.failed.to.stop";
    public static final String DATAFEED_NOT_FOUND = "datafeed.not.found";

    public static final String JOB_MISSING_QUANTILES = "job.missing.quantiles";
    public static final String JOB_UNKNOWN_ID = "job.unknown.id";

    public static final String JSON_JOB_CONFIG_MAPPING = "json.job.config.mapping.error";
    public static final String JSON_JOB_CONFIG_PARSE = "json.job.config.parse.error";

    public static final String JSON_DETECTOR_CONFIG_MAPPING = "json.detector.config.mapping.error";
    public static final String JSON_DETECTOR_CONFIG_PARSE = "json.detector.config.parse.error";

    public static final String REST_ACTION_NOT_ALLOWED_FOR_DATAFEED_JOB = "rest.action.not.allowed.for.datafeed.job";

    public static final String REST_INVALID_DATETIME_PARAMS = "rest.invalid.datetime.params";
    public static final String REST_INVALID_FLUSH_PARAMS_MISSING = "rest.invalid.flush.params.missing.argument";
    public static final String REST_INVALID_FLUSH_PARAMS_UNEXPECTED = "rest.invalid.flush.params.unexpected";
    public static final String REST_INVALID_RESET_PARAMS = "rest.invalid.reset.params";
    public static final String REST_INVALID_FROM = "rest.invalid.from";
    public static final String REST_INVALID_SIZE = "rest.invalid.size";
    public static final String REST_INVALID_FROM_SIZE_SUM = "rest.invalid.from.size.sum";
    public static final String REST_START_AFTER_END = "rest.start.after.end";
    public static final String REST_RESET_BUCKET_NO_LATENCY = "rest.reset.bucket.no.latency";
    public static final String REST_JOB_NOT_CLOSED_REVERT = "rest.job.not.closed.revert";
    public static final String REST_NO_SUCH_MODEL_SNAPSHOT = "rest.no.such.model.snapshot";
    public static final String REST_DESCRIPTION_ALREADY_USED = "rest.description.already.used";
    public static final String REST_CANNOT_DELETE_HIGHEST_PRIORITY = "rest.cannot.delete.highest.priority";

    public static final String PROCESS_ACTION_SLEEPING_JOB = "process.action.sleeping.job";
    public static final String PROCESS_ACTION_CLOSED_JOB = "process.action.closed.job";
    public static final String PROCESS_ACTION_CLOSING_JOB = "process.action.closing.job";
    public static final String PROCESS_ACTION_DELETING_JOB = "process.action.deleting.job";
    public static final String PROCESS_ACTION_FLUSHING_JOB = "process.action.flushing.job";
    public static final String PROCESS_ACTION_PAUSING_JOB = "process.action.pausing.job";
    public static final String PROCESS_ACTION_RESUMING_JOB = "process.action.resuming.job";
    public static final String PROCESS_ACTION_REVERTING_JOB = "process.action.reverting.job";
    public static final String PROCESS_ACTION_UPDATING_JOB = "process.action.updating.job";
    public static final String PROCESS_ACTION_WRITING_JOB = "process.action.writing.job";

    private Messages() {
    }

    public static ResourceBundle load() {
        return ResourceBundle.getBundle(Messages.BUNDLE_NAME, Locale.getDefault());
    }

    /**
     * Look up the message string from the resource bundle.
     *
     * @param key Must be one of the statics defined in this file
     */
    public static String getMessage(String key) {
        return load().getString(key);
    }

    /**
     * Look up the message string from the resource bundle and format with
     * the supplied arguments.
     *
     * @param key the key for the message
     * @param args MessageFormat arguments. See {@linkplain MessageFormat#format(Object)}
     */
    public static String getMessage(String key, Object... args) {
        return new MessageFormat(load().getString(key), Locale.ROOT).format(args);
    }
}
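
getMessage resolves the key against the ml_messages bundle and, in the varargs form, runs the pattern through MessageFormat, so {0}-style placeholders are filled positionally. A usage sketch (the pattern text in the comment is an assumption for illustration, not quoted from the bundle):

// Assuming the bundle maps the key to a pattern like
// "Invalid operator {0} for numerical rule condition", this yields
// "Invalid operator EQ for numerical rule condition".
String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_NUMERICAL_INVALID_OPERATOR, "EQ");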
@ -0,0 +1,204 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.metadata;

import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState;

import java.io.IOException;
import java.util.Objects;

public class Allocation extends AbstractDiffable<Allocation> implements ToXContent {

    private static final ParseField NODE_ID_FIELD = new ParseField("node_id");
    private static final ParseField JOB_ID_FIELD = new ParseField("job_id");
    private static final ParseField IGNORE_DOWNTIME_FIELD = new ParseField("ignore_downtime");
    public static final ParseField STATE = new ParseField("state");
    public static final ParseField STATE_REASON = new ParseField("state_reason");

    static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("allocation", Builder::new);

    static {
        PARSER.declareString(Builder::setNodeId, NODE_ID_FIELD);
        PARSER.declareString(Builder::setJobId, JOB_ID_FIELD);
        PARSER.declareBoolean(Builder::setIgnoreDowntime, IGNORE_DOWNTIME_FIELD);
        PARSER.declareField(Builder::setState, (p, c) -> JobState.fromString(p.text()), STATE, ObjectParser.ValueType.STRING);
        PARSER.declareString(Builder::setStateReason, STATE_REASON);
    }

    private final String nodeId;
    private final String jobId;
    private final boolean ignoreDowntime;
    private final JobState state;
    private final String stateReason;

    public Allocation(String nodeId, String jobId, boolean ignoreDowntime, JobState state, String stateReason) {
        this.nodeId = nodeId;
        this.jobId = jobId;
        this.ignoreDowntime = ignoreDowntime;
        this.state = state;
        this.stateReason = stateReason;
    }

    public Allocation(StreamInput in) throws IOException {
        this.nodeId = in.readOptionalString();
        this.jobId = in.readString();
        this.ignoreDowntime = in.readBoolean();
        this.state = JobState.fromStream(in);
        this.stateReason = in.readOptionalString();
    }

    public String getNodeId() {
        return nodeId;
    }

    public String getJobId() {
        return jobId;
    }

    /**
     * @return Whether to ignore downtime at startup.
     *
     * When the job state is set to OPENED, ignoreDowntime is reset to false.
     */
    public boolean isIgnoreDowntime() {
        return ignoreDowntime;
    }

    public JobState getState() {
        return state;
    }

    public String getStateReason() {
        return stateReason;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeOptionalString(nodeId);
        out.writeString(jobId);
        out.writeBoolean(ignoreDowntime);
        state.writeTo(out);
        out.writeOptionalString(stateReason);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (nodeId != null) {
            builder.field(NODE_ID_FIELD.getPreferredName(), nodeId);
        }
        builder.field(JOB_ID_FIELD.getPreferredName(), jobId);
        builder.field(IGNORE_DOWNTIME_FIELD.getPreferredName(), ignoreDowntime);
        builder.field(STATE.getPreferredName(), state);
        if (stateReason != null) {
            builder.field(STATE_REASON.getPreferredName(), stateReason);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Allocation that = (Allocation) o;
        return Objects.equals(nodeId, that.nodeId) &&
                Objects.equals(jobId, that.jobId) &&
                ignoreDowntime == that.ignoreDowntime &&
                Objects.equals(state, that.state) &&
                Objects.equals(stateReason, that.stateReason);
    }

    @Override
    public int hashCode() {
        return Objects.hash(nodeId, jobId, ignoreDowntime, state, stateReason);
    }

    // Class already extends from AbstractDiffable, so copied from ToXContentToBytes#toString()
    @Override
    public final String toString() {
        return Strings.toString(this);
    }

    public static class Builder {

        private String nodeId;
        private String jobId;
        private boolean ignoreDowntime;
        private JobState state;
        private String stateReason;

        public Builder() {
        }

        public Builder(Job job) {
            this.jobId = job.getId();
        }

        public Builder(Allocation allocation) {
            this.nodeId = allocation.nodeId;
            this.jobId = allocation.jobId;
            this.ignoreDowntime = allocation.ignoreDowntime;
            this.state = allocation.state;
            this.stateReason = allocation.stateReason;
        }

        public void setNodeId(String nodeId) {
            this.nodeId = nodeId;
        }

        public void setJobId(String jobId) {
            this.jobId = jobId;
        }

        public void setIgnoreDowntime(boolean ignoreDownTime) {
            this.ignoreDowntime = ignoreDownTime;
        }

        @SuppressWarnings("incomplete-switch")
        public void setState(JobState newState) {
            if (this.state != null) {
                switch (newState) {
                case CLOSING:
                    if (this.state != JobState.OPENED) {
                        throw new IllegalArgumentException("[" + jobId + "] expected state [" + JobState.OPENED
                                + "], but got [" + state + "]");
                    }
                    break;
                case OPENING:
                    if (this.state.isAnyOf(JobState.CLOSED, JobState.FAILED) == false) {
                        throw new IllegalArgumentException("[" + jobId + "] expected state [" + JobState.CLOSED
                                + "] or [" + JobState.FAILED + "], but got [" + state + "]");
                    }
                    break;
                case OPENED:
                    ignoreDowntime = false;
                    break;
                }
            }

            this.state = newState;
        }

        public void setStateReason(String stateReason) {
            this.stateReason = stateReason;
        }

        public Allocation build() {
            return new Allocation(nodeId, jobId, ignoreDowntime, state, stateReason);
        }
    }
}
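
The builder's setState is a small state machine: CLOSING is only reachable from OPENED, OPENING only from CLOSED or FAILED, and reaching OPENED clears ignoreDowntime. A sketch of the happy path (the job id is made up; JobState constants are the ones used above):

Allocation.Builder builder = new Allocation.Builder();
builder.setJobId("example-job");       // hypothetical job id
builder.setState(JobState.OPENED);     // first transition: no previous state to validate against
builder.setState(JobState.CLOSING);    // allowed: previous state was OPENED
// builder.setState(JobState.CLOSING); // would throw: previous state is now CLOSING, not OPENED
Allocation allocation = builder.build();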
@ -0,0 +1,130 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.metadata;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.notifications.Auditor;

import java.util.concurrent.atomic.AtomicBoolean;

public class MlInitializationService extends AbstractComponent implements ClusterStateListener {

    private final ThreadPool threadPool;
    private final ClusterService clusterService;
    private final JobProvider jobProvider;

    private final AtomicBoolean installMlMetadataCheck = new AtomicBoolean(false);
    private final AtomicBoolean createMlAuditIndexCheck = new AtomicBoolean(false);
    private final AtomicBoolean createMlMetaIndexCheck = new AtomicBoolean(false);
    private final AtomicBoolean createStateIndexCheck = new AtomicBoolean(false);

    public MlInitializationService(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                                   JobProvider jobProvider) {
        super(settings);
        this.threadPool = threadPool;
        this.clusterService = clusterService;
        this.jobProvider = jobProvider;
        clusterService.addListener(this);
    }

    @Override
    public void clusterChanged(ClusterChangedEvent event) {
        if (event.localNodeMaster()) {
            MetaData metaData = event.state().metaData();
            if (metaData.custom(MlMetadata.TYPE) == null) {
                if (installMlMetadataCheck.compareAndSet(false, true)) {
                    threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> {
                        clusterService.submitStateUpdateTask("install-ml-metadata", new ClusterStateUpdateTask() {
                            @Override
                            public ClusterState execute(ClusterState currentState) throws Exception {
                                ClusterState.Builder builder = new ClusterState.Builder(currentState);
                                MetaData.Builder metadataBuilder = MetaData.builder(currentState.metaData());
                                metadataBuilder.putCustom(MlMetadata.TYPE, MlMetadata.EMPTY_METADATA);
                                builder.metaData(metadataBuilder.build());
                                return builder.build();
                            }

                            @Override
                            public void onFailure(String source, Exception e) {
                                logger.error("unable to install ml metadata upon startup", e);
                            }
                        });
                    });
                }
            } else {
                installMlMetadataCheck.set(false);
            }
            if (metaData.hasIndex(Auditor.NOTIFICATIONS_INDEX) == false) {
                if (createMlAuditIndexCheck.compareAndSet(false, true)) {
                    threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> {
                        jobProvider.createNotificationMessageIndex((result, error) -> {
                            if (result) {
                                logger.info("successfully created {} index", Auditor.NOTIFICATIONS_INDEX);
                            } else {
                                if (error instanceof ResourceAlreadyExistsException) {
                                    logger.debug("not able to create {} index as it already exists", Auditor.NOTIFICATIONS_INDEX);
                                } else {
                                    logger.error(
                                            new ParameterizedMessage("not able to create {} index", Auditor.NOTIFICATIONS_INDEX), error);
                                }
                            }
                            createMlAuditIndexCheck.set(false);
                        });
                    });
                }
            }
            if (metaData.hasIndex(JobProvider.ML_META_INDEX) == false) {
                if (createMlMetaIndexCheck.compareAndSet(false, true)) {
                    threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> {
                        jobProvider.createMetaIndex((result, error) -> {
                            if (result) {
                                logger.info("successfully created {} index", JobProvider.ML_META_INDEX);
                            } else {
                                if (error instanceof ResourceAlreadyExistsException) {
                                    logger.debug("not able to create {} index as it already exists", JobProvider.ML_META_INDEX);
                                } else {
                                    logger.error(new ParameterizedMessage("not able to create {} index", JobProvider.ML_META_INDEX), error);
                                }
                            }
                            createMlMetaIndexCheck.set(false);
                        });
                    });
                }
            }
            String stateIndexName = AnomalyDetectorsIndex.jobStateIndexName();
            if (metaData.hasIndex(stateIndexName) == false) {
                if (createStateIndexCheck.compareAndSet(false, true)) {
                    threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> {
                        jobProvider.createJobStateIndex((result, error) -> {
                            if (result) {
                                logger.info("successfully created {} index", stateIndexName);
                            } else {
                                if (error instanceof ResourceAlreadyExistsException) {
                                    logger.debug("not able to create {} index as it already exists", stateIndexName);
                                } else {
                                    logger.error(new ParameterizedMessage("not able to create {} index", stateIndexName), error);
                                }
                            }
                            createStateIndexCheck.set(false);
                        });
                    });
                }
            }
        }
    }
}
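
Each bootstrap step above uses the same guard: an AtomicBoolean ensures at most one async attempt is in flight, and the flag is reset in the completion callback so a later cluster-state update can retry. The pattern in isolation (a minimal sketch with illustrative names; the service above resets the flag in the index-creation callback rather than a finally block):

import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;

class OnceInFlightGuard {
    private final AtomicBoolean inFlight = new AtomicBoolean(false);

    void maybeRun(Executor executor, Runnable task) {
        // compareAndSet lets exactly one caller through; the rest no-op
        // until the task finishes and clears the flag for the next retry.
        if (inFlight.compareAndSet(false, true)) {
            executor.execute(() -> {
                try {
                    task.run();
                } finally {
                    inFlight.set(false);
                }
            });
        }
    }
}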
@ -0,0 +1,441 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.metadata;

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.DiffableUtils;
import org.elasticsearch.cluster.NamedDiff;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.ml.action.StartDatafeedAction;
import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.ml.datafeed.DatafeedJobValidator;
import org.elasticsearch.xpack.ml.datafeed.DatafeedState;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.persistent.PersistentTasksInProgress;
import org.elasticsearch.xpack.persistent.PersistentTasksInProgress.PersistentTaskInProgress;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.EnumSet;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.function.Predicate;

public class MlMetadata implements MetaData.Custom {

    private static final ParseField JOBS_FIELD = new ParseField("jobs");
    private static final ParseField ALLOCATIONS_FIELD = new ParseField("allocations");
    private static final ParseField DATAFEEDS_FIELD = new ParseField("datafeeds");

    public static final String TYPE = "ml";
    public static final MlMetadata EMPTY_METADATA = new MlMetadata(Collections.emptySortedMap(),
            Collections.emptySortedMap(), Collections.emptySortedMap());

    public static final ObjectParser<Builder, Void> ML_METADATA_PARSER = new ObjectParser<>("ml_metadata",
            Builder::new);

    static {
        ML_METADATA_PARSER.declareObjectArray(Builder::putJobs, (p, c) -> Job.PARSER.apply(p, c).build(), JOBS_FIELD);
        ML_METADATA_PARSER.declareObjectArray(Builder::putAllocations, Allocation.PARSER, ALLOCATIONS_FIELD);
        ML_METADATA_PARSER.declareObjectArray(Builder::putDatafeeds, (p, c) -> DatafeedConfig.PARSER.apply(p, c).build(), DATAFEEDS_FIELD);
    }

    private final SortedMap<String, Job> jobs;
    private final SortedMap<String, Allocation> allocations;
    private final SortedMap<String, DatafeedConfig> datafeeds;

    private MlMetadata(SortedMap<String, Job> jobs, SortedMap<String, Allocation> allocations,
                       SortedMap<String, DatafeedConfig> datafeeds) {
        this.jobs = Collections.unmodifiableSortedMap(jobs);
        this.allocations = Collections.unmodifiableSortedMap(allocations);
        this.datafeeds = Collections.unmodifiableSortedMap(datafeeds);
    }

    public Map<String, Job> getJobs() {
        return jobs;
    }

    public SortedMap<String, Allocation> getAllocations() {
        return allocations;
    }

    public SortedMap<String, DatafeedConfig> getDatafeeds() {
        return datafeeds;
    }

    public DatafeedConfig getDatafeed(String datafeedId) {
        return datafeeds.get(datafeedId);
    }

    @Override
    public String getWriteableName() {
        return TYPE;
    }

    @Override
    public EnumSet<MetaData.XContentContext> context() {
        // NORELEASE: Also include SNAPSHOT, but then we need to split the allocations from here and add them
        // as ClusterState.Custom metadata, because only the job definitions should be stored in snapshots.
        return MetaData.API_AND_GATEWAY;
    }

    @Override
    public Diff<MetaData.Custom> diff(MetaData.Custom previousState) {
        return new MlMetadataDiff((MlMetadata) previousState, this);
    }

    public MlMetadata(StreamInput in) throws IOException {
        int size = in.readVInt();
        TreeMap<String, Job> jobs = new TreeMap<>();
        for (int i = 0; i < size; i++) {
            jobs.put(in.readString(), new Job(in));
        }
        this.jobs = jobs;
        size = in.readVInt();
        TreeMap<String, Allocation> allocations = new TreeMap<>();
        for (int i = 0; i < size; i++) {
            allocations.put(in.readString(), new Allocation(in));
        }
        this.allocations = allocations;
        size = in.readVInt();
        TreeMap<String, DatafeedConfig> datafeeds = new TreeMap<>();
        for (int i = 0; i < size; i++) {
            datafeeds.put(in.readString(), new DatafeedConfig(in));
        }
        this.datafeeds = datafeeds;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        writeMap(jobs, out);
        writeMap(allocations, out);
        writeMap(datafeeds, out);
    }

    private static <T extends Writeable> void writeMap(Map<String, T> map, StreamOutput out) throws IOException {
        out.writeVInt(map.size());
        for (Map.Entry<String, T> entry : map.entrySet()) {
            out.writeString(entry.getKey());
            entry.getValue().writeTo(out);
        }
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        mapValuesToXContent(JOBS_FIELD, jobs, builder, params);
        mapValuesToXContent(ALLOCATIONS_FIELD, allocations, builder, params);
        mapValuesToXContent(DATAFEEDS_FIELD, datafeeds, builder, params);
        return builder;
    }

    private static <T extends ToXContent> void mapValuesToXContent(ParseField field, Map<String, T> map, XContentBuilder builder,
                                                                   Params params) throws IOException {
        builder.startArray(field.getPreferredName());
        for (Map.Entry<String, T> entry : map.entrySet()) {
            entry.getValue().toXContent(builder, params);
        }
        builder.endArray();
    }

    public static class MlMetadataDiff implements NamedDiff<MetaData.Custom> {

        final Diff<Map<String, Job>> jobs;
        final Diff<Map<String, Allocation>> allocations;
        final Diff<Map<String, DatafeedConfig>> datafeeds;

        MlMetadataDiff(MlMetadata before, MlMetadata after) {
            this.jobs = DiffableUtils.diff(before.jobs, after.jobs, DiffableUtils.getStringKeySerializer());
            this.allocations = DiffableUtils.diff(before.allocations, after.allocations, DiffableUtils.getStringKeySerializer());
            this.datafeeds = DiffableUtils.diff(before.datafeeds, after.datafeeds, DiffableUtils.getStringKeySerializer());
        }

        public MlMetadataDiff(StreamInput in) throws IOException {
            this.jobs = DiffableUtils.readJdkMapDiff(in, DiffableUtils.getStringKeySerializer(), Job::new,
                    MlMetadataDiff::readJobDiffFrom);
            this.allocations = DiffableUtils.readJdkMapDiff(in, DiffableUtils.getStringKeySerializer(), Allocation::new,
                    MlMetadataDiff::readAllocationDiffFrom);
            this.datafeeds = DiffableUtils.readJdkMapDiff(in, DiffableUtils.getStringKeySerializer(), DatafeedConfig::new,
                    MlMetadataDiff::readSchedulerDiffFrom);
        }

        @Override
        public MetaData.Custom apply(MetaData.Custom part) {
            TreeMap<String, Job> newJobs = new TreeMap<>(jobs.apply(((MlMetadata) part).jobs));
            TreeMap<String, Allocation> newAllocations = new TreeMap<>(allocations.apply(((MlMetadata) part).allocations));
            TreeMap<String, DatafeedConfig> newDatafeeds = new TreeMap<>(datafeeds.apply(((MlMetadata) part).datafeeds));
            return new MlMetadata(newJobs, newAllocations, newDatafeeds);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            jobs.writeTo(out);
            allocations.writeTo(out);
            datafeeds.writeTo(out);
        }

        @Override
        public String getWriteableName() {
            return TYPE;
        }

        static Diff<Job> readJobDiffFrom(StreamInput in) throws IOException {
            return AbstractDiffable.readDiffFrom(Job::new, in);
        }

        static Diff<Allocation> readAllocationDiffFrom(StreamInput in) throws IOException {
            return AbstractDiffable.readDiffFrom(Allocation::new, in);
        }

        static Diff<DatafeedConfig> readSchedulerDiffFrom(StreamInput in) throws IOException {
            return AbstractDiffable.readDiffFrom(DatafeedConfig::new, in);
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        MlMetadata that = (MlMetadata) o;
        return Objects.equals(jobs, that.jobs) &&
                Objects.equals(allocations, that.allocations) &&
                Objects.equals(datafeeds, that.datafeeds);
    }

    @Override
    public final String toString() {
        return Strings.toString(this);
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobs, allocations, datafeeds);
    }

    public static class Builder {

        private TreeMap<String, Job> jobs;
        private TreeMap<String, Allocation> allocations;
        private TreeMap<String, DatafeedConfig> datafeeds;

        public Builder() {
            this.jobs = new TreeMap<>();
            this.allocations = new TreeMap<>();
            this.datafeeds = new TreeMap<>();
        }

        public Builder(MlMetadata previous) {
            jobs = new TreeMap<>(previous.jobs);
            allocations = new TreeMap<>(previous.allocations);
            datafeeds = new TreeMap<>(previous.datafeeds);
        }

        public Builder putJob(Job job, boolean overwrite) {
            if (jobs.containsKey(job.getId()) && overwrite == false) {
                throw ExceptionsHelper.jobAlreadyExists(job.getId());
            }
            this.jobs.put(job.getId(), job);

            Allocation allocation = allocations.get(job.getId());
            if (allocation == null) {
                Allocation.Builder builder = new Allocation.Builder(job);
                builder.setState(JobState.CLOSED);
                allocations.put(job.getId(), builder.build());
            }
            return this;
        }

        public Builder deleteJob(String jobId) {
            Job job = jobs.remove(jobId);
            if (job == null) {
                throw new ResourceNotFoundException("job [" + jobId + "] does not exist");
            }

            Optional<DatafeedConfig> datafeed = getDatafeedByJobId(jobId);
            if (datafeed.isPresent()) {
                throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] while datafeed ["
                        + datafeed.get().getId() + "] refers to it");
            }

            Allocation previousAllocation = this.allocations.remove(jobId);
            if (previousAllocation != null) {
                if (!previousAllocation.getState().equals(JobState.DELETING)) {
                    throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] because it is in ["
                            + previousAllocation.getState() + "] state. Must be in [" + JobState.DELETING + "] state.");
                }
            } else {
                throw new ResourceNotFoundException("No allocation found in the cluster state for job [" + jobId + "]");
            }

            return this;
        }

        public Builder putDatafeed(DatafeedConfig datafeedConfig) {
            if (datafeeds.containsKey(datafeedConfig.getId())) {
                throw new ResourceAlreadyExistsException("A datafeed with id [" + datafeedConfig.getId() + "] already exists");
            }
            String jobId = datafeedConfig.getJobId();
            Job job = jobs.get(jobId);
            if (job == null) {
                throw ExceptionsHelper.missingJobException(jobId);
            }
            Optional<DatafeedConfig> existingDatafeed = getDatafeedByJobId(jobId);
            if (existingDatafeed.isPresent()) {
                throw ExceptionsHelper.conflictStatusException("A datafeed [" + existingDatafeed.get().getId()
                        + "] already exists for job [" + jobId + "]");
            }
            DatafeedJobValidator.validate(datafeedConfig, job);

            datafeeds.put(datafeedConfig.getId(), datafeedConfig);
            return this;
        }

        public Builder removeDatafeed(String datafeedId, PersistentTasksInProgress persistentTasksInProgress) {
            DatafeedConfig datafeed = datafeeds.get(datafeedId);
            if (datafeed == null) {
                throw ExceptionsHelper.missingDatafeedException(datafeedId);
            }
            if (persistentTasksInProgress != null) {
                Predicate<PersistentTaskInProgress<?>> predicate = t -> {
                    StartDatafeedAction.Request storedRequest = (StartDatafeedAction.Request) t.getRequest();
                    return storedRequest.getDatafeedId().equals(datafeedId);
                };
                if (persistentTasksInProgress.tasksExist(StartDatafeedAction.NAME, predicate)) {
                    String msg = Messages.getMessage(Messages.DATAFEED_CANNOT_DELETE_IN_CURRENT_STATE, datafeedId,
                            DatafeedState.STARTED);
                    throw ExceptionsHelper.conflictStatusException(msg);
                }
            }
            datafeeds.remove(datafeedId);
            return this;
        }

        private Optional<DatafeedConfig> getDatafeedByJobId(String jobId) {
            return datafeeds.values().stream().filter(s -> s.getJobId().equals(jobId)).findFirst();
        }

        // only for parsing
        private Builder putAllocations(Collection<Allocation.Builder> allocations) {
            for (Allocation.Builder allocationBuilder : allocations) {
                Allocation allocation = allocationBuilder.build();
                this.allocations.put(allocation.getJobId(), allocation);
            }
            return this;
        }

        private Builder putJobs(Collection<Job> jobs) {
            for (Job job : jobs) {
                putJob(job, true);
            }
            return this;
        }

        private Builder putDatafeeds(Collection<DatafeedConfig> datafeeds) {
            for (DatafeedConfig datafeed : datafeeds) {
                this.datafeeds.put(datafeed.getId(), datafeed);
            }
            return this;
        }

        public MlMetadata build() {
            return new MlMetadata(jobs, allocations, datafeeds);
        }

        public Builder assignToNode(String jobId, String nodeId) {
            Allocation allocation = allocations.get(jobId);
            if (allocation == null) {
                throw new IllegalStateException("[" + jobId + "] no allocation to assign to node [" + nodeId + "]");
            }
            Allocation.Builder builder = new Allocation.Builder(allocation);
            builder.setNodeId(nodeId);
            allocations.put(jobId, builder.build());
            return this;
        }

        public Builder updateState(String jobId, JobState jobState, @Nullable String reason) {
            if (jobs.containsKey(jobId) == false) {
                throw ExceptionsHelper.missingJobException(jobId);
            }

            Allocation previous = allocations.get(jobId);
            if (previous == null) {
                throw new IllegalStateException("[" + jobId + "] no allocation exists to update the state to [" + jobState + "]");
            }

            // Cannot update the state to DELETING if there are datafeeds attached
            if (jobState.equals(JobState.DELETING)) {
                Optional<DatafeedConfig> datafeed = getDatafeedByJobId(jobId);
                if (datafeed.isPresent()) {
                    throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] while datafeed ["
                            + datafeed.get().getId() + "] refers to it");
                }
            }

            if (previous.getState().equals(JobState.DELETING)) {
                // If we're already in DELETING there's nothing to do
                if (jobState.equals(JobState.DELETING)) {
                    return this;
                }

                // Once a job goes into DELETING, it cannot be changed
                throw new ElasticsearchStatusException("Cannot change state of job [" + jobId + "] to [" + jobState + "] because " +
                        "it is currently in [" + JobState.DELETING + "] state.", RestStatus.CONFLICT);
            }
            Allocation.Builder builder = new Allocation.Builder(previous);
            builder.setState(jobState);
            if (reason != null) {
                builder.setStateReason(reason);
            }
            if (previous.getState() != jobState && jobState == JobState.CLOSED) {
                Job.Builder job = new Job.Builder(this.jobs.get(jobId));
                job.setFinishedTime(new Date());
                this.jobs.put(job.getId(), job.build());
            }
            allocations.put(jobId, builder.build());
            return this;
        }

        public Builder setIgnoreDowntime(String jobId) {
            if (jobs.containsKey(jobId) == false) {
                throw ExceptionsHelper.missingJobException(jobId);
            }

            Allocation allocation = allocations.get(jobId);
            if (allocation == null) {
                throw new IllegalStateException("[" + jobId + "] no allocation to ignore downtime");
            }
            Allocation.Builder builder = new Allocation.Builder(allocation);
            builder.setIgnoreDowntime(true);
            allocations.put(jobId, builder.build());
            return this;
        }
    }
}
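
The builder enforces the invariants spelled out above: putJob seeds a CLOSED allocation, a job id cannot be reused without overwrite, and putDatafeed refuses a datafeed whose job is missing or already has one attached. A sketch of the normal construction order (the job and datafeedConfig instances are assumed to be built elsewhere; their builders are not shown in this file):

MlMetadata.Builder builder = new MlMetadata.Builder();
builder.putJob(job, false);          // also creates a CLOSED allocation for the job
builder.putDatafeed(datafeedConfig); // must reference the job's id; checked by DatafeedJobValidator
MlMetadata mlMetadata = builder.build();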
@ -0,0 +1,34 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.persistence;

/**
 * Methods for handling index naming related functions
 */
public final class AnomalyDetectorsIndex {
    private static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-";
    private static final String STATE_INDEX_NAME = ".ml-state";

    private AnomalyDetectorsIndex() {
    }

    /**
     * The name of the default index where the job's results are stored
     * @param jobId Job Id
     * @return The index name
     */
    public static String jobResultsIndexName(String jobId) {
        return RESULTS_INDEX_PREFIX + jobId;
    }

    /**
     * The name of the default index where a job's state is stored
     * @return The index name
     */
    public static String jobStateIndexName() {
        return STATE_INDEX_NAME;
    }
}
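
Index names are derived on demand rather than stored, so each job's results land in a per-job index under a common prefix while state is shared. For example (the job id is made up):

String resultsIndex = AnomalyDetectorsIndex.jobResultsIndexName("example-job"); // ".ml-anomalies-example-job"
String stateIndex = AnomalyDetectorsIndex.jobStateIndexName();                  // ".ml-state"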
@ -0,0 +1,37 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.persistence;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.ml.job.results.Bucket;

import java.io.IOException;

class BatchedBucketsIterator extends BatchedResultsIterator<Bucket> {

    BatchedBucketsIterator(Client client, String jobId) {
        super(client, jobId, Bucket.RESULT_TYPE_VALUE);
    }

    @Override
    protected ResultWithIndex<Bucket> map(SearchHit hit) {
        BytesReference source = hit.getSourceRef();
        XContentParser parser;
        try {
            parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
        } catch (IOException e) {
            throw new ElasticsearchParseException("failed to parse bucket", e);
        }
        Bucket bucket = Bucket.PARSER.apply(parser, null);
        return new ResultWithIndex<>(hit.getIndex(), bucket);
    }
}
@ -0,0 +1,166 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.persistence;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.xpack.ml.job.results.Bucket;

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;
import java.util.NoSuchElementException;
import java.util.Objects;

/**
 * An iterator for fetching a large number of documents of type T
 * and iterating through them in batches.
 */
public abstract class BatchedDocumentsIterator<T> {
    private static final Logger LOGGER = Loggers.getLogger(BatchedDocumentsIterator.class);

    private static final String CONTEXT_ALIVE_DURATION = "5m";
    private static final int BATCH_SIZE = 10000;

    private final Client client;
    private final String index;
    private final ResultsFilterBuilder filterBuilder;
    private volatile long count;
    private volatile long totalHits;
    private volatile String scrollId;
    private volatile boolean isScrollInitialised;

    public BatchedDocumentsIterator(Client client, String index) {
        this(client, index, new ResultsFilterBuilder());
    }

    protected BatchedDocumentsIterator(Client client, String index, QueryBuilder queryBuilder) {
        this(client, index, new ResultsFilterBuilder(queryBuilder));
    }

    private BatchedDocumentsIterator(Client client, String index, ResultsFilterBuilder resultsFilterBuilder) {
        this.client = Objects.requireNonNull(client);
        this.index = Objects.requireNonNull(index);
        totalHits = 0;
        count = 0;
        filterBuilder = Objects.requireNonNull(resultsFilterBuilder);
        isScrollInitialised = false;
    }

    /**
     * Query documents whose timestamp is within the given time range
     *
     * @param startEpochMs the start time as epoch milliseconds (inclusive)
     * @param endEpochMs the end time as epoch milliseconds (exclusive)
     * @return the iterator itself
     */
    public BatchedDocumentsIterator<T> timeRange(long startEpochMs, long endEpochMs) {
        filterBuilder.timeRange(Bucket.TIMESTAMP.getPreferredName(), startEpochMs, endEpochMs);
        return this;
    }

    /**
     * Include interim documents
     *
     * @param interimFieldName Name of the include interim field
     */
    public BatchedDocumentsIterator<T> includeInterim(String interimFieldName) {
        filterBuilder.interim(interimFieldName, true);
        return this;
    }

    /**
     * Returns {@code true} if the iteration has more elements.
     * (In other words, returns {@code true} if {@link #next} would
     * return an element rather than throwing an exception.)
     *
     * @return {@code true} if the iteration has more elements
     */
    public boolean hasNext() {
        return !isScrollInitialised || count != totalHits;
    }

    /**
     * The first time next() is called, the search will be performed and the first
     * batch will be returned. Any subsequent call will return the following batches.
     * <p>
     * Note that in some implementations it is possible that when there are no
     * results at all, the first time this method is called an empty {@code Deque} is returned.
     *
     * @return a {@code Deque} with the next batch of documents
     * @throws NoSuchElementException if the iteration has no more elements
     */
    public Deque<T> next() {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }

        SearchResponse searchResponse;
        if (scrollId == null) {
            searchResponse = initScroll();
        } else {
            SearchScrollRequest searchScrollRequest = new SearchScrollRequest(scrollId).scroll(CONTEXT_ALIVE_DURATION);
            searchResponse = client.searchScroll(searchScrollRequest).actionGet();
        }
        scrollId = searchResponse.getScrollId();
        return mapHits(searchResponse);
    }

    private SearchResponse initScroll() {
        LOGGER.trace("ES API CALL: search all of type {} from index {}", getType(), index);

        isScrollInitialised = true;

        SearchRequest searchRequest = new SearchRequest(index);
        searchRequest.types(getType());
        searchRequest.scroll(CONTEXT_ALIVE_DURATION);
        searchRequest.source(new SearchSourceBuilder()
                .size(BATCH_SIZE)
                .query(filterBuilder.build())
                .sort(SortBuilders.fieldSort(ElasticsearchMappings.ES_DOC)));

        SearchResponse searchResponse = client.search(searchRequest).actionGet();
        totalHits = searchResponse.getHits().getTotalHits();
        scrollId = searchResponse.getScrollId();
        return searchResponse;
    }

    private Deque<T> mapHits(SearchResponse searchResponse) {
        Deque<T> results = new ArrayDeque<>();

        SearchHit[] hits = searchResponse.getHits().getHits();
        for (SearchHit hit : hits) {
            T mapped = map(hit);
            if (mapped != null) {
                results.add(mapped);
            }
        }
        count += hits.length;

        if (!hasNext() && scrollId != null) {
            client.prepareClearScroll().setScrollIds(Arrays.asList(scrollId)).get();
        }
        return results;
    }

    protected abstract String getType();

    /**
     * Maps the search hit to the document type
     *
     * @param hit the search hit
     * @return The mapped document or {@code null} if the mapping failed
     */
    protected abstract T map(SearchHit hit);
}
|
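For orientation, here is a minimal usage sketch, not part of this commit: it drains a concrete subclass batch by batch. It assumes an initialised Client, a placeholder job ID, and that BatchedBucketsIterator's constructor takes (Client, jobId) like the sibling iterators below.

package org.elasticsearch.xpack.ml.job.persistence;

import java.util.Deque;

import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.ml.job.results.Bucket;

class BatchedIteratorUsageSketch {
    static long countBuckets(Client client, String jobId, long startEpochMs, long endEpochMs) {
        BatchedBucketsIterator iterator = new BatchedBucketsIterator(client, jobId);
        iterator.timeRange(startEpochMs, endEpochMs);   // inclusive start, exclusive end
        long total = 0;
        while (iterator.hasNext()) {
            // each next() call fetches up to BATCH_SIZE (10000) documents via scroll
            Deque<BatchedResultsIterator.ResultWithIndex<Bucket>> batch = iterator.next();
            total += batch.size();
        }
        return total;
    }
}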
@ -0,0 +1,37 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.persistence;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.ml.job.results.Influencer;

import java.io.IOException;

class BatchedInfluencersIterator extends BatchedResultsIterator<Influencer> {
    BatchedInfluencersIterator(Client client, String jobId) {
        super(client, jobId, Influencer.RESULT_TYPE_VALUE);
    }

    @Override
    protected ResultWithIndex<Influencer> map(SearchHit hit) {
        BytesReference source = hit.getSourceRef();
        XContentParser parser;
        try {
            parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
        } catch (IOException e) {
            throw new ElasticsearchParseException("failed to parse influencer", e);
        }

        Influencer influencer = Influencer.PARSER.apply(parser, null);
        return new ResultWithIndex<>(hit.getIndex(), influencer);
    }
}
@ -0,0 +1,37 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.persistence;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.ml.job.results.AnomalyRecord;

import java.io.IOException;

class BatchedRecordsIterator extends BatchedResultsIterator<AnomalyRecord> {

    BatchedRecordsIterator(Client client, String jobId) {
        super(client, jobId, AnomalyRecord.RESULT_TYPE_VALUE);
    }

    @Override
    protected ResultWithIndex<AnomalyRecord> map(SearchHit hit) {
        BytesReference source = hit.getSourceRef();
        XContentParser parser;
        try {
            parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
        } catch (IOException e) {
            throw new ElasticsearchParseException("failed to parse record", e);
        }
        AnomalyRecord record = AnomalyRecord.PARSER.apply(parser, null);
        return new ResultWithIndex<>(hit.getIndex(), record);
    }
}
@ -0,0 +1,34 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.persistence;

import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.xpack.ml.job.results.Result;

public abstract class BatchedResultsIterator<T>
        extends BatchedDocumentsIterator<BatchedResultsIterator.ResultWithIndex<T>> {

    public BatchedResultsIterator(Client client, String jobId, String resultType) {
        super(client, AnomalyDetectorsIndex.jobResultsIndexName(jobId),
                new TermsQueryBuilder(Result.RESULT_TYPE.getPreferredName(), resultType));
    }

    @Override
    protected String getType() {
        return Result.TYPE.getPreferredName();
    }

    public static class ResultWithIndex<T> {
        public final String indexName;
        public final T result;

        public ResultWithIndex(String indexName, T result) {
            this.indexName = indexName;
            this.result = result;
        }
    }
}
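A sketch of how a caller might use the ResultWithIndex pairing; this is illustrative, not code from the commit. The point of the design is that each parsed result carries the name of the index it came from, so an updated copy can later be written back to the same index.

package org.elasticsearch.xpack.ml.job.persistence;

import java.util.Deque;

import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.ml.job.results.AnomalyRecord;

class ResultWithIndexUsageSketch {
    static void logRecordIndices(Client client, String jobId) {
        BatchedRecordsIterator iterator = new BatchedRecordsIterator(client, jobId);
        while (iterator.hasNext()) {
            Deque<BatchedResultsIterator.ResultWithIndex<AnomalyRecord>> batch = iterator.next();
            for (BatchedResultsIterator.ResultWithIndex<AnomalyRecord> hit : batch) {
                // indexName records where the document came from, so updated
                // copies of hit.result can be indexed back to the same place
                System.out.println(hit.indexName);
            }
        }
    }
}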
@ -0,0 +1,230 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.persistence;

import org.elasticsearch.common.Strings;
import org.elasticsearch.xpack.ml.job.results.Bucket;

import java.util.Objects;

/**
 * One time query builder for buckets.
 * <ul>
 * <li>From- Skip the first N Buckets. This parameter is for paging; if not
 * required set to 0. Default = 0</li>
 * <li>Size- Take only this number of Buckets. Default =
 * {@value DEFAULT_SIZE}</li>
 * <li>Expand- Include anomaly records. Default = false</li>
 * <li>IncludeInterim- Include interim results. Default = false</li>
 * <li>anomalyScoreThreshold- Return only buckets with an anomalyScore >=
 * this value. Default = 0.0</li>
 * <li>normalizedProbabilityThreshold- Return only buckets with a
 * maxNormalizedProbability >= this value. Default = 0.0</li>
 * <li>start- The start bucket time. A bucket with this timestamp will be
 * included in the results. If 0 all buckets up to <code>endEpochMs</code> are
 * returned. Default = -1</li>
 * <li>end- The end bucket timestamp; buckets up to but NOT including this
 * timestamp are returned. If 0 all buckets from <code>startEpochMs</code> are
 * returned. Default = -1</li>
 * <li>partitionValue- Set the bucket's max normalized probability to this
 * partition field value's max normalized probability. Default = null</li>
 * </ul>
 */
public final class BucketsQueryBuilder {
    public static final int DEFAULT_SIZE = 100;

    private BucketsQuery bucketsQuery = new BucketsQuery();

    public BucketsQueryBuilder from(int from) {
        bucketsQuery.from = from;
        return this;
    }

    public BucketsQueryBuilder size(int size) {
        bucketsQuery.size = size;
        return this;
    }

    public BucketsQueryBuilder expand(boolean expand) {
        bucketsQuery.expand = expand;
        return this;
    }

    public BucketsQueryBuilder includeInterim(boolean include) {
        bucketsQuery.includeInterim = include;
        return this;
    }

    public BucketsQueryBuilder anomalyScoreThreshold(Double anomalyScoreFilter) {
        if (anomalyScoreFilter != null) {
            bucketsQuery.anomalyScoreFilter = anomalyScoreFilter;
        }
        return this;
    }

    public BucketsQueryBuilder normalizedProbabilityThreshold(Double normalizedProbability) {
        if (normalizedProbability != null) {
            bucketsQuery.normalizedProbability = normalizedProbability;
        }
        return this;
    }

    /**
     * @param partitionValue Not set if null or empty
     */
    public BucketsQueryBuilder partitionValue(String partitionValue) {
        if (!Strings.isNullOrEmpty(partitionValue)) {
            bucketsQuery.partitionValue = partitionValue;
        }
        return this;
    }

    public BucketsQueryBuilder sortField(String sortField) {
        bucketsQuery.sortField = sortField;
        return this;
    }

    public BucketsQueryBuilder sortDescending(boolean sortDescending) {
        bucketsQuery.sortDescending = sortDescending;
        return this;
    }

    /**
     * If startTime <= 0 the parameter is not set
     */
    public BucketsQueryBuilder start(String startTime) {
        bucketsQuery.start = startTime;
        return this;
    }

    /**
     * If endTime <= 0 the parameter is not set
     */
    public BucketsQueryBuilder end(String endTime) {
        bucketsQuery.end = endTime;
        return this;
    }

    public BucketsQueryBuilder timestamp(String timestamp) {
        bucketsQuery.timestamp = timestamp;
        bucketsQuery.size = 1;
        return this;
    }

    public BucketsQueryBuilder.BucketsQuery build() {
        if (bucketsQuery.timestamp != null && (bucketsQuery.start != null || bucketsQuery.end != null)) {
            throw new IllegalStateException("Either specify timestamp or start/end");
        }

        return bucketsQuery;
    }

    public void clear() {
        bucketsQuery = new BucketsQueryBuilder.BucketsQuery();
    }

    public class BucketsQuery {
        private int from = 0;
        private int size = DEFAULT_SIZE;
        private boolean expand = false;
        private boolean includeInterim = false;
        private double anomalyScoreFilter = 0.0d;
        private double normalizedProbability = 0.0d;
        private String start;
        private String end;
        private String timestamp;
        private String partitionValue = null;
        private String sortField = Bucket.TIMESTAMP.getPreferredName();
        private boolean sortDescending = false;

        public int getFrom() {
            return from;
        }

        public int getSize() {
            return size;
        }

        public boolean isExpand() {
            return expand;
        }

        public boolean isIncludeInterim() {
            return includeInterim;
        }

        public double getAnomalyScoreFilter() {
            return anomalyScoreFilter;
        }

        public double getNormalizedProbability() {
            return normalizedProbability;
        }

        public String getStart() {
            return start;
        }

        public String getEnd() {
            return end;
        }

        public String getTimestamp() {
            return timestamp;
        }

        /**
         * @return Null if not set
         */
        public String getPartitionValue() {
            return partitionValue;
        }

        public String getSortField() {
            return sortField;
        }

        public boolean isSortDescending() {
            return sortDescending;
        }

        @Override
        public int hashCode() {
            return Objects.hash(from, size, expand, includeInterim, anomalyScoreFilter, normalizedProbability, start, end,
                    timestamp, partitionValue, sortField, sortDescending);
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }

            BucketsQuery other = (BucketsQuery) obj;
            return Objects.equals(from, other.from) &&
                    Objects.equals(size, other.size) &&
                    Objects.equals(expand, other.expand) &&
                    Objects.equals(includeInterim, other.includeInterim) &&
                    Objects.equals(start, other.start) &&
                    Objects.equals(end, other.end) &&
                    Objects.equals(timestamp, other.timestamp) &&
                    Objects.equals(anomalyScoreFilter, other.anomalyScoreFilter) &&
                    Objects.equals(normalizedProbability, other.normalizedProbability) &&
                    Objects.equals(partitionValue, other.partitionValue) &&
                    Objects.equals(sortField, other.sortField) &&
                    this.sortDescending == other.sortDescending;
        }
    }
}
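A minimal usage sketch of the builder, with illustrative values (the epoch-millisecond strings and thresholds are placeholders): fetch the first page of non-interim buckets in a window, most anomalous first.

package org.elasticsearch.xpack.ml.job.persistence;

import org.elasticsearch.xpack.ml.job.results.Bucket;

class BucketsQueryUsageSketch {
    static BucketsQueryBuilder.BucketsQuery firstFiftyAnomalousBuckets() {
        return new BucketsQueryBuilder()
                .from(0)                        // first page
                .size(50)
                .includeInterim(false)
                .anomalyScoreThreshold(30.0)    // skip low-scoring buckets
                .start("1454284800000")         // illustrative epoch-millis strings
                .end("1454371200000")
                .sortField(Bucket.ANOMALY_SCORE.getPreferredName())
                .sortDescending(true)
                .build();                       // throws if timestamp and start/end are mixed
    }
}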
@ -0,0 +1,671 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.persistence;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.CategorizerState;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSizeStats;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelState;
import org.elasticsearch.xpack.ml.job.results.ReservedFieldNames;
import org.elasticsearch.xpack.ml.notifications.AuditActivity;
import org.elasticsearch.xpack.ml.notifications.AuditMessage;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.Quantiles;
import org.elasticsearch.xpack.ml.job.results.AnomalyCause;
import org.elasticsearch.xpack.ml.job.results.AnomalyRecord;
import org.elasticsearch.xpack.ml.job.results.Bucket;
import org.elasticsearch.xpack.ml.job.results.BucketInfluencer;
import org.elasticsearch.xpack.ml.job.results.CategoryDefinition;
import org.elasticsearch.xpack.ml.job.results.Influence;
import org.elasticsearch.xpack.ml.job.results.Influencer;
import org.elasticsearch.xpack.ml.job.results.ModelDebugOutput;
import org.elasticsearch.xpack.ml.job.results.PerPartitionMaxProbabilities;
import org.elasticsearch.xpack.ml.job.results.Result;

import java.io.IOException;
import java.util.Collection;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

/**
 * Static methods to create Elasticsearch mappings for the autodetect
 * persisted objects/documents
 * <p>
 * Elasticsearch automatically recognises array types so they are
 * not explicitly mapped as such. For arrays of objects the type
 * must be set to <i>nested</i> so the arrays are searched properly
 * see https://www.elastic.co/guide/en/elasticsearch/guide/current/nested-objects.html
 * <p>
 * It is expected that indexes to which these mappings are applied have their
 * default analyzer set to "keyword", which does not tokenise fields. The
 * index-wide default analyzer cannot be set via these mappings, so needs to be
 * set in the index settings during index creation. For the results mapping the
 * _all field is disabled and a custom all field is used in its place. The index
 * settings must have {@code "index.query.default_field": "all_field_values" } set
 * for the queries to use the custom all field. The custom all field has its
 * analyzer set to "whitespace" by these mappings, so that it gets tokenised
 * using whitespace.
 */
public class ElasticsearchMappings {
    /**
     * String constants used in mappings
     */
    static final String ENABLED = "enabled";
    static final String ANALYZER = "analyzer";
    static final String WHITESPACE = "whitespace";
    static final String NESTED = "nested";
    static final String COPY_TO = "copy_to";
    static final String PROPERTIES = "properties";
    static final String TYPE = "type";
    static final String DYNAMIC = "dynamic";

    /**
     * Name of the custom 'all' field for results
     */
    public static final String ALL_FIELD_VALUES = "all_field_values";

    /**
     * Name of the Elasticsearch field by which documents are sorted by default
     */
    static final String ES_DOC = "_doc";

    /**
     * Elasticsearch data types
     */
    static final String BOOLEAN = "boolean";
    static final String DATE = "date";
    static final String DOUBLE = "double";
    static final String INTEGER = "integer";
    static final String KEYWORD = "keyword";
    static final String LONG = "long";
    static final String TEXT = "text";

    private ElasticsearchMappings() {
    }

    /**
     * Create the Elasticsearch mapping for results objects
     * {@link Bucket}s, {@link AnomalyRecord}s, {@link Influencer} and
     * {@link BucketInfluencer}
     *
     * The mapping has a custom all field containing the *_FIELD_VALUE fields
     * e.g. BY_FIELD_VALUE, OVER_FIELD_VALUE, etc. The custom all field {@link #ALL_FIELD_VALUES}
     * must be set in the index settings. A custom all field is preferred over the usual
     * '_all' field as most fields do not belong in '_all'; disabling '_all' and
     * using a custom all field simplifies the mapping.
     *
     * These fields are copied to the custom all field
     * <ul>
     * <li>by_field_value</li>
     * <li>partition_field_value</li>
     * <li>over_field_value</li>
     * <li>AnomalyCause.correlated_by_field_value</li>
     * <li>AnomalyCause.by_field_value</li>
     * <li>AnomalyCause.partition_field_value</li>
     * <li>AnomalyCause.over_field_value</li>
     * <li>AnomalyRecord.Influencers.influencer_field_values</li>
     * <li>Influencer.influencer_field_value</li>
     * </ul>
     *
     * @param termFieldNames All the term fields (by, over, partition) and influencers
     *                       included in the mapping
     *
     * @return The mapping
     * @throws IOException On write error
     */
    public static XContentBuilder resultsMapping(Collection<String> termFieldNames) throws IOException {
        XContentBuilder builder = jsonBuilder()
                .startObject()
                .startObject(Result.TYPE.getPreferredName())
                .startObject(PROPERTIES)
                .startObject(ALL_FIELD_VALUES)
                .field(TYPE, TEXT)
                .field(ANALYZER, WHITESPACE)
                .endObject()
                .startObject(Result.RESULT_TYPE.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(Job.ID.getPreferredName())
                .field(TYPE, KEYWORD)
                .field(COPY_TO, ALL_FIELD_VALUES)
                .endObject()
                .startObject(Bucket.TIMESTAMP.getPreferredName())
                .field(TYPE, DATE)
                .endObject()
                .startObject(Bucket.ANOMALY_SCORE.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(BucketInfluencer.RAW_ANOMALY_SCORE.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(Bucket.INITIAL_ANOMALY_SCORE.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(Bucket.MAX_NORMALIZED_PROBABILITY.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(Bucket.IS_INTERIM.getPreferredName())
                .field(TYPE, BOOLEAN)
                .endObject()
                .startObject(Bucket.RECORD_COUNT.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(Bucket.EVENT_COUNT.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(Bucket.BUCKET_SPAN.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(Bucket.PROCESSING_TIME_MS.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(Bucket.PARTITION_SCORES.getPreferredName())
                .field(TYPE, NESTED)
                .startObject(PROPERTIES)
                .startObject(AnomalyRecord.PARTITION_FIELD_NAME.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(AnomalyRecord.PARTITION_FIELD_VALUE.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(Bucket.INITIAL_ANOMALY_SCORE.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(AnomalyRecord.ANOMALY_SCORE.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(AnomalyRecord.PROBABILITY.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .endObject()
                .endObject()

                .startObject(Bucket.BUCKET_INFLUENCERS.getPreferredName())
                .field(TYPE, NESTED)
                .startObject(PROPERTIES)
                .startObject(Job.ID.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(Result.RESULT_TYPE.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(BucketInfluencer.INFLUENCER_FIELD_NAME.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(BucketInfluencer.INITIAL_ANOMALY_SCORE.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(BucketInfluencer.ANOMALY_SCORE.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(BucketInfluencer.RAW_ANOMALY_SCORE.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(BucketInfluencer.PROBABILITY.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(BucketInfluencer.TIMESTAMP.getPreferredName())
                .field(TYPE, DATE)
                .endObject()
                .startObject(BucketInfluencer.BUCKET_SPAN.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(BucketInfluencer.SEQUENCE_NUM.getPreferredName())
                .field(TYPE, INTEGER)
                .endObject()
                .startObject(BucketInfluencer.IS_INTERIM.getPreferredName())
                .field(TYPE, BOOLEAN)
                .endObject()
                .endObject()
                .endObject()

                // per-partition max probabilities mapping
                .startObject(PerPartitionMaxProbabilities.PER_PARTITION_MAX_PROBABILITIES.getPreferredName())
                .field(TYPE, NESTED)
                .startObject(PROPERTIES)
                .startObject(AnomalyRecord.PARTITION_FIELD_VALUE.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(Bucket.MAX_NORMALIZED_PROBABILITY.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .endObject()
                .endObject()

                // Model Debug Output
                .startObject(ModelDebugOutput.DEBUG_FEATURE.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(ModelDebugOutput.DEBUG_LOWER.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(ModelDebugOutput.DEBUG_UPPER.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(ModelDebugOutput.DEBUG_MEDIAN.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject();

        addAnomalyRecordFieldsToMapping(builder);
        addInfluencerFieldsToMapping(builder);
        addModelSizeStatsFieldsToMapping(builder);

        for (String fieldName : termFieldNames) {
            if (ReservedFieldNames.isValidFieldName(fieldName)) {
                builder.startObject(fieldName).field(TYPE, KEYWORD).endObject();
            }
        }

        // End result properties
        builder.endObject();
        // End result
        builder.endObject();
        // End mapping
        builder.endObject();

        return builder;
    }

    /**
     * AnomalyRecord fields to be added under the 'properties' section of the mapping
     * @param builder Add properties to this builder
     * @return builder
     * @throws IOException On write error
     */
    private static XContentBuilder addAnomalyRecordFieldsToMapping(XContentBuilder builder)
            throws IOException {
        builder.startObject(AnomalyRecord.DETECTOR_INDEX.getPreferredName())
                .field(TYPE, INTEGER)
                .endObject()
                .startObject(AnomalyRecord.SEQUENCE_NUM.getPreferredName())
                .field(TYPE, INTEGER)
                .endObject()
                .startObject(AnomalyRecord.ACTUAL.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(AnomalyRecord.TYPICAL.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(AnomalyRecord.PROBABILITY.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(AnomalyRecord.FUNCTION.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(AnomalyRecord.FUNCTION_DESCRIPTION.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(AnomalyRecord.BY_FIELD_NAME.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(AnomalyRecord.BY_FIELD_VALUE.getPreferredName())
                .field(TYPE, KEYWORD)
                .field(COPY_TO, ALL_FIELD_VALUES)
                .endObject()
                .startObject(AnomalyRecord.FIELD_NAME.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(AnomalyRecord.PARTITION_FIELD_NAME.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(AnomalyRecord.PARTITION_FIELD_VALUE.getPreferredName())
                .field(TYPE, KEYWORD)
                .field(COPY_TO, ALL_FIELD_VALUES)
                .endObject()
                .startObject(AnomalyRecord.OVER_FIELD_NAME.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(AnomalyRecord.OVER_FIELD_VALUE.getPreferredName())
                .field(TYPE, KEYWORD)
                .field(COPY_TO, ALL_FIELD_VALUES)
                .endObject()
                .startObject(AnomalyRecord.NORMALIZED_PROBABILITY.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(AnomalyRecord.INITIAL_NORMALIZED_PROBABILITY.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(AnomalyRecord.CAUSES.getPreferredName())
                .field(TYPE, NESTED)
                .startObject(PROPERTIES)
                .startObject(AnomalyCause.ACTUAL.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(AnomalyCause.TYPICAL.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(AnomalyCause.PROBABILITY.getPreferredName())
                .field(TYPE, DOUBLE)
                .endObject()
                .startObject(AnomalyCause.FUNCTION.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(AnomalyCause.FUNCTION_DESCRIPTION.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(AnomalyCause.BY_FIELD_NAME.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(AnomalyCause.BY_FIELD_VALUE.getPreferredName())
                .field(TYPE, KEYWORD)
                .field(COPY_TO, ALL_FIELD_VALUES)
                .endObject()
                .startObject(AnomalyCause.CORRELATED_BY_FIELD_VALUE.getPreferredName())
                .field(TYPE, KEYWORD)
                .field(COPY_TO, ALL_FIELD_VALUES)
                .endObject()
                .startObject(AnomalyCause.FIELD_NAME.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(AnomalyCause.PARTITION_FIELD_NAME.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(AnomalyCause.PARTITION_FIELD_VALUE.getPreferredName())
                .field(TYPE, KEYWORD)
                .field(COPY_TO, ALL_FIELD_VALUES)
                .endObject()
                .startObject(AnomalyCause.OVER_FIELD_NAME.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(AnomalyCause.OVER_FIELD_VALUE.getPreferredName())
                .field(TYPE, KEYWORD)
                .field(COPY_TO, ALL_FIELD_VALUES)
                .endObject()
                .endObject()
                .endObject()
                .startObject(AnomalyRecord.INFLUENCERS.getPreferredName())
                /* Array of influences */
                .field(TYPE, NESTED)
                .startObject(PROPERTIES)
                .startObject(Influence.INFLUENCER_FIELD_NAME.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(Influence.INFLUENCER_FIELD_VALUES.getPreferredName())
                .field(TYPE, KEYWORD)
                .field(COPY_TO, ALL_FIELD_VALUES)
                .endObject()
                .endObject()
                .endObject();

        return builder;
    }

    private static XContentBuilder addInfluencerFieldsToMapping(XContentBuilder builder) throws IOException {
        builder.startObject(Influencer.INFLUENCER_FIELD_NAME.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(Influencer.INFLUENCER_FIELD_VALUE.getPreferredName())
                .field(TYPE, KEYWORD)
                .field(COPY_TO, ALL_FIELD_VALUES)
                .endObject();

        return builder;
    }

    /**
     * {@link DataCounts} mapping.
     * The type is disabled so {@link DataCounts} aren't searchable and
     * the '_all' field is disabled
     *
     * @return The builder
     * @throws IOException On builder write error
     */
    public static XContentBuilder dataCountsMapping() throws IOException {
        return jsonBuilder()
                .startObject()
                .startObject(DataCounts.TYPE.getPreferredName())
                .field(ENABLED, false)
                .startObject(PROPERTIES)
                .startObject(Job.ID.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(DataCounts.PROCESSED_RECORD_COUNT.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(DataCounts.PROCESSED_FIELD_COUNT.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(DataCounts.INPUT_BYTES.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(DataCounts.INPUT_RECORD_COUNT.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(DataCounts.INPUT_FIELD_COUNT.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(DataCounts.INVALID_DATE_COUNT.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(DataCounts.MISSING_FIELD_COUNT.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(DataCounts.OUT_OF_ORDER_TIME_COUNT.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(DataCounts.EARLIEST_RECORD_TIME.getPreferredName())
                .field(TYPE, DATE)
                .endObject()
                .startObject(DataCounts.LATEST_RECORD_TIME.getPreferredName())
                .field(TYPE, DATE)
                .endObject()
                .endObject()
                .endObject()
                .endObject();
    }

    /**
     * {@link CategorizerState} mapping.
     * The type is disabled so {@link CategorizerState} is not searchable and
     * the '_all' field is disabled
     *
     * @return The builder
     * @throws IOException On builder write error
     */
    public static XContentBuilder categorizerStateMapping() throws IOException {
        return jsonBuilder()
                .startObject()
                .startObject(CategorizerState.TYPE)
                .field(ENABLED, false)
                .endObject()
                .endObject();
    }

    /**
     * Create the Elasticsearch mapping for {@linkplain Quantiles}.
     * The type is disabled as is the '_all' field as the document isn't meant to be searched.
     * <p>
     * The quantile state string is not searchable (enabled = false) as it could be
     * very large.
     */
    public static XContentBuilder quantilesMapping() throws IOException {
        return jsonBuilder()
                .startObject()
                .startObject(Quantiles.TYPE.getPreferredName())
                .field(ENABLED, false)
                .endObject()
                .endObject();
    }

    /**
     * Create the Elasticsearch mapping for {@linkplain CategoryDefinition}.
     * The '_all' field is disabled as the document isn't meant to be searched.
     *
     * @return The builder
     * @throws IOException On builder error
     */
    public static XContentBuilder categoryDefinitionMapping() throws IOException {
        return jsonBuilder()
                .startObject()
                .startObject(CategoryDefinition.TYPE.getPreferredName())
                .startObject(PROPERTIES)
                .startObject(CategoryDefinition.CATEGORY_ID.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(Job.ID.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(CategoryDefinition.TERMS.getPreferredName())
                .field(TYPE, TEXT)
                .endObject()
                .startObject(CategoryDefinition.REGEX.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(CategoryDefinition.MAX_MATCHING_LENGTH.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(CategoryDefinition.EXAMPLES.getPreferredName())
                .field(TYPE, TEXT)
                .endObject()
                .endObject()
                .endObject()
                .endObject();
    }

    /**
     * Create the Elasticsearch mapping for {@linkplain ModelState}.
     * The model state could potentially be huge (over a gigabyte in size)
     * so all analysis by Elasticsearch is disabled. The only way to
     * retrieve the model state is by knowing the ID of a particular
     * document or by searching for all documents of this type.
     */
    public static XContentBuilder modelStateMapping() throws IOException {
        return jsonBuilder()
                .startObject()
                .startObject(ModelState.TYPE.getPreferredName())
                .field(ENABLED, false)
                .endObject()
                .endObject();
    }

    /**
     * Create the Elasticsearch mapping for {@linkplain ModelSnapshot}.
     * The '_all' field is disabled but the type is searchable
     */
    public static XContentBuilder modelSnapshotMapping() throws IOException {
        XContentBuilder builder = jsonBuilder()
                .startObject()
                .startObject(ModelSnapshot.TYPE.getPreferredName())
                .startObject(PROPERTIES)
                .startObject(Job.ID.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(ModelSnapshot.TIMESTAMP.getPreferredName())
                .field(TYPE, DATE)
                .endObject()
                .startObject(ModelSnapshot.DESCRIPTION.getPreferredName())
                .field(TYPE, TEXT)
                .endObject()
                .startObject(ModelSnapshot.RESTORE_PRIORITY.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(ModelSnapshot.SNAPSHOT_ID.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(ModelSnapshot.SNAPSHOT_DOC_COUNT.getPreferredName())
                .field(TYPE, INTEGER)
                .endObject()
                .startObject(ModelSizeStats.RESULT_TYPE_FIELD.getPreferredName())
                .startObject(PROPERTIES)
                .startObject(Job.ID.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(Result.RESULT_TYPE.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(ModelSizeStats.TIMESTAMP_FIELD.getPreferredName())
                .field(TYPE, DATE)
                .endObject();

        addModelSizeStatsFieldsToMapping(builder);

        builder.endObject()
                .endObject()
                .startObject(Quantiles.TYPE.getPreferredName())
                .field(ENABLED, false)
                .endObject()
                .startObject(ModelSnapshot.LATEST_RECORD_TIME.getPreferredName())
                .field(TYPE, DATE)
                .endObject()
                .startObject(ModelSnapshot.LATEST_RESULT_TIME.getPreferredName())
                .field(TYPE, DATE)
                .endObject()
                .endObject()
                .endObject()
                .endObject();

        return builder;
    }

    /**
     * {@link ModelSizeStats} fields to be added under the 'properties' section of the mapping
     * @param builder Add properties to this builder
     * @return builder
     * @throws IOException On write error
     */
    private static XContentBuilder addModelSizeStatsFieldsToMapping(XContentBuilder builder) throws IOException {
        builder.startObject(ModelSizeStats.MODEL_BYTES_FIELD.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(ModelSizeStats.TOTAL_BY_FIELD_COUNT_FIELD.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(ModelSizeStats.TOTAL_OVER_FIELD_COUNT_FIELD.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(ModelSizeStats.TOTAL_PARTITION_FIELD_COUNT_FIELD.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(ModelSizeStats.BUCKET_ALLOCATION_FAILURES_COUNT_FIELD.getPreferredName())
                .field(TYPE, LONG)
                .endObject()
                .startObject(ModelSizeStats.MEMORY_STATUS_FIELD.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(ModelSizeStats.LOG_TIME_FIELD.getPreferredName())
                .field(TYPE, DATE)
                .endObject();

        return builder;
    }

    public static XContentBuilder auditMessageMapping() throws IOException {
        return jsonBuilder()
                .startObject()
                .startObject(AuditMessage.TYPE.getPreferredName())
                .startObject(PROPERTIES)
                .startObject(Job.ID.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(AuditMessage.LEVEL.getPreferredName())
                .field(TYPE, KEYWORD)
                .endObject()
                .startObject(AuditMessage.MESSAGE.getPreferredName())
                .field(TYPE, TEXT)
                .endObject()
                .startObject(AuditMessage.TIMESTAMP.getPreferredName())
                .field(TYPE, DATE)
                .endObject()
                .endObject()
                .endObject()
                .endObject();
    }

    public static XContentBuilder auditActivityMapping() throws IOException {
        return jsonBuilder()
                .startObject()
                .startObject(AuditActivity.TYPE.getPreferredName())
                .startObject(PROPERTIES)
                .startObject(AuditActivity.TIMESTAMP.getPreferredName())
                .field(TYPE, DATE)
                .endObject()
                .endObject()
                .endObject()
                .endObject();
    }
}
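A sketch of how the results mapping could be applied at index-creation time; this wiring is an assumption rather than code from this commit, and the term field names ("airline", "host") and settings keys are illustrative. It follows the class javadoc: the keyword default analyzer and the default query field must be set in the index settings, not in the mapping.

package org.elasticsearch.xpack.ml.job.persistence;

import java.io.IOException;
import java.util.Arrays;

import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.ml.job.results.Result;

class MappingsUsageSketch {
    static void createResultsIndex(Client client, String jobId) throws IOException {
        XContentBuilder mapping = ElasticsearchMappings.resultsMapping(Arrays.asList("airline", "host"));
        client.admin().indices().prepareCreate(AnomalyDetectorsIndex.jobResultsIndexName(jobId))
                // the two settings the class javadoc says the mappings rely on
                .setSettings(Settings.builder()
                        .put("index.analysis.analyzer.default.type", "keyword")
                        .put("index.query.default_field", ElasticsearchMappings.ALL_FIELD_VALUES))
                .addMapping(Result.TYPE.getPreferredName(), mapping)
                .get();
    }
}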
@ -0,0 +1,164 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.persistence;

import org.elasticsearch.xpack.ml.job.results.Influencer;

import java.util.Objects;

/**
 * One time query builder for influencers.
 * <ul>
 * <li>From- Skip the first N Influencers. This parameter is for paging; if not
 * required set to 0. Default = 0</li>
 * <li>Size- Take only this number of Influencers. Default =
 * {@value DEFAULT_SIZE}</li>
 * <li>IncludeInterim- Include interim results. Default = false</li>
 * <li>anomalyScoreThreshold- Return only influencers with an anomalyScore >=
 * this value. Default = 0.0</li>
 * <li>start- The start influencer time. An influencer with this timestamp will be
 * included in the results. If 0 all influencers up to <code>end</code> are
 * returned. Default = -1</li>
 * <li>end- The end influencer timestamp. Influencers up to but NOT including this
 * timestamp are returned. If 0 all influencers from <code>start</code> are
 * returned. Default = -1</li>
 * </ul>
 */
public final class InfluencersQueryBuilder {
    public static final int DEFAULT_SIZE = 100;

    private InfluencersQuery influencersQuery = new InfluencersQuery();

    public InfluencersQueryBuilder from(int from) {
        influencersQuery.from = from;
        return this;
    }

    public InfluencersQueryBuilder size(int size) {
        influencersQuery.size = size;
        return this;
    }

    public InfluencersQueryBuilder includeInterim(boolean include) {
        influencersQuery.includeInterim = include;
        return this;
    }

    public InfluencersQueryBuilder anomalyScoreThreshold(Double anomalyScoreFilter) {
        influencersQuery.anomalyScoreFilter = anomalyScoreFilter;
        return this;
    }

    public InfluencersQueryBuilder sortField(String sortField) {
        influencersQuery.sortField = sortField;
        return this;
    }

    public InfluencersQueryBuilder sortDescending(boolean sortDescending) {
        influencersQuery.sortDescending = sortDescending;
        return this;
    }

    /**
     * If startTime <= 0 the parameter is not set
     */
    public InfluencersQueryBuilder start(String startTime) {
        influencersQuery.start = startTime;
        return this;
    }

    /**
     * If endTime <= 0 the parameter is not set
     */
    public InfluencersQueryBuilder end(String endTime) {
        influencersQuery.end = endTime;
        return this;
    }

    public InfluencersQueryBuilder.InfluencersQuery build() {
        return influencersQuery;
    }

    public void clear() {
        influencersQuery = new InfluencersQueryBuilder.InfluencersQuery();
    }

    public class InfluencersQuery {
        private int from = 0;
        private int size = DEFAULT_SIZE;
        private boolean includeInterim = false;
        private double anomalyScoreFilter = 0.0d;
        private String start;
        private String end;
        private String sortField = Influencer.ANOMALY_SCORE.getPreferredName();
        private boolean sortDescending = false;

        public int getFrom() {
            return from;
        }

        public int getSize() {
            return size;
        }

        public boolean isIncludeInterim() {
            return includeInterim;
        }

        public double getAnomalyScoreFilter() {
            return anomalyScoreFilter;
        }

        public String getStart() {
            return start;
        }

        public String getEnd() {
            return end;
        }

        public String getSortField() {
            return sortField;
        }

        public boolean isSortDescending() {
            return sortDescending;
        }

        @Override
        public int hashCode() {
            return Objects.hash(from, size, includeInterim, anomalyScoreFilter, start, end, sortField, sortDescending);
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }

            InfluencersQuery other = (InfluencersQuery) obj;
            return Objects.equals(from, other.from) &&
                    Objects.equals(size, other.size) &&
                    Objects.equals(includeInterim, other.includeInterim) &&
                    Objects.equals(start, other.start) &&
                    Objects.equals(end, other.end) &&
                    Objects.equals(anomalyScoreFilter, other.anomalyScoreFilter) &&
                    Objects.equals(sortField, other.sortField) &&
                    this.sortDescending == other.sortDescending;
        }
    }
}
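A minimal usage sketch with illustrative values: page through influencers with the highest anomaly scores first. The 50.0 threshold is a placeholder.

package org.elasticsearch.xpack.ml.job.persistence;

class InfluencersQueryUsageSketch {
    static InfluencersQueryBuilder.InfluencersQuery topInfluencers() {
        return new InfluencersQueryBuilder()
                .from(0)
                .size(InfluencersQueryBuilder.DEFAULT_SIZE)
                .anomalyScoreThreshold(50.0)   // only significant influencers
                .sortDescending(true)          // sortField defaults to anomaly_score
                .build();
    }
}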
@ -0,0 +1,69 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.persistence;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;

import java.io.IOException;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

/**
 * Update a job's dataCounts
 * i.e. the number of processed records, fields etc.
 */
public class JobDataCountsPersister extends AbstractComponent {

    private final Client client;

    public JobDataCountsPersister(Settings settings, Client client) {
        super(settings);
        this.client = client;
    }

    private XContentBuilder serialiseCounts(DataCounts counts) throws IOException {
        XContentBuilder builder = jsonBuilder();
        return counts.toXContent(builder, ToXContent.EMPTY_PARAMS);
    }

    /**
     * Update the job's data counts stats and figures.
     *
     * @param jobId Job to update
     * @param counts The counts
     * @param listener Action response listener
     */
    public void persistDataCounts(String jobId, DataCounts counts, ActionListener<Boolean> listener) {
        try {
            XContentBuilder content = serialiseCounts(counts);
            client.prepareIndex(AnomalyDetectorsIndex.jobResultsIndexName(jobId), DataCounts.TYPE.getPreferredName(),
                    DataCounts.documentId(jobId))
                    .setSource(content).execute(new ActionListener<IndexResponse>() {
                        @Override
                        public void onResponse(IndexResponse indexResponse) {
                            listener.onResponse(true);
                        }

                        @Override
                        public void onFailure(Exception e) {
                            listener.onFailure(e);
                        }
                    });
        } catch (IOException ioe) {
            logger.warn((Supplier<?>) () -> new ParameterizedMessage("[{}] Error serialising DataCounts stats", jobId), ioe);
        }
    }
}
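A usage sketch, with assumed inputs: a live Client and a DataCounts instance from the autodetect process. Because the document ID comes from DataCounts.documentId(jobId), repeated persists overwrite the same document rather than accumulating new ones.

package org.elasticsearch.xpack.ml.job.persistence;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;

class DataCountsPersisterUsageSketch {
    static void persist(Client client, String jobId, DataCounts counts) {
        JobDataCountsPersister persister = new JobDataCountsPersister(Settings.EMPTY, client);
        persister.persistDataCounts(jobId, counts, new ActionListener<Boolean>() {
            @Override
            public void onResponse(Boolean acknowledged) {
                // the counts document is now (re)indexed at its deterministic ID
            }

            @Override
            public void onFailure(Exception e) {
                // indexing failed; whether to retry is the caller's decision
            }
        });
    }
}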
@ -0,0 +1,228 @@
|
||||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.ml.job.persistence;
|
||||
|
||||
import org.apache.logging.log4j.Level;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.bulk.BulkItemResponse;
|
||||
import org.elasticsearch.action.bulk.BulkRequestBuilder;
|
||||
import org.elasticsearch.action.bulk.BulkResponse;
|
||||
import org.elasticsearch.action.delete.DeleteAction;
|
||||
import org.elasticsearch.action.delete.DeleteRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.index.query.RangeQueryBuilder;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.search.SearchHits;
|
||||
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot;
|
||||
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelState;
|
||||
import org.elasticsearch.xpack.ml.job.results.Bucket;
|
||||
import org.elasticsearch.xpack.ml.job.results.Result;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
public class JobDataDeleter {
|
||||
|
||||
private static final Logger LOGGER = Loggers.getLogger(JobDataDeleter.class);
|
||||
|
||||
private static final int SCROLL_SIZE = 1000;
|
||||
private static final String SCROLL_CONTEXT_DURATION = "5m";
|
||||
|
||||
private final Client client;
|
||||
private final String jobId;
|
||||
private final BulkRequestBuilder bulkRequestBuilder;
|
||||
private long deletedResultCount;
|
||||
private long deletedModelSnapshotCount;
|
||||
private long deletedModelStateCount;
|
||||
private boolean quiet;
|
||||
|
||||
public JobDataDeleter(Client client, String jobId) {
|
||||
this(client, jobId, false);
|
||||
}
|
||||
|
||||
public JobDataDeleter(Client client, String jobId, boolean quiet) {
|
||||
this.client = Objects.requireNonNull(client);
|
||||
this.jobId = Objects.requireNonNull(jobId);
|
||||
bulkRequestBuilder = client.prepareBulk();
|
||||
deletedResultCount = 0;
|
||||
deletedModelSnapshotCount = 0;
|
||||
deletedModelStateCount = 0;
|
||||
this.quiet = quiet;
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously delete all result types (Buckets, Records, Influencers) from {@code cutOffTime}
|
||||
*
|
||||
* @param cutoffEpochMs Results at and after this time will be deleted
|
||||
* @param listener Response listener
|
||||
*/
|
||||
public void deleteResultsFromTime(long cutoffEpochMs, ActionListener<Boolean> listener) {
|
||||
String index = AnomalyDetectorsIndex.jobResultsIndexName(jobId);
|
||||
|
||||
RangeQueryBuilder timeRange = QueryBuilders.rangeQuery(Bucket.TIMESTAMP.getPreferredName());
|
||||
timeRange.gte(cutoffEpochMs);
|
||||
timeRange.lt(new Date().getTime());
|
||||
|
||||
RepeatingSearchScrollListener scrollSearchListener = new RepeatingSearchScrollListener(index, listener);
|
||||
|
||||
client.prepareSearch(index)
|
||||
.setTypes(Result.TYPE.getPreferredName())
|
||||
.setFetchSource(false)
|
||||
.setQuery(timeRange)
|
||||
.setScroll(SCROLL_CONTEXT_DURATION)
|
||||
.setSize(SCROLL_SIZE)
|
||||
.execute(scrollSearchListener);
|
||||
}
|
||||
|
||||
private void addDeleteRequestForSearchHits(SearchHits hits, String index) {
|
||||
for (SearchHit hit : hits.getHits()) {
|
||||
LOGGER.trace("Search hit for result: {}", hit.getId());
|
||||
addDeleteRequest(hit, index);
|
||||
}
|
||||
deletedResultCount = hits.getTotalHits();
|
||||
}
|
||||
|
||||
private void addDeleteRequest(SearchHit hit, String index) {
|
||||
DeleteRequestBuilder deleteRequest = DeleteAction.INSTANCE.newRequestBuilder(client)
|
||||
.setIndex(index)
|
||||
.setType(hit.getType())
|
||||
.setId(hit.getId());
|
||||
bulkRequestBuilder.add(deleteRequest);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a {@code ModelSnapshot}
|
||||
*
|
||||
* @param modelSnapshot the model snapshot to delete
|
||||
*/
|
||||
public void deleteModelSnapshot(ModelSnapshot modelSnapshot) {
|
||||
String snapshotId = modelSnapshot.getSnapshotId();
|
||||
int docCount = modelSnapshot.getSnapshotDocCount();
|
||||
String stateIndexName = AnomalyDetectorsIndex.jobStateIndexName();
|
||||
// Deduce the document IDs of the state documents from the information
|
||||
// in the snapshot document - we cannot query the state itself as it's
|
||||
// too big and has no mappings
|
||||
for (int i = 0; i < docCount; ++i) {
|
||||
String stateId = snapshotId + '_' + i;
|
||||
bulkRequestBuilder.add(client.prepareDelete(stateIndexName, ModelState.TYPE.getPreferredName(), stateId));
|
||||
++deletedModelStateCount;
|
||||
}
|
||||
|
||||
bulkRequestBuilder.add(client.prepareDelete(AnomalyDetectorsIndex.jobResultsIndexName(modelSnapshot.getJobId()),
|
||||
ModelSnapshot.TYPE.getPreferredName(), snapshotId));
|
||||
++deletedModelSnapshotCount;
|
||||
}

    /**
     * Delete all results marked as interim
     */
    public void deleteInterimResults() {
        String index = AnomalyDetectorsIndex.jobResultsIndexName(jobId);

        QueryBuilder qb = QueryBuilders.termQuery(Bucket.IS_INTERIM.getPreferredName(), true);

        SearchResponse searchResponse = client.prepareSearch(index)
                .setTypes(Result.TYPE.getPreferredName())
                .setQuery(new ConstantScoreQueryBuilder(qb))
                .setFetchSource(false)
                .setScroll(SCROLL_CONTEXT_DURATION)
                .setSize(SCROLL_SIZE)
                .get();

        String scrollId = searchResponse.getScrollId();
        long totalHits = searchResponse.getHits().getTotalHits();
        long totalDeletedCount = 0;
        while (totalDeletedCount < totalHits) {
            for (SearchHit hit : searchResponse.getHits()) {
                LOGGER.trace("Search hit for result: {}", hit.getId());
                ++totalDeletedCount;
                addDeleteRequest(hit, index);
                ++deletedResultCount;
            }

            searchResponse = client.prepareSearchScroll(scrollId).setScroll(SCROLL_CONTEXT_DURATION).get();
        }
    }

    /**
     * Commit the deletions without enforcing the removal of data from disk
     */
    public void commit(ActionListener<BulkResponse> listener) {
        if (bulkRequestBuilder.numberOfActions() == 0) {
            listener.onResponse(new BulkResponse(new BulkItemResponse[0], 0L));
            return;
        }

        Level logLevel = quiet ? Level.DEBUG : Level.INFO;
        LOGGER.log(logLevel, "Requesting deletion of {} results, {} model snapshots and {} model state documents",
                deletedResultCount, deletedModelSnapshotCount, deletedModelStateCount);

        try {
            bulkRequestBuilder.execute(listener);
        } catch (Exception e) {
            listener.onFailure(e);
        }
    }

    /**
     * Blocking version of {@linkplain #commit(ActionListener)}
     */
    public void commit() {
        if (bulkRequestBuilder.numberOfActions() == 0) {
            return;
        }

        Level logLevel = quiet ? Level.DEBUG : Level.INFO;
        LOGGER.log(logLevel, "Requesting deletion of {} results, {} model snapshots and {} model state documents",
                deletedResultCount, deletedModelSnapshotCount, deletedModelStateCount);

        BulkResponse response = bulkRequestBuilder.get();
        if (response.hasFailures()) {
            LOGGER.debug("Bulk request has failures. {}", response.buildFailureMessage());
        }
    }

    /**
     * Repeats a scroll search adding the hits to the bulk delete request
     */
    private class RepeatingSearchScrollListener implements ActionListener<SearchResponse> {

        private final AtomicLong totalDeletedCount;
        private final String index;
        private final ActionListener<Boolean> scrollFinishedListener;

        RepeatingSearchScrollListener(String index, ActionListener<Boolean> scrollFinishedListener) {
            totalDeletedCount = new AtomicLong(0L);
            this.index = index;
            this.scrollFinishedListener = scrollFinishedListener;
        }

        @Override
        public void onResponse(SearchResponse searchResponse) {
            addDeleteRequestForSearchHits(searchResponse.getHits(), index);

            totalDeletedCount.addAndGet(searchResponse.getHits().getHits().length);
            if (totalDeletedCount.get() < searchResponse.getHits().getTotalHits()) {
                client.prepareSearchScroll(searchResponse.getScrollId()).setScroll(SCROLL_CONTEXT_DURATION).execute(this);
            } else {
                scrollFinishedListener.onResponse(true);
            }
        }

        @Override
        public void onFailure(Exception e) {
            scrollFinishedListener.onFailure(e);
        }
    }
}
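
For orientation, a minimal usage sketch of JobDataDeleter (illustrative, not part of this commit; it assumes a Client named client, a previously fetched ModelSnapshot named snapshot, a logger, and that the third constructor argument is the quiet flag used by commit()):

    JobDataDeleter deleter = new JobDataDeleter(client, "my-job", true);   // true assumed to mean quiet logging
    deleter.deleteModelSnapshot(snapshot);                                 // queues deletes for the snapshot and its state docs
    deleter.commit(ActionListener.wrap(
            bulkResponse -> logger.info("deleted [{}] documents", bulkResponse.getItems().length),
            e -> logger.error("bulk delete failed", e)));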
@ -0,0 +1,105 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.persistence;

import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.ml.job.process.normalizer.BucketNormalizable;
import org.elasticsearch.xpack.ml.job.process.normalizer.Normalizable;
import org.elasticsearch.xpack.ml.job.results.Bucket;
import org.elasticsearch.xpack.ml.job.results.BucketInfluencer;
import org.elasticsearch.xpack.ml.job.results.Result;

import java.io.IOException;
import java.util.List;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

/**
 * Updates {@linkplain Bucket Buckets} for a particular job with new
 * normalized anomaly scores and unusual scores.
 * <p>
 * Renormalized results must already have an ID.
 * <p>
 * This class is NOT thread safe.
 */
public class JobRenormalizedResultsPersister extends AbstractComponent {

    private final Client client;
    private BulkRequest bulkRequest;

    public JobRenormalizedResultsPersister(Settings settings, Client client) {
        super(settings);
        this.client = client;
        bulkRequest = new BulkRequest();
    }

    public void updateBucket(BucketNormalizable normalizable) {
        updateResult(normalizable.getId(), normalizable.getOriginatingIndex(), normalizable.getBucket());
        updateBucketInfluencersStandalone(normalizable.getOriginatingIndex(), normalizable.getBucket().getBucketInfluencers());
    }

    private void updateBucketInfluencersStandalone(String indexName, List<BucketInfluencer> bucketInfluencers) {
        if (bucketInfluencers != null && bucketInfluencers.isEmpty() == false) {
            for (BucketInfluencer bucketInfluencer : bucketInfluencers) {
                updateResult(bucketInfluencer.getId(), indexName, bucketInfluencer);
            }
        }
    }

    public void updateResults(List<Normalizable> normalizables) {
        for (Normalizable normalizable : normalizables) {
            updateResult(normalizable.getId(), normalizable.getOriginatingIndex(), normalizable);
        }
    }

    public void updateResult(String id, String index, ToXContent resultDoc) {
        try {
            XContentBuilder content = toXContentBuilder(resultDoc);
            bulkRequest.add(new IndexRequest(index, Result.TYPE.getPreferredName(), id).source(content));
        } catch (IOException e) {
            logger.error("Error serialising result", e);
        }
    }

    private XContentBuilder toXContentBuilder(ToXContent obj) throws IOException {
        XContentBuilder builder = jsonBuilder();
        obj.toXContent(builder, ToXContent.EMPTY_PARAMS);
        return builder;
    }

    /**
     * Execute the bulk action
     *
     * @param jobId The job Id
     */
    public void executeRequest(String jobId) {
        if (bulkRequest.numberOfActions() == 0) {
            return;
        }
        logger.trace("[{}] ES API CALL: bulk request with {} actions", jobId, bulkRequest.numberOfActions());

        BulkResponse addRecordsResponse = client.bulk(bulkRequest).actionGet();
        if (addRecordsResponse.hasFailures()) {
            logger.error("[{}] Bulk index of results has errors: {}", jobId, addRecordsResponse.buildFailureMessage());
        }

        bulkRequest = new BulkRequest();
    }

    BulkRequest getBulkRequest() {
        return bulkRequest;
    }
}
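
A minimal usage sketch for the renormalization persister above (illustrative, not part of this commit; assumes a Client named client and a BucketNormalizable produced elsewhere by the normalizer process):

    JobRenormalizedResultsPersister persister = new JobRenormalizedResultsPersister(Settings.EMPTY, client);
    persister.updateBucket(bucketNormalizable);   // queues the bucket and its bucket influencers
    persister.executeRequest("my-job");           // flushes the bulk request, logging any failures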
@ -0,0 +1,376 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.persistence;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSizeStats;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.ml.job.process.autodetect.state.Quantiles;
import org.elasticsearch.xpack.ml.job.results.AnomalyRecord;
import org.elasticsearch.xpack.ml.job.results.Bucket;
import org.elasticsearch.xpack.ml.job.results.BucketInfluencer;
import org.elasticsearch.xpack.ml.job.results.CategoryDefinition;
import org.elasticsearch.xpack.ml.job.results.Influencer;
import org.elasticsearch.xpack.ml.job.results.ModelDebugOutput;
import org.elasticsearch.xpack.ml.job.results.PerPartitionMaxProbabilities;
import org.elasticsearch.xpack.ml.job.results.Result;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

/**
 * Persists result types, Quantiles etc to Elasticsearch<br>
 * <h2>Bucket</h2> Bucket result. The anomaly score of the bucket may not match the summed
 * score of all the records, as not all the records may have been output for the
 * bucket. Contains bucket influencers that are persisted both with the bucket
 * and separately.
 * <b>Anomaly Record</b> Each record was generated by a detector which can be identified via
 * the detectorIndex field.
 * <b>Influencers</b>
 * <b>Quantiles</b> may contain model quantiles used in normalization and are
 * stored in documents of type {@link Quantiles#TYPE} <br>
 * <b>ModelSizeStats</b> This is stored in a flat structure <br>
 * <b>ModelSnapShot</b> This is stored in a flat structure <br>
 *
 * @see org.elasticsearch.xpack.ml.job.persistence.ElasticsearchMappings
 */
public class JobResultsPersister extends AbstractComponent {

    private final Client client;

    public JobResultsPersister(Settings settings, Client client) {
        super(settings);
        this.client = client;
    }

    public Builder bulkPersisterBuilder(String jobId) {
        return new Builder(jobId);
    }

    public class Builder {
        private BulkRequest bulkRequest;
        private final String jobId;
        private final String indexName;

        private Builder(String jobId) {
            this.jobId = Objects.requireNonNull(jobId);
            indexName = AnomalyDetectorsIndex.jobResultsIndexName(jobId);
            bulkRequest = new BulkRequest();
        }

        /**
         * Persist the result bucket and its bucket influencers.
         * Buckets are persisted with a consistent ID.
         *
         * @param bucket The bucket to persist
         * @return this
         */
        public Builder persistBucket(Bucket bucket) {
            // If the supplied bucket has records then create a copy with records
            // removed, because we never persist nested records in buckets
            Bucket bucketWithoutRecords = bucket;
            if (!bucketWithoutRecords.getRecords().isEmpty()) {
                bucketWithoutRecords = new Bucket(bucket);
                bucketWithoutRecords.setRecords(Collections.emptyList());
            }
            try {
                XContentBuilder content = toXContentBuilder(bucketWithoutRecords);
                logger.trace("[{}] ES API CALL: index result type {} to index {} at epoch {}",
                        jobId, Bucket.RESULT_TYPE_VALUE, indexName, bucketWithoutRecords.getEpoch());

                bulkRequest.add(new IndexRequest(indexName, Result.TYPE.getPreferredName(),
                        bucketWithoutRecords.getId()).source(content));

                persistBucketInfluencersStandalone(jobId, bucketWithoutRecords.getBucketInfluencers());
            } catch (IOException e) {
                logger.error(new ParameterizedMessage("[{}] Error serialising bucket", new Object[] {jobId}), e);
            }

            return this;
        }

        private void persistBucketInfluencersStandalone(String jobId, List<BucketInfluencer> bucketInfluencers)
                throws IOException {
            if (bucketInfluencers != null && bucketInfluencers.isEmpty() == false) {
                for (BucketInfluencer bucketInfluencer : bucketInfluencers) {
                    XContentBuilder content = serialiseBucketInfluencerStandalone(bucketInfluencer);
                    // Need consistent IDs to ensure overwriting on renormalization
                    String id = bucketInfluencer.getId();
                    logger.trace("[{}] ES BULK ACTION: index result type {} to index {} with ID {}",
                            jobId, BucketInfluencer.RESULT_TYPE_VALUE, indexName, id);
                    bulkRequest.add(new IndexRequest(indexName, Result.TYPE.getPreferredName(), id).source(content));
                }
            }
        }

        /**
         * Persist a list of anomaly records
         *
         * @param records the records to persist
         * @return this
         */
        public Builder persistRecords(List<AnomalyRecord> records) {
            try {
                for (AnomalyRecord record : records) {
                    XContentBuilder content = toXContentBuilder(record);
                    logger.trace("[{}] ES BULK ACTION: index result type {} to index {} with ID {}",
                            jobId, AnomalyRecord.RESULT_TYPE_VALUE, indexName, record.getId());
                    bulkRequest.add(new IndexRequest(indexName, Result.TYPE.getPreferredName(), record.getId()).source(content));
                }
            } catch (IOException e) {
                logger.error(new ParameterizedMessage("[{}] Error serialising records", new Object[] {jobId}), e);
            }

            return this;
        }

        /**
         * Persist a list of influencers, using each influencer's ID
         *
         * @param influencers the influencers to persist
         * @return this
         */
        public Builder persistInfluencers(List<Influencer> influencers) {
            try {
                for (Influencer influencer : influencers) {
                    XContentBuilder content = toXContentBuilder(influencer);
                    logger.trace("[{}] ES BULK ACTION: index result type {} to index {} with ID {}",
                            jobId, Influencer.RESULT_TYPE_VALUE, indexName, influencer.getId());
                    bulkRequest.add(new IndexRequest(indexName, Result.TYPE.getPreferredName(), influencer.getId()).source(content));
                }
            } catch (IOException e) {
                logger.error(new ParameterizedMessage("[{}] Error serialising influencers", new Object[] {jobId}), e);
            }

            return this;
        }

        /**
         * Persist {@link PerPartitionMaxProbabilities}
         *
         * @param partitionProbabilities The probabilities to persist
         * @return this
         */
        public Builder persistPerPartitionMaxProbabilities(PerPartitionMaxProbabilities partitionProbabilities) {
            try {
                XContentBuilder builder = toXContentBuilder(partitionProbabilities);
                logger.trace("[{}] ES API CALL: index result type {} to index {} at timestamp {} with ID {}",
                        jobId, PerPartitionMaxProbabilities.RESULT_TYPE_VALUE, indexName, partitionProbabilities.getTimestamp(),
                        partitionProbabilities.getId());
                bulkRequest.add(
                        new IndexRequest(indexName, Result.TYPE.getPreferredName(), partitionProbabilities.getId()).source(builder));
            } catch (IOException e) {
                logger.error(new ParameterizedMessage("[{}] error serialising bucket per partition max normalized scores",
                        new Object[]{jobId}), e);
            }

            return this;
        }

        /**
         * Execute the bulk action
         */
        public void executeRequest() {
            if (bulkRequest.numberOfActions() == 0) {
                return;
            }
            logger.trace("[{}] ES API CALL: bulk request with {} actions", jobId, bulkRequest.numberOfActions());

            BulkResponse addRecordsResponse = client.bulk(bulkRequest).actionGet();
            if (addRecordsResponse.hasFailures()) {
                logger.error("[{}] Bulk index of results has errors: {}", jobId, addRecordsResponse.buildFailureMessage());
            }
        }
    }

    /**
     * Persist the category definition
     *
     * @param category The category to be persisted
     */
    public void persistCategoryDefinition(CategoryDefinition category) {
        Persistable persistable = new Persistable(category.getJobId(), category, CategoryDefinition.TYPE.getPreferredName(),
                CategoryDefinition.documentId(category.getJobId(), Long.toString(category.getCategoryId())));
        persistable.persist(AnomalyDetectorsIndex.jobResultsIndexName(category.getJobId()));
        // Don't commit as we expect masses of these updates and they're not
        // read again by this process
    }

    /**
     * Persist the quantiles
     */
    public void persistQuantiles(Quantiles quantiles) {
        Persistable persistable = new Persistable(quantiles.getJobId(), quantiles, Quantiles.TYPE.getPreferredName(),
                Quantiles.documentId(quantiles.getJobId()));
        if (persistable.persist(AnomalyDetectorsIndex.jobStateIndexName())) {
            // Refresh the index when persisting quantiles so that previously
            // persisted results will be available for searching. Do this using the
            // indices API rather than the index API (used to write the quantiles
            // above), because this will refresh all shards rather than just the
            // shard that the quantiles document itself was written to.
            commitStateWrites(quantiles.getJobId());
        }
    }

    /**
     * Persist a model snapshot description
     */
    public void persistModelSnapshot(ModelSnapshot modelSnapshot) {
        Persistable persistable = new Persistable(modelSnapshot.getJobId(), modelSnapshot, ModelSnapshot.TYPE.getPreferredName(),
                modelSnapshot.documentId());
        persistable.persist(AnomalyDetectorsIndex.jobResultsIndexName(modelSnapshot.getJobId()));
    }

    public void updateModelSnapshot(ModelSnapshot modelSnapshot, Consumer<Boolean> handler, Consumer<Exception> errorHandler) {
        String index = AnomalyDetectorsIndex.jobResultsIndexName(modelSnapshot.getJobId());
        IndexRequest indexRequest = new IndexRequest(index, ModelSnapshot.TYPE.getPreferredName(), modelSnapshot.documentId());
        try {
            indexRequest.source(toXContentBuilder(modelSnapshot));
        } catch (IOException e) {
            errorHandler.accept(e);
            // Don't index a request with no source if serialisation failed
            return;
        }
        client.index(indexRequest, ActionListener.wrap(r -> handler.accept(true), errorHandler));
    }

    /**
     * Persist the memory usage data
     */
    public void persistModelSizeStats(ModelSizeStats modelSizeStats) {
        String jobId = modelSizeStats.getJobId();
        logger.trace("[{}] Persisting model size stats, for size {}", jobId, modelSizeStats.getModelBytes());
        Persistable persistable = new Persistable(modelSizeStats.getJobId(), modelSizeStats, Result.TYPE.getPreferredName(),
                ModelSizeStats.documentId(jobId));
        persistable.persist(AnomalyDetectorsIndex.jobResultsIndexName(jobId));
        persistable = new Persistable(modelSizeStats.getJobId(), modelSizeStats, Result.TYPE.getPreferredName(), null);
        persistable.persist(AnomalyDetectorsIndex.jobResultsIndexName(jobId));
        // Don't commit as we expect masses of these updates and they're only
        // for information at the API level
    }

    /**
     * Persist model debug output
     */
    public void persistModelDebugOutput(ModelDebugOutput modelDebugOutput) {
        Persistable persistable = new Persistable(modelDebugOutput.getJobId(), modelDebugOutput, Result.TYPE.getPreferredName(), null);
        persistable.persist(AnomalyDetectorsIndex.jobResultsIndexName(modelDebugOutput.getJobId()));
        // Don't commit as we expect masses of these updates and they're not
        // read again by this process
    }

    /**
     * Delete any existing interim results synchronously
     */
    public void deleteInterimResults(String jobId) {
        JobDataDeleter deleter = new JobDataDeleter(client, jobId, true);
        deleter.deleteInterimResults();
        deleter.commit();
    }

    /**
     * Once all the job data has been written this function will be
     * called to commit the writes to the datastore.
     *
     * @param jobId The job Id
     * @return True if successful
     */
    public boolean commitResultWrites(String jobId) {
        String indexName = AnomalyDetectorsIndex.jobResultsIndexName(jobId);
        // Refresh should wait for Lucene to make the data searchable
        logger.trace("[{}] ES API CALL: refresh index {}", jobId, indexName);
        client.admin().indices().refresh(new RefreshRequest(indexName)).actionGet();
        return true;
    }

    /**
     * Once the job state has been written calling this function makes it
     * immediately searchable.
     *
     * @param jobId The job Id
     * @return True if successful
     */
    public boolean commitStateWrites(String jobId) {
        String indexName = AnomalyDetectorsIndex.jobStateIndexName();
        // Refresh should wait for Lucene to make the data searchable
        logger.trace("[{}] ES API CALL: refresh index {}", jobId, indexName);
        RefreshRequest refreshRequest = new RefreshRequest(indexName);
        client.admin().indices().refresh(refreshRequest).actionGet();
        return true;
    }

    XContentBuilder toXContentBuilder(ToXContent obj) throws IOException {
        XContentBuilder builder = jsonBuilder();
        obj.toXContent(builder, ToXContent.EMPTY_PARAMS);
        return builder;
    }

    private XContentBuilder serialiseBucketInfluencerStandalone(BucketInfluencer bucketInfluencer) throws IOException {
        XContentBuilder builder = jsonBuilder();
        bucketInfluencer.toXContent(builder, ToXContent.EMPTY_PARAMS);
        return builder;
    }

    private class Persistable {

        private final String jobId;
        private final ToXContent object;
        private final String type;
        private final String id;

        Persistable(String jobId, ToXContent object, String type, String id) {
            this.jobId = jobId;
            this.object = object;
            this.type = type;
            // TODO (norelease): Fix the assertion tripping in internal engine for index requests without an id being retried:
            this.id = id != null ? id : UUIDs.base64UUID();
        }

        boolean persist(String indexName) {
            if (object == null) {
                logger.warn("[{}] No {} to persist for job", jobId, type);
                return false;
            }

            logCall(indexName);

            try {
                IndexRequest indexRequest = new IndexRequest(indexName, type, id)
                        .source(toXContentBuilder(object));
                client.index(indexRequest).actionGet();
                return true;
            } catch (IOException e) {
                logger.error(new ParameterizedMessage("[{}] Error writing {}", new Object[]{jobId, type}), e);
                return false;
            }
        }

        private void logCall(String indexName) {
            if (id != null) {
                logger.trace("[{}] ES API CALL: index type {} to index {} with ID {}", jobId, type, indexName, id);
            } else {
                logger.trace("[{}] ES API CALL: index type {} to index {} with auto-generated ID", jobId, type, indexName);
            }
        }
    }
}
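
A minimal usage sketch for the bulk builder above (illustrative, not part of this commit; assumes a Client named client plus a bucket, records and influencers parsed from the autodetect process):

    JobResultsPersister persister = new JobResultsPersister(Settings.EMPTY, client);
    persister.bulkPersisterBuilder("my-job")
            .persistBucket(bucket)
            .persistRecords(records)
            .persistInfluencers(influencers)
            .executeRequest();                 // one bulk request for everything queued
    persister.commitResultWrites("my-job");    // refresh so the new results are searchable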
@ -0,0 +1,94 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.persistence;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.bulk.byscroll.BulkByScrollResponse;
import org.elasticsearch.action.bulk.byscroll.DeleteByQueryRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.xpack.ml.action.MlDeleteByQueryAction;

import java.util.function.Consumer;

public class JobStorageDeletionTask extends Task {
    private final Logger logger;

    public JobStorageDeletionTask(long id, String type, String action, String description, TaskId parentTask) {
        super(id, type, action, description, parentTask);
        this.logger = Loggers.getLogger(getClass());
    }

    public void delete(String jobId, String indexName, Client client,
                       CheckedConsumer<Boolean, Exception> finishedHandler,
                       Consumer<Exception> failureHandler) {

        // Step 2. Regardless of whether the DBQ succeeds, we delete the physical index
        // -------
        CheckedConsumer<BulkByScrollResponse, Exception> dbqHandler = bulkByScrollResponse -> {
            if (bulkByScrollResponse.isTimedOut()) {
                logger.warn("DeleteByQuery for index [" + indexName + "] timed out. Continuing to delete index.");
            }
            if (!bulkByScrollResponse.getBulkFailures().isEmpty()) {
                logger.warn("[" + bulkByScrollResponse.getBulkFailures().size()
                        + "] failures encountered while running DeleteByQuery on index [" + indexName + "]. "
                        + "Continuing to delete index");
            }

            DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indexName);
            client.admin().indices().delete(deleteIndexRequest, ActionListener.wrap(deleteIndexResponse -> {
                logger.info("Deleting index [" + indexName + "] successful");

                if (deleteIndexResponse.isAcknowledged()) {
                    logger.info("Index deletion acknowledged");
                } else {
                    logger.warn("Index deletion not acknowledged");
                }
                finishedHandler.accept(deleteIndexResponse.isAcknowledged());
            }, missingIndexHandler(indexName, finishedHandler, failureHandler)));
        };

        // Step 1. DeleteByQuery on the index, matching all docs with the right job_id
        // -------
        SearchRequest searchRequest = new SearchRequest(indexName);
        searchRequest.source(new SearchSourceBuilder().query(new TermQueryBuilder("job_id", jobId)));
        DeleteByQueryRequest request = new DeleteByQueryRequest(searchRequest);
        request.setSlices(5);

        client.execute(MlDeleteByQueryAction.INSTANCE, request,
                ActionListener.wrap(dbqHandler, missingIndexHandler(indexName, finishedHandler, failureHandler)));
    }

    // If the index doesn't exist, we need to catch the exception and carry onwards so that the cluster
    // state is properly updated
    private Consumer<Exception> missingIndexHandler(String indexName, CheckedConsumer<Boolean, Exception> finishedHandler,
                                                    Consumer<Exception> failureHandler) {
        return e -> {
            if (e instanceof IndexNotFoundException) {
                logger.warn("Physical index [" + indexName + "] not found. Continuing to delete job.");
                try {
                    finishedHandler.accept(false);
                } catch (Exception e1) {
                    failureHandler.accept(e1);
                }
            } else {
                // all other exceptions are propagated to the failure handler
                failureHandler.accept(e);
            }
        };
    }
}
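
A minimal invocation sketch (illustrative, not part of this commit; the task instance, client and handlers are assumed to be supplied by the caller, typically the delete-job action):

    task.delete("my-job", AnomalyDetectorsIndex.jobResultsIndexName("my-job"), client,
            acknowledged -> logger.info("job storage deleted, acknowledged [{}]", acknowledged),
            e -> logger.error("job storage deletion failed", e));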
@ -0,0 +1,152 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.persistence;

/**
 * One-time query builder for records. Sets default values for the following
 * parameters:
 * <ul>
 * <li>From- Skip the first N records. This parameter is for paging; if not
 * required set to 0. Default = 0</li>
 * <li>Size- Take only this number of records. Default =
 * {@value DEFAULT_SIZE}</li>
 * <li>IncludeInterim- Include interim results. Default = false</li>
 * <li>SortField- The field to sort results by; if <code>null</code> no sort is
 * applied. Default = null</li>
 * <li>SortDescending- Sort in descending order. Default = true</li>
 * <li>anomalyScoreThreshold- Return only records with an anomalyScore >=
 * this value. Default = 0.0</li>
 * <li>normalizedProbabilityThreshold- Return only records with a
 * maxNormalizedProbability >= this value. Default = 0.0</li>
 * <li>start- The start bucket time. A bucket with this timestamp will be
 * included in the results. If 0 all buckets up to <code>endEpochMs</code> are
 * returned. Default = -1</li>
 * <li>end- The end bucket timestamp; buckets up to but NOT including this
 * timestamp are returned. If 0 all buckets from <code>startEpochMs</code> are
 * returned. Default = -1</li>
 * </ul>
 */
public final class RecordsQueryBuilder {

    public static final int DEFAULT_SIZE = 100;

    private RecordsQuery recordsQuery = new RecordsQuery();

    public RecordsQueryBuilder from(int from) {
        recordsQuery.from = from;
        return this;
    }

    public RecordsQueryBuilder size(int size) {
        recordsQuery.size = size;
        return this;
    }

    public RecordsQueryBuilder epochStart(String startTime) {
        recordsQuery.start = startTime;
        return this;
    }

    public RecordsQueryBuilder epochEnd(String endTime) {
        recordsQuery.end = endTime;
        return this;
    }

    public RecordsQueryBuilder includeInterim(boolean include) {
        recordsQuery.includeInterim = include;
        return this;
    }

    public RecordsQueryBuilder sortField(String fieldname) {
        recordsQuery.sortField = fieldname;
        return this;
    }

    public RecordsQueryBuilder sortDescending(boolean sortDescending) {
        recordsQuery.sortDescending = sortDescending;
        return this;
    }

    public RecordsQueryBuilder anomalyScoreThreshold(double anomalyScoreFilter) {
        recordsQuery.anomalyScoreFilter = anomalyScoreFilter;
        return this;
    }

    public RecordsQueryBuilder normalizedProbability(double normalizedProbability) {
        recordsQuery.normalizedProbability = normalizedProbability;
        return this;
    }

    public RecordsQueryBuilder partitionFieldValue(String partitionFieldValue) {
        recordsQuery.partitionFieldValue = partitionFieldValue;
        return this;
    }

    public RecordsQuery build() {
        return recordsQuery;
    }

    public void clear() {
        recordsQuery = new RecordsQuery();
    }

    public class RecordsQuery {

        private int from = 0;
        private int size = DEFAULT_SIZE;
        private boolean includeInterim = false;
        private String sortField;
        private boolean sortDescending = true;
        private double anomalyScoreFilter = 0.0d;
        private double normalizedProbability = 0.0d;
        private String partitionFieldValue;
        private String start;
        private String end;

        public int getSize() {
            return size;
        }

        public boolean isIncludeInterim() {
            return includeInterim;
        }

        public String getSortField() {
            return sortField;
        }

        public boolean isSortDescending() {
            return sortDescending;
        }

        public double getAnomalyScoreThreshold() {
            return anomalyScoreFilter;
        }

        public double getNormalizedProbabilityThreshold() {
            return normalizedProbability;
        }

        public String getPartitionFieldValue() {
            return partitionFieldValue;
        }

        public int getFrom() {
            return from;
        }

        public String getStart() {
            return start;
        }

        public String getEnd() {
            return end;
        }
    }
}
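
A minimal usage sketch (illustrative, not part of this commit; the epoch strings and sort field are placeholder values): building a query for the 50 highest-scoring non-interim records in a time range.

    RecordsQueryBuilder.RecordsQuery query = new RecordsQueryBuilder()
            .epochStart("1454020800000")
            .epochEnd("1455230400000")
            .from(0)
            .size(50)
            .includeInterim(false)
            .sortField("anomaly_score")
            .sortDescending(true)
            .anomalyScoreThreshold(75.0)
            .build();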
@ -0,0 +1,113 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.job.persistence;

import org.elasticsearch.common.Strings;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.xpack.ml.job.results.Result;

import java.util.ArrayList;
import java.util.List;

/**
 * This builder facilitates the creation of a {@link QueryBuilder} with common
 * characteristics to both buckets and records.
 */
class ResultsFilterBuilder {
    private final List<QueryBuilder> queries;

    ResultsFilterBuilder() {
        queries = new ArrayList<>();
    }

    ResultsFilterBuilder(QueryBuilder queryBuilder) {
        this();
        queries.add(queryBuilder);
    }

    ResultsFilterBuilder timeRange(String field, Object start, Object end) {
        if (start != null || end != null) {
            RangeQueryBuilder timeRange = QueryBuilders.rangeQuery(field);
            if (start != null) {
                timeRange.gte(start);
            }
            if (end != null) {
                timeRange.lt(end);
            }
            addQuery(timeRange);
        }
        return this;
    }

    ResultsFilterBuilder timeRange(String field, String timestamp) {
        addQuery(QueryBuilders.matchQuery(field, timestamp));
        return this;
    }

    ResultsFilterBuilder score(String fieldName, double threshold) {
        if (threshold > 0.0) {
            RangeQueryBuilder scoreFilter = QueryBuilders.rangeQuery(fieldName);
            scoreFilter.gte(threshold);
            addQuery(scoreFilter);
        }
        return this;
    }

    public ResultsFilterBuilder interim(String fieldName, boolean includeInterim) {
        if (includeInterim) {
            // Including interim results does not stop final results being
            // shown, so including interim results means no filtering on the
            // isInterim field
            return this;
        }

        // Implemented as "NOT isInterim == true" so that not present and null
        // are equivalent to false. This improves backwards compatibility.
        // Also, note how for a boolean field, unlike numeric term queries, the
        // term value is supplied as a string.
        TermQueryBuilder interimFilter = QueryBuilders.termQuery(fieldName,
                Boolean.TRUE.toString());
        QueryBuilder notInterimFilter = QueryBuilders.boolQuery().mustNot(interimFilter);
        addQuery(notInterimFilter);
        return this;
    }
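
    // For example (illustrative), interim("is_interim", false) adds a filter roughly
    // equivalent to the query JSON: {"bool": {"must_not": [{"term": {"is_interim": "true"}}]}}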

    ResultsFilterBuilder term(String fieldName, String fieldValue) {
        if (Strings.isNullOrEmpty(fieldName) || Strings.isNullOrEmpty(fieldValue)) {
            return this;
        }

        TermQueryBuilder termQueryBuilder = QueryBuilders.termQuery(fieldName, fieldValue);
        addQuery(termQueryBuilder);
        return this;
    }

    ResultsFilterBuilder resultType(String resultType) {
        return term(Result.RESULT_TYPE.getPreferredName(), resultType);
    }

    private void addQuery(QueryBuilder fb) {
        queries.add(fb);
    }

    public QueryBuilder build() {
        if (queries.isEmpty()) {
            return QueryBuilders.matchAllQuery();
        }
        if (queries.size() == 1) {
            return queries.get(0);
        }
        BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
        for (QueryBuilder query : queries) {
            boolQueryBuilder.filter(query);
        }
        return boolQueryBuilder;
    }
}
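
A minimal usage sketch (illustrative, not part of this commit; the field names are placeholders): build() ANDs the accumulated filters together in a single bool query, or returns a match-all query when no filter was added.

    QueryBuilder recordFilter = new ResultsFilterBuilder()
            .timeRange("timestamp", 1454020800000L, 1455230400000L)
            .score("anomaly_score", 75.0)
            .interim("is_interim", false)
            .resultType("record")
            .build();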