Camel to snake case (elastic/elasticsearch#461)
Original commit: elastic/x-pack-elasticsearch@222065cc4c
parent b5c6970209
commit 98eb5534ee
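The pattern repeated through the diff below: every JSON wire name registered via a ParseField moves from camelCase to snake_case, and ad-hoc string literals such as "jobId" or "snapshotId" are replaced with the canonical constants (Job.ID.getPreferredName(), ModelSnapshot.SNAPSHOT_ID.getPreferredName()). A minimal sketch of the before/after; the ExampleConfig class is hypothetical and not part of the commit:

import org.elasticsearch.common.ParseField;

// Hypothetical class illustrating the rename applied across this commit.
public class ExampleConfig {
    // Before: the wire name used camelCase:
    //   public static final ParseField BUCKET_SPAN = new ParseField("bucketSpan");
    // After: the wire name is snake_case; the Java constant is unchanged, so only
    // the serialized form, the parsed form, and interpolated messages change.
    public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");

    public static void main(String[] args) {
        // Prints "bucket_span", the name used wherever the field is
        // serialized, parsed, or written into log/exception messages.
        System.out.println(BUCKET_SPAN.getPreferredName());
    }
}

Reusing Job.ID.getPreferredName() instead of a literal "jobId" keeps log and validation messages in sync with the wire format automatically.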
@@ -19,6 +19,7 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -68,8 +69,8 @@ public class DeleteModelSnapshotAction extends Action<DeleteModelSnapshotAction.
 }

 public Request(String jobId, String snapshotId) {
-this.jobId = ExceptionsHelper.requireNonNull(jobId, "jobId");
-this.snapshotId = ExceptionsHelper.requireNonNull(snapshotId, "snapshotId");
+this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
+this.snapshotId = ExceptionsHelper.requireNonNull(snapshotId, ModelSnapshot.SNAPSHOT_ID.getPreferredName());
 }

 public String getJobId() {

@@ -158,8 +159,8 @@ public class DeleteModelSnapshotAction extends Action<DeleteModelSnapshotAction.
 ).results();

 if (deleteCandidates.size() > 1) {
-logger.warn("More than one model found for [jobId: " + request.getJobId()
-+ ", snapshotId: " + request.getSnapshotId() + "] tuple.");
+logger.warn("More than one model found for [job_id: " + request.getJobId()
++ ", snapshot_id: " + request.getSnapshotId() + "] tuple.");
 }

 if (deleteCandidates.isEmpty()) {
@@ -67,12 +67,12 @@ public class GetBucketsAction extends Action<GetBucketsAction.Request, GetBucket
 public static class Request extends ActionRequest implements ToXContent {

 public static final ParseField EXPAND = new ParseField("expand");
-public static final ParseField INCLUDE_INTERIM = new ParseField("includeInterim");
-public static final ParseField PARTITION_VALUE = new ParseField("partitionValue");
+public static final ParseField INCLUDE_INTERIM = new ParseField("include_interim");
+public static final ParseField PARTITION_VALUE = new ParseField("partition_value");
 public static final ParseField START = new ParseField("start");
 public static final ParseField END = new ParseField("end");
-public static final ParseField ANOMALY_SCORE = new ParseField("anomalyScore");
-public static final ParseField MAX_NORMALIZED_PROBABILITY = new ParseField("maxNormalizedProbability");
+public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score");
+public static final ParseField MAX_NORMALIZED_PROBABILITY = new ParseField("max_normalized_probability");
 public static final ParseField TIMESTAMP = new ParseField("timestamp");

 private static final ObjectParser<Request, ParseFieldMatcherSupplier> PARSER = new ObjectParser<>(NAME, Request::new);
@@ -57,7 +57,7 @@ Action<GetCategoriesDefinitionAction.Request, GetCategoriesDefinitionAction.Resp

 public static class Request extends ActionRequest {

-public static final ParseField CATEGORY_ID = new ParseField("categoryId");
+public static final ParseField CATEGORY_ID = new ParseField("category_id");
 public static final ParseField FROM = new ParseField("from");
 public static final ParseField SIZE = new ParseField("size");
@@ -64,8 +64,8 @@ extends Action<GetInfluencersAction.Request, GetInfluencersAction.Response, GetI

 public static final ParseField START = new ParseField("start");
 public static final ParseField END = new ParseField("end");
-public static final ParseField INCLUDE_INTERIM = new ParseField("includeInterim");
-public static final ParseField ANOMALY_SCORE = new ParseField("anomalyScore");
+public static final ParseField INCLUDE_INTERIM = new ParseField("include_interim");
+public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score");
 public static final ParseField SORT_FIELD = new ParseField("sort");
 public static final ParseField DESCENDING_SORT = new ParseField("desc");
@@ -494,7 +494,7 @@ public class GetJobsAction extends Action<GetJobsAction.Request, GetJobsAction.R
 logger.debug(String.format(Locale.ROOT, "Cannot find job '%s'", request.getJobId()));
 throw QueryPage.emptyQueryPage(Job.RESULTS_FIELD);
 } else if (jobs.count() > 1) {
-logger.error(String.format(Locale.ROOT, "More than one job found for jobId [%s]", request.getJobId()));
+logger.error("More than one job found for {} [{}]", Job.ID.getPreferredName(), request.getJobId());
 }

 logger.debug("Returning job [" + request.getJobId() + "]");
@@ -64,12 +64,12 @@ public class GetRecordsAction extends Action<GetRecordsAction.Request, GetRecord

 public static final ParseField START = new ParseField("start");
 public static final ParseField END = new ParseField("end");
-public static final ParseField INCLUDE_INTERIM = new ParseField("includeInterim");
-public static final ParseField ANOMALY_SCORE_FILTER = new ParseField("anomalyScore");
+public static final ParseField INCLUDE_INTERIM = new ParseField("include_interim");
+public static final ParseField ANOMALY_SCORE_FILTER = new ParseField("anomaly_score");
 public static final ParseField SORT = new ParseField("sort");
 public static final ParseField DESCENDING = new ParseField("desc");
-public static final ParseField MAX_NORMALIZED_PROBABILITY = new ParseField("normalizedProbability");
-public static final ParseField PARTITION_VALUE = new ParseField("partitionValue");
+public static final ParseField MAX_NORMALIZED_PROBABILITY = new ParseField("normalized_probability");
+public static final ParseField PARTITION_VALUE = new ParseField("partition_value");

 private static final ObjectParser<Request, ParseFieldMatcherSupplier> PARSER = new ObjectParser<>(NAME, Request::new);
@@ -31,6 +31,7 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.prelert.PrelertPlugin;
 import org.elasticsearch.xpack.prelert.job.DataCounts;
+import org.elasticsearch.xpack.prelert.job.Job;
 import org.elasticsearch.xpack.prelert.job.manager.AutodetectProcessManager;
 import org.elasticsearch.xpack.prelert.job.process.autodetect.params.DataLoadParams;
 import org.elasticsearch.xpack.prelert.job.process.autodetect.params.TimeRange;
@@ -136,9 +137,9 @@ public class PostDataAction extends Action<PostDataAction.Request, PostDataActio

 public static class Request extends ActionRequest {

-public static final ParseField IGNORE_DOWNTIME = new ParseField("ignoreDowntime");
-public static final ParseField RESET_START = new ParseField("resetStart");
-public static final ParseField RESET_END = new ParseField("resetEnd");
+public static final ParseField IGNORE_DOWNTIME = new ParseField("ignore_downtime");
+public static final ParseField RESET_START = new ParseField("reset_start");
+public static final ParseField RESET_END = new ParseField("reset_end");

 private String jobId;
 private boolean ignoreDowntime = false;

@@ -150,7 +151,7 @@ public class PostDataAction extends Action<PostDataAction.Request, PostDataActio
 }

 public Request(String jobId) {
-ExceptionsHelper.requireNonNull(jobId, "jobId");
+ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
 this.jobId = jobId;
 }
@@ -28,6 +28,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.prelert.job.Job;
 import org.elasticsearch.xpack.prelert.job.JobStatus;
 import org.elasticsearch.xpack.prelert.job.manager.JobManager;
 import org.elasticsearch.xpack.prelert.job.metadata.Allocation;
@@ -64,7 +65,7 @@ public class PostDataCloseAction extends Action<PostDataCloseAction.Request, Pos
 Request() {}

 public Request(String jobId) {
-this.jobId = ExceptionsHelper.requireNonNull(jobId, "jobId");
+this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
 }

 public String getJobId() {
@@ -59,10 +59,10 @@ PostDataFlushAction.RequestBuilder> {

 public static class Request extends MasterNodeRequest<Request> implements ToXContent {

-public static final ParseField CALC_INTERIM = new ParseField("calcInterim");
+public static final ParseField CALC_INTERIM = new ParseField("calc_interim");
 public static final ParseField START = new ParseField("start");
 public static final ParseField END = new ParseField("end");
-public static final ParseField ADVANCE_TIME = new ParseField("advanceTime");
+public static final ParseField ADVANCE_TIME = new ParseField("advance_time");

 private static final ObjectParser<Request, ParseFieldMatcherSupplier> PARSER = new ObjectParser<>(NAME, Request::new);
@@ -74,7 +74,7 @@ public class PutListAction extends Action<PutListAction.Request, PutListAction.R
 }

 public Request(ListDocument listDocument) {
-this.listDocument = ExceptionsHelper.requireNonNull(listDocument, "listDocument");
+this.listDocument = ExceptionsHelper.requireNonNull(listDocument, "list_document");
 }

 public ListDocument getListDocument() {
@@ -269,8 +269,8 @@ PutModelSnapshotDescriptionAction.RequestBuilder> {
 checkForClashes(request);

 if (changeCandidates.size() > 1) {
-logger.warn("More than one model found for [jobId: " + request.getJobId() + ", snapshotId: " + request.getSnapshotId()
-+ "] tuple.");
+logger.warn("More than one model found for [{}: {}, {}: {}] tuple.", Job.ID.getPreferredName(), request.getJobId(),
+ModelSnapshot.SNAPSHOT_ID.getPreferredName(), request.getSnapshotId());
 }
 ModelSnapshot modelSnapshot = changeCandidates.get(0);
 modelSnapshot.setDescription(request.getDescriptionString());
@@ -81,9 +81,9 @@ extends Action<RevertModelSnapshotAction.Request, RevertModelSnapshotAction.Resp
 public static class Request extends AcknowledgedRequest<Request> implements ToXContent {

 public static final ParseField TIME = new ParseField("time");
-public static final ParseField SNAPSHOT_ID = new ParseField("snapshotId");
+public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id");
 public static final ParseField DESCRIPTION = new ParseField("description");
-public static final ParseField DELETE_INTERVENING = new ParseField("deleteInterveningResults");
+public static final ParseField DELETE_INTERVENING = new ParseField("delete_intervening_results");

 private static ObjectParser<Request, ParseFieldMatcherSupplier> PARSER = new ObjectParser<>(NAME, Request::new);
@@ -25,7 +25,9 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.prelert.job.Job;
 import org.elasticsearch.xpack.prelert.job.manager.JobManager;
+import org.elasticsearch.xpack.prelert.utils.ExceptionsHelper;

 import java.io.IOException;
 import java.util.Objects;
@@ -55,7 +57,7 @@ extends Action<StopJobSchedulerAction.Request, StopJobSchedulerAction.Response,
 private String jobId;

 public Request(String jobId) {
-this.jobId = Objects.requireNonNull(jobId);
+this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
 }

 Request() {
@@ -121,8 +121,8 @@ public class UpdateJobSchedulerStatusAction extends Action<UpdateJobSchedulerSta
 @Override
 public String toString() {
 return "Request{" +
-"jobId='" + jobId + '\'' +
-", schedulerStatus=" + schedulerStatus +
+Job.ID.getPreferredName() + "='" + jobId + "', " +
+SchedulerState.TYPE_FIELD.getPreferredName() + '=' + schedulerStatus +
 '}';
 }
 }
@@ -48,21 +48,21 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {
 /**
 * Serialisation names
 */
-private static final ParseField ANALYSIS_CONFIG = new ParseField("analysisConfig");
-private static final ParseField BUCKET_SPAN = new ParseField("bucketSpan");
-private static final ParseField BATCH_SPAN = new ParseField("batchSpan");
-private static final ParseField CATEGORIZATION_FIELD_NAME = new ParseField("categorizationFieldName");
-private static final ParseField CATEGORIZATION_FILTERS = new ParseField("categorizationFilters");
+private static final ParseField ANALYSIS_CONFIG = new ParseField("analysis_config");
+private static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
+private static final ParseField BATCH_SPAN = new ParseField("batch_span");
+private static final ParseField CATEGORIZATION_FIELD_NAME = new ParseField("categorization_field_name");
+private static final ParseField CATEGORIZATION_FILTERS = new ParseField("categorization_filters");
 private static final ParseField LATENCY = new ParseField("latency");
 private static final ParseField PERIOD = new ParseField("period");
-private static final ParseField SUMMARY_COUNT_FIELD_NAME = new ParseField("summaryCountFieldName");
+private static final ParseField SUMMARY_COUNT_FIELD_NAME = new ParseField("summary_count_field_name");
 private static final ParseField DETECTORS = new ParseField("detectors");
 private static final ParseField INFLUENCERS = new ParseField("influencers");
-private static final ParseField OVERLAPPING_BUCKETS = new ParseField("overlappingBuckets");
-private static final ParseField RESULT_FINALIZATION_WINDOW = new ParseField("resultFinalizationWindow");
-private static final ParseField MULTIVARIATE_BY_FIELDS = new ParseField("multivariateByFields");
-private static final ParseField MULTIPLE_BUCKET_SPANS = new ParseField("multipleBucketSpans");
-private static final ParseField USER_PER_PARTITION_NORMALIZATION = new ParseField("usePerPartitionNormalization");
+private static final ParseField OVERLAPPING_BUCKETS = new ParseField("overlapping_buckets");
+private static final ParseField RESULT_FINALIZATION_WINDOW = new ParseField("result_finalization_window");
+private static final ParseField MULTIVARIATE_BY_FIELDS = new ParseField("multivariate_by_fields");
+private static final ParseField MULTIPLE_BUCKET_SPANS = new ParseField("multiple_bucket_spans");
+private static final ParseField USER_PER_PARTITION_NORMALIZATION = new ParseField("use_per_partition_normalization");

 private static final String PRELERT_CATEGORY_FIELD = "prelertcategory";
 public static final Set<String> AUTO_CREATED_FIELDS = new HashSet<>(Arrays.asList(PRELERT_CATEGORY_FIELD));
@@ -28,8 +28,8 @@ public class AnalysisLimits extends ToXContentToBytes implements Writeable {
 /**
 * Serialisation field names
 */
-public static final ParseField MODEL_MEMORY_LIMIT = new ParseField("modelMemoryLimit");
-public static final ParseField CATEGORIZATION_EXAMPLES_LIMIT = new ParseField("categorizationExamplesLimit");
+public static final ParseField MODEL_MEMORY_LIMIT = new ParseField("model_memory_limit");
+public static final ParseField CATEGORIZATION_EXAMPLES_LIMIT = new ParseField("categorization_examples_limit");

 public static final ConstructingObjectParser<AnalysisLimits, ParseFieldMatcherSupplier> PARSER = new ConstructingObjectParser<>(
 "analysis_limits", a -> new AnalysisLimits((Long) a[0], (Long) a[1]));
@@ -59,7 +59,7 @@ public class DataCounts extends ToXContentToBytes implements Writeable {
 public static final ParseField EARLIEST_RECORD_TIME = new ParseField(EARLIEST_RECORD_TIME_STR);
 public static final ParseField LATEST_RECORD_TIME = new ParseField(LATEST_RECORD_TIME_STR);

-public static final ParseField TYPE = new ParseField("dataCounts");
+public static final ParseField TYPE = new ParseField("data_counts");

 public static final ConstructingObjectParser<DataCounts, ParseFieldMatcherSupplier> PARSER =
 new ConstructingObjectParser<>("data_counts", a -> new DataCounts((String) a[0], (long) a[1], (long) a[2], (long) a[3],
@@ -87,12 +87,12 @@ public class DataDescription extends ToXContentToBytes implements Writeable {
 }
 }

-private static final ParseField DATA_DESCRIPTION_FIELD = new ParseField("dataDescription");
+private static final ParseField DATA_DESCRIPTION_FIELD = new ParseField("data_description");
 private static final ParseField FORMAT_FIELD = new ParseField("format");
-private static final ParseField TIME_FIELD_NAME_FIELD = new ParseField("timeField");
-private static final ParseField TIME_FORMAT_FIELD = new ParseField("timeFormat");
-private static final ParseField FIELD_DELIMITER_FIELD = new ParseField("fieldDelimiter");
-private static final ParseField QUOTE_CHARACTER_FIELD = new ParseField("quoteCharacter");
+private static final ParseField TIME_FIELD_NAME_FIELD = new ParseField("time_field");
+private static final ParseField TIME_FORMAT_FIELD = new ParseField("time_format");
+private static final ParseField FIELD_DELIMITER_FIELD = new ParseField("field_delimiter");
+private static final ParseField QUOTE_CHARACTER_FIELD = new ParseField("quote_character");

 /**
 * Special time format string for epoch times (seconds)
@@ -82,15 +82,15 @@ public class Detector extends ToXContentToBytes implements Writeable {
 }

 public static final ParseField DETECTOR_FIELD = new ParseField("detector");
-public static final ParseField DETECTOR_DESCRIPTION_FIELD = new ParseField("detectorDescription");
+public static final ParseField DETECTOR_DESCRIPTION_FIELD = new ParseField("detector_description");
 public static final ParseField FUNCTION_FIELD = new ParseField("function");
-public static final ParseField FIELD_NAME_FIELD = new ParseField("fieldName");
-public static final ParseField BY_FIELD_NAME_FIELD = new ParseField("byFieldName");
-public static final ParseField OVER_FIELD_NAME_FIELD = new ParseField("overFieldName");
-public static final ParseField PARTITION_FIELD_NAME_FIELD = new ParseField("partitionFieldName");
-public static final ParseField USE_NULL_FIELD = new ParseField("useNull");
-public static final ParseField EXCLUDE_FREQUENT_FIELD = new ParseField("excludeFrequent");
-public static final ParseField DETECTOR_RULES_FIELD = new ParseField("detectorRules");
+public static final ParseField FIELD_NAME_FIELD = new ParseField("field_name");
+public static final ParseField BY_FIELD_NAME_FIELD = new ParseField("by_field_name");
+public static final ParseField OVER_FIELD_NAME_FIELD = new ParseField("over_field_name");
+public static final ParseField PARTITION_FIELD_NAME_FIELD = new ParseField("partition_field_name");
+public static final ParseField USE_NULL_FIELD = new ParseField("use_null");
+public static final ParseField EXCLUDE_FREQUENT_FIELD = new ParseField("exclude_frequent");
+public static final ParseField DETECTOR_RULES_FIELD = new ParseField("detector_rules");

 public static final ObjectParser<Builder, ParseFieldMatcherSupplier> PARSER = new ObjectParser<>("detector", Builder::new);
@@ -56,25 +56,25 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
 /*
 * Field names used in serialization
 */
-public static final ParseField ID = new ParseField("jobId");
-public static final ParseField ANALYSIS_CONFIG = new ParseField("analysisConfig");
-public static final ParseField ANALYSIS_LIMITS = new ParseField("analysisLimits");
-public static final ParseField CREATE_TIME = new ParseField("createTime");
-public static final ParseField CUSTOM_SETTINGS = new ParseField("customSettings");
-public static final ParseField DATA_DESCRIPTION = new ParseField("dataDescription");
+public static final ParseField ID = new ParseField("job_id");
+public static final ParseField ANALYSIS_CONFIG = new ParseField("analysis_config");
+public static final ParseField ANALYSIS_LIMITS = new ParseField("analysis_limits");
+public static final ParseField CREATE_TIME = new ParseField("create_time");
+public static final ParseField CUSTOM_SETTINGS = new ParseField("custom_settings");
+public static final ParseField DATA_DESCRIPTION = new ParseField("data_description");
 public static final ParseField DESCRIPTION = new ParseField("description");
-public static final ParseField FINISHED_TIME = new ParseField("finishedTime");
-public static final ParseField IGNORE_DOWNTIME = new ParseField("ignoreDowntime");
-public static final ParseField LAST_DATA_TIME = new ParseField("lastDataTime");
-public static final ParseField MODEL_DEBUG_CONFIG = new ParseField("modelDebugConfig");
-public static final ParseField SCHEDULER_CONFIG = new ParseField("schedulerConfig");
-public static final ParseField RENORMALIZATION_WINDOW_DAYS = new ParseField("renormalizationWindowDays");
-public static final ParseField BACKGROUND_PERSIST_INTERVAL = new ParseField("backgroundPersistInterval");
-public static final ParseField MODEL_SNAPSHOT_RETENTION_DAYS = new ParseField("modelSnapshotRetentionDays");
-public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("resultsRetentionDays");
+public static final ParseField FINISHED_TIME = new ParseField("finished_time");
+public static final ParseField IGNORE_DOWNTIME = new ParseField("ignore_downtime");
+public static final ParseField LAST_DATA_TIME = new ParseField("last_data_time");
+public static final ParseField MODEL_DEBUG_CONFIG = new ParseField("model_debug_config");
+public static final ParseField SCHEDULER_CONFIG = new ParseField("scheduler_config");
+public static final ParseField RENORMALIZATION_WINDOW_DAYS = new ParseField("renormalization_window_days");
+public static final ParseField BACKGROUND_PERSIST_INTERVAL = new ParseField("background_persist_interval");
+public static final ParseField MODEL_SNAPSHOT_RETENTION_DAYS = new ParseField("model_snapshot_retention_days");
+public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days");
 public static final ParseField TIMEOUT = new ParseField("timeout");
 public static final ParseField TRANSFORMS = new ParseField("transforms");
-public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("modelSnapshotId");
+public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id");

 // Used for QueryPage
 public static final ParseField RESULTS_FIELD = new ParseField("jobs");
@@ -67,9 +67,9 @@ public class ModelDebugConfig extends ToXContentToBytes implements Writeable {

 private static final double MAX_PERCENTILE = 100.0;

-private static final ParseField TYPE_FIELD = new ParseField("modelDebugConfig");
-private static final ParseField WRITE_TO_FIELD = new ParseField("writeTo");
-private static final ParseField BOUNDS_PERCENTILE_FIELD = new ParseField("boundsPercentile");
+private static final ParseField TYPE_FIELD = new ParseField("model_debug_config");
+private static final ParseField WRITE_TO_FIELD = new ParseField("write_to");
+private static final ParseField BOUNDS_PERCENTILE_FIELD = new ParseField("bounds_percentile");
 private static final ParseField TERMS_FIELD = new ParseField("terms");

 public static final ConstructingObjectParser<ModelDebugConfig, ParseFieldMatcherSupplier> PARSER = new ConstructingObjectParser<>(
@@ -29,22 +29,21 @@ public class ModelSizeStats extends ToXContentToBytes implements Writeable {
 /**
 * Field Names
 */
-private static final ParseField MODEL_SIZE_STATS_FIELD = new ParseField("modelSizeStats");
-public static final ParseField JOB_ID = new ParseField("jobId");
-public static final ParseField MODEL_BYTES_FIELD = new ParseField("modelBytes");
-public static final ParseField TOTAL_BY_FIELD_COUNT_FIELD = new ParseField("totalByFieldCount");
-public static final ParseField TOTAL_OVER_FIELD_COUNT_FIELD = new ParseField("totalOverFieldCount");
-public static final ParseField TOTAL_PARTITION_FIELD_COUNT_FIELD = new ParseField("totalPartitionFieldCount");
-public static final ParseField BUCKET_ALLOCATION_FAILURES_COUNT_FIELD = new ParseField("bucketAllocationFailuresCount");
-public static final ParseField MEMORY_STATUS_FIELD = new ParseField("memoryStatus");
-public static final ParseField LOG_TIME_FIELD = new ParseField("logTime");
+private static final ParseField MODEL_SIZE_STATS_FIELD = new ParseField("model_size_stats");
+public static final ParseField MODEL_BYTES_FIELD = new ParseField("model_bytes");
+public static final ParseField TOTAL_BY_FIELD_COUNT_FIELD = new ParseField("total_by_field_count");
+public static final ParseField TOTAL_OVER_FIELD_COUNT_FIELD = new ParseField("total_over_field_count");
+public static final ParseField TOTAL_PARTITION_FIELD_COUNT_FIELD = new ParseField("total_partition_field_count");
+public static final ParseField BUCKET_ALLOCATION_FAILURES_COUNT_FIELD = new ParseField("bucket_allocation_failures_count");
+public static final ParseField MEMORY_STATUS_FIELD = new ParseField("memory_status");
+public static final ParseField LOG_TIME_FIELD = new ParseField("log_time");
 public static final ParseField TIMESTAMP_FIELD = new ParseField("timestamp");

 public static final ConstructingObjectParser<Builder, ParseFieldMatcherSupplier> PARSER = new ConstructingObjectParser<>(
 MODEL_SIZE_STATS_FIELD.getPreferredName(), a -> new Builder((String) a[0]));

 static {
-PARSER.declareString(ConstructingObjectParser.constructorArg(), JOB_ID);
+PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
 PARSER.declareLong(Builder::setModelBytes, MODEL_BYTES_FIELD);
 PARSER.declareLong(Builder::setBucketAllocationFailuresCount, BUCKET_ALLOCATION_FAILURES_COUNT_FIELD);
 PARSER.declareLong(Builder::setTotalByFieldCount, TOTAL_BY_FIELD_COUNT_FIELD);

@@ -74,7 +73,7 @@ public class ModelSizeStats extends ToXContentToBytes implements Writeable {
 /**
 * Elasticsearch type
 */
-public static final ParseField TYPE = new ParseField("modelSizeStats");
+public static final ParseField TYPE = new ParseField("model_size_stats");

 /**
 * The status of the memory monitored by the ResourceMonitor. OK is default,

@@ -183,7 +182,7 @@ public class ModelSizeStats extends ToXContentToBytes implements Writeable {
 }

 public XContentBuilder doXContentBody(XContentBuilder builder) throws IOException {
-builder.field(JOB_ID.getPreferredName(), jobId);
+builder.field(Job.ID.getPreferredName(), jobId);
 builder.field(MODEL_BYTES_FIELD.getPreferredName(), modelBytes);
 builder.field(TOTAL_BY_FIELD_COUNT_FIELD.getPreferredName(), totalByFieldCount);
 builder.field(TOTAL_OVER_FIELD_COUNT_FIELD.getPreferredName(), totalOverFieldCount);
@@ -30,28 +30,27 @@ public class ModelSnapshot extends ToXContentToBytes implements Writeable {
 /**
 * Field Names
 */
-public static final ParseField JOB_ID = new ParseField("jobId");
 public static final ParseField TIMESTAMP = new ParseField("timestamp");
 public static final ParseField DESCRIPTION = new ParseField("description");
-public static final ParseField RESTORE_PRIORITY = new ParseField("restorePriority");
-public static final ParseField SNAPSHOT_ID = new ParseField("snapshotId");
-public static final ParseField SNAPSHOT_DOC_COUNT = new ParseField("snapshotDocCount");
-public static final ParseField LATEST_RECORD_TIME = new ParseField("latestRecordTimeStamp");
-public static final ParseField LATEST_RESULT_TIME = new ParseField("latestResultTimeStamp");
+public static final ParseField RESTORE_PRIORITY = new ParseField("restore_priority");
+public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id");
+public static final ParseField SNAPSHOT_DOC_COUNT = new ParseField("snapshot_doc_count");
+public static final ParseField LATEST_RECORD_TIME = new ParseField("latest_record_time_stamp");
+public static final ParseField LATEST_RESULT_TIME = new ParseField("latest_result_time_stamp");

 // Used for QueryPage
-public static final ParseField RESULTS_FIELD = new ParseField("modelSnapshots");
+public static final ParseField RESULTS_FIELD = new ParseField("model_snapshots");

 /**
 * Elasticsearch type
 */
-public static final ParseField TYPE = new ParseField("modelSnapshot");
+public static final ParseField TYPE = new ParseField("model_snapshot");

 public static final ConstructingObjectParser<ModelSnapshot, ParseFieldMatcherSupplier> PARSER =
 new ConstructingObjectParser<>(TYPE.getPreferredName(), a -> new ModelSnapshot((String) a[0]));

 static {
-PARSER.declareString(ConstructingObjectParser.constructorArg(), JOB_ID);
+PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
 PARSER.declareField(ModelSnapshot::setTimestamp, p -> {
 if (p.currentToken() == Token.VALUE_NUMBER) {
 return new Date(p.longValue());

@@ -161,7 +160,7 @@ public class ModelSnapshot extends ToXContentToBytes implements Writeable {
 @Override
 public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
 builder.startObject();
-builder.field(JOB_ID.getPreferredName(), jobId);
+builder.field(Job.ID.getPreferredName(), jobId);
 if (timestamp != null) {
 builder.field(TIMESTAMP.getPreferredName(), timestamp.getTime());
 }
@@ -6,6 +6,8 @@
 package org.elasticsearch.xpack.prelert.job;


+import org.elasticsearch.common.ParseField;
+
 /**
 * The serialised models can get very large and only the C++ code
 * understands how to decode them, hence there is no reason to load

@@ -21,7 +23,7 @@ public class ModelState
 /**
 * The type of this class used when persisting the data
 */
-public static final String TYPE = "modelState";
+public static final ParseField TYPE = new ParseField("model_state");

 private ModelState()
 {
@@ -54,20 +54,20 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
 public static final String DOC_COUNT = "doc_count";

 // NORELEASE: no camel casing:
-public static final ParseField DATA_SOURCE = new ParseField("dataSource");
-public static final ParseField QUERY_DELAY = new ParseField("queryDelay");
+public static final ParseField DATA_SOURCE = new ParseField("data_source");
+public static final ParseField QUERY_DELAY = new ParseField("query_delay");
 public static final ParseField FREQUENCY = new ParseField("frequency");
-public static final ParseField FILE_PATH = new ParseField("filePath");
-public static final ParseField TAIL_FILE = new ParseField("tailFile");
-public static final ParseField BASE_URL = new ParseField("baseUrl");
+public static final ParseField FILE_PATH = new ParseField("file_path");
+public static final ParseField TAIL_FILE = new ParseField("tail_file");
+public static final ParseField BASE_URL = new ParseField("base_url");
 public static final ParseField USERNAME = new ParseField("username");
 public static final ParseField PASSWORD = new ParseField("password");
-public static final ParseField ENCRYPTED_PASSWORD = new ParseField("encryptedPassword");
+public static final ParseField ENCRYPTED_PASSWORD = new ParseField("encrypted_password");
 public static final ParseField INDEXES = new ParseField("indexes");
 public static final ParseField TYPES = new ParseField("types");
 public static final ParseField QUERY = new ParseField("query");
-public static final ParseField RETRIEVE_WHOLE_SOURCE = new ParseField("retrieveWholeSource");
-public static final ParseField SCROLL_SIZE = new ParseField("scrollSize");
+public static final ParseField RETRIEVE_WHOLE_SOURCE = new ParseField("retrieve_whole_source");
+public static final ParseField SCROLL_SIZE = new ParseField("scroll_size");
 public static final ParseField AGGREGATIONS = new ParseField("aggregations");
 public static final ParseField AGGS = new ParseField("aggs");
 /**
@@ -23,8 +23,7 @@ import java.util.Objects;

 public class SchedulerState extends ToXContentToBytes implements Writeable {

-// NORELEASE: no camel casing:
-public static final ParseField TYPE_FIELD = new ParseField("schedulerState");
+public static final ParseField TYPE_FIELD = new ParseField("scheduler_state");
 public static final ParseField STATUS = new ParseField("status");
 public static final ParseField START_TIME_MILLIS = new ParseField("start");
 public static final ParseField END_TIME_MILLIS = new ParseField("end");
@@ -22,12 +22,12 @@ import java.util.Date;
 import java.util.Objects;

 public class AuditActivity extends ToXContentToBytes implements Writeable {
-public static final ParseField TYPE = new ParseField("auditActivity");
+public static final ParseField TYPE = new ParseField("audit_activity");

-public static final ParseField TOTAL_JOBS = new ParseField("totalJobs");
-public static final ParseField TOTAL_DETECTORS = new ParseField("totalDetectors");
-public static final ParseField RUNNING_JOBS = new ParseField("runningJobs");
-public static final ParseField RUNNING_DETECTORS = new ParseField("runningDetectors");
+public static final ParseField TOTAL_JOBS = new ParseField("total_jobs");
+public static final ParseField TOTAL_DETECTORS = new ParseField("total_detectors");
+public static final ParseField RUNNING_JOBS = new ParseField("running_jobs");
+public static final ParseField RUNNING_DETECTORS = new ParseField("running_detectors");
 public static final ParseField TIMESTAMP = new ParseField("timestamp");

 public static final ObjectParser<AuditActivity, ParseFieldMatcherSupplier> PARSER = new ObjectParser<>(TYPE.getPreferredName(),
@@ -24,7 +24,7 @@ import java.util.Date;
 import java.util.Objects;

 public class AuditMessage extends ToXContentToBytes implements Writeable {
-public static final ParseField TYPE = new ParseField("auditMessage");
+public static final ParseField TYPE = new ParseField("audit_message");

 public static final ParseField MESSAGE = new ParseField("message");
 public static final ParseField LEVEL = new ParseField("level");
@@ -25,11 +25,11 @@ import java.util.EnumSet;
 import java.util.Objects;

 public class RuleCondition extends ToXContentToBytes implements Writeable {
-public static final ParseField CONDITION_TYPE_FIELD = new ParseField("conditionType");
-public static final ParseField RULE_CONDITION_FIELD = new ParseField("ruleCondition");
-public static final ParseField FIELD_NAME_FIELD = new ParseField("fieldName");
-public static final ParseField FIELD_VALUE_FIELD = new ParseField("fieldValue");
-public static final ParseField VALUE_LIST_FIELD = new ParseField("valueList");
+public static final ParseField CONDITION_TYPE_FIELD = new ParseField("condition_type");
+public static final ParseField RULE_CONDITION_FIELD = new ParseField("rule_condition");
+public static final ParseField FIELD_NAME_FIELD = new ParseField("field_name");
+public static final ParseField FIELD_VALUE_FIELD = new ParseField("field_value");
+public static final ParseField VALUE_LIST_FIELD = new ParseField("value_list");

 public static final ConstructingObjectParser<RuleCondition, ParseFieldMatcherSupplier> PARSER =
 new ConstructingObjectParser<>(RULE_CONDITION_FIELD.getPreferredName(),
@@ -56,7 +56,7 @@ public class CppLogMessage extends ToXContentToBytes implements Writeable {
 /**
 * Elasticsearch type
 */
-public static final ParseField TYPE = new ParseField("cppLogMessage");
+public static final ParseField TYPE = new ParseField("cpp_log_message");

 private String logger = "";
 private Date timestamp;
@@ -125,7 +125,7 @@ public final class Messages
 public static final String JOB_CONFIG_OVERLAPPING_BUCKETS_INCOMPATIBLE_FUNCTION = "job.config.overlapping.buckets.incompatible."
 + "function";
 public static final String JOB_CONFIG_OVERFIELD_NEEDS_ANOTHER = "job.config.overField.needs.another";
-public static final String JOB_CONFIG_MULTIPLE_BUCKETSPANS_REQUIRE_BUCKETSPAN = "job.config.multiple.bucketspans.require.bucketspan";
+public static final String JOB_CONFIG_MULTIPLE_BUCKETSPANS_REQUIRE_BUCKETSPAN = "job.config.multiple.bucketspans.require.bucket_span";
 public static final String JOB_CONFIG_MULTIPLE_BUCKETSPANS_MUST_BE_MULTIPLE = "job.config.multiple.bucketspans.must.be.multiple";
 public static final String JOB_CONFIG_PER_PARTITION_NORMALIZATION_REQUIRES_PARTITION_FIELD = "job.config.per.partition.normalisation."
 + "requires.partition.field";
@@ -106,7 +106,7 @@ public class ElasticsearchBulkDeleter implements JobDataDeleter {
 // too big and has no mappings
 for (int i = 0; i < docCount; ++i) {
 String stateId = snapshotId + '_' + i;
-bulkRequestBuilder.add(client.prepareDelete(indexName, ModelState.TYPE, stateId));
+bulkRequestBuilder.add(client.prepareDelete(indexName, ModelState.TYPE.getPreferredName(), stateId));
 ++deletedModelStateCount;
 }
@@ -199,7 +199,7 @@ public class ElasticsearchJobProvider implements JobProvider {
 createIndexRequest.mapping(CategorizerState.TYPE, categorizerStateMapping);
 createIndexRequest.mapping(CategoryDefinition.TYPE.getPreferredName(), categoryDefinitionMapping);
 createIndexRequest.mapping(Quantiles.TYPE.getPreferredName(), quantilesMapping);
-createIndexRequest.mapping(ModelState.TYPE, modelStateMapping);
+createIndexRequest.mapping(ModelState.TYPE.getPreferredName(), modelStateMapping);
 createIndexRequest.mapping(ModelSnapshot.TYPE.getPreferredName(), modelSnapshotMapping);
 createIndexRequest.mapping(ModelSizeStats.TYPE.getPreferredName(), modelSizeStatsMapping);
 createIndexRequest.mapping(ModelDebugOutput.TYPE.getPreferredName(), modelDebugMapping);

@@ -925,7 +925,7 @@ public class ElasticsearchJobProvider implements JobProvider {

 LOGGER.trace("ES API CALL: get ID {} type {} from index {}", docId, ModelState.TYPE, indexName);

-GetResponse stateResponse = client.prepareGet(indexName, ModelState.TYPE, docId).get();
+GetResponse stateResponse = client.prepareGet(indexName, ModelState.TYPE.getPreferredName(), docId).get();
 if (!stateResponse.isExists()) {
 LOGGER.error("Expected {} documents for model state for {} snapshot {} but failed to find {}",
 numDocs, jobId, modelSnapshot.getSnapshotId(), docId);
@@ -487,7 +487,7 @@ public class ElasticsearchMappings {
 public static XContentBuilder modelStateMapping() throws IOException {
 return jsonBuilder()
 .startObject()
-.startObject(ModelState.TYPE)
+.startObject(ModelState.TYPE.getPreferredName())
 .field(ENABLED, false)
 .startObject(ALL)
 .field(ENABLED, false)
@@ -29,6 +29,7 @@ import java.util.Objects;
 public final class QueryPage<T extends ToXContent & Writeable> extends ToXContentToBytes implements Writeable {

 public static final ParseField COUNT = new ParseField("count");
+public static final ParseField DEFAULT_RESULTS_FIELD = new ParseField("results_field");

 private final ParseField resultsField;
 private final List<T> results;

@@ -37,7 +38,7 @@ public final class QueryPage<T extends ToXContent & Writeable> extends ToXConten
 public QueryPage(List<T> results, long count, ParseField resultsField) {
 this.results = results;
 this.count = count;
-this.resultsField = ExceptionsHelper.requireNonNull(resultsField, "resultsField");
+this.resultsField = ExceptionsHelper.requireNonNull(resultsField, DEFAULT_RESULTS_FIELD.getPreferredName());
 }

 public QueryPage(StreamInput in, Reader<T> hitReader) throws IOException {
@@ -108,7 +108,7 @@ public class InterimResultsParams {

 private Long checkAdvanceTimeParam() {
 if (advanceTime != null && !advanceTime.isEmpty()) {
-return paramToEpochIfValidOrThrow("advanceTime", advanceTime) / TimeRange.MILLISECONDS_IN_SECOND;
+return paramToEpochIfValidOrThrow("advance_time", advanceTime) / TimeRange.MILLISECONDS_IN_SECOND;
 }
 return null;
 }
@@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.prelert.job.Job;

 import java.io.IOException;
 import java.util.Date;
@@ -28,9 +29,8 @@ public class Quantiles extends ToXContentToBytes implements Writeable {
 /**
 * Field Names
 */
-public static final ParseField JOB_ID = new ParseField("jobId");
 public static final ParseField TIMESTAMP = new ParseField("timestamp");
-public static final ParseField QUANTILE_STATE = new ParseField("quantileState");
+public static final ParseField QUANTILE_STATE = new ParseField("quantile_state");

 /**
 * Elasticsearch type

@@ -41,7 +41,7 @@ public class Quantiles extends ToXContentToBytes implements Writeable {
 TYPE.getPreferredName(), a -> new Quantiles((String) a[0], (Date) a[1], (String) a[2]));

 static {
-PARSER.declareString(ConstructingObjectParser.constructorArg(), JOB_ID);
+PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
 PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> new Date(p.longValue()), TIMESTAMP, ValueType.LONG);
 PARSER.declareString(ConstructingObjectParser.constructorArg(), QUANTILE_STATE);
 }

@@ -78,7 +78,7 @@ public class Quantiles extends ToXContentToBytes implements Writeable {
 @Override
 public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
 builder.startObject();
-builder.field(JOB_ID.getPreferredName(), jobId);
+builder.field(Job.ID.getPreferredName(), jobId);
 if (timestamp != null) {
 builder.field(TIMESTAMP.getPreferredName(), timestamp.getTime());
 }
@@ -23,20 +23,20 @@ import java.util.Objects;
 */
 public class AnomalyCause extends ToXContentToBytes implements Writeable
 {
-public static final ParseField ANOMALY_CAUSE = new ParseField("anomalyCause");
+public static final ParseField ANOMALY_CAUSE = new ParseField("anomaly_cause");
 /**
 * Result fields
 */
 public static final ParseField PROBABILITY = new ParseField("probability");
-public static final ParseField OVER_FIELD_NAME = new ParseField("overFieldName");
-public static final ParseField OVER_FIELD_VALUE = new ParseField("overFieldValue");
-public static final ParseField BY_FIELD_NAME = new ParseField("byFieldName");
-public static final ParseField BY_FIELD_VALUE = new ParseField("byFieldValue");
-public static final ParseField CORRELATED_BY_FIELD_VALUE = new ParseField("correlatedByFieldValue");
-public static final ParseField PARTITION_FIELD_NAME = new ParseField("partitionFieldName");
-public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partitionFieldValue");
+public static final ParseField OVER_FIELD_NAME = new ParseField("over_field_name");
+public static final ParseField OVER_FIELD_VALUE = new ParseField("over_field_value");
+public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name");
+public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value");
+public static final ParseField CORRELATED_BY_FIELD_VALUE = new ParseField("correlated_by_field_value");
+public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name");
+public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value");
 public static final ParseField FUNCTION = new ParseField("function");
-public static final ParseField FUNCTION_DESCRIPTION = new ParseField("functionDescription");
+public static final ParseField FUNCTION_DESCRIPTION = new ParseField("function_description");
 public static final ParseField TYPICAL = new ParseField("typical");
 public static final ParseField ACTUAL = new ParseField("actual");
 public static final ParseField INFLUENCERS = new ParseField("influencers");

@@ -44,7 +44,7 @@ public class AnomalyCause extends ToXContentToBytes implements Writeable
 /**
 * Metric Results
 */
-public static final ParseField FIELD_NAME = new ParseField("fieldName");
+public static final ParseField FIELD_NAME = new ParseField("field_name");

 public static final ObjectParser<AnomalyCause, ParseFieldMatcherSupplier> PARSER = new ObjectParser<>(ANOMALY_CAUSE.getPreferredName(),
 AnomalyCause::new);
@@ -15,6 +15,7 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
+import org.elasticsearch.xpack.prelert.job.Job;
 import org.elasticsearch.xpack.prelert.utils.time.TimeUtils;

 import java.io.IOException;
@@ -37,21 +38,20 @@ public class AnomalyRecord extends ToXContentToBytes implements Writeable {
 /**
 * Result fields (all detector types)
 */
-public static final ParseField JOB_ID = new ParseField("jobId");
-public static final ParseField DETECTOR_INDEX = new ParseField("detectorIndex");
+public static final ParseField DETECTOR_INDEX = new ParseField("detector_index");
 public static final ParseField PROBABILITY = new ParseField("probability");
-public static final ParseField BY_FIELD_NAME = new ParseField("byFieldName");
-public static final ParseField BY_FIELD_VALUE = new ParseField("byFieldValue");
-public static final ParseField CORRELATED_BY_FIELD_VALUE = new ParseField("correlatedByFieldValue");
-public static final ParseField PARTITION_FIELD_NAME = new ParseField("partitionFieldName");
-public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partitionFieldValue");
+public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name");
+public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value");
+public static final ParseField CORRELATED_BY_FIELD_VALUE = new ParseField("correlated_by_field_value");
+public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name");
+public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value");
 public static final ParseField FUNCTION = new ParseField("function");
-public static final ParseField FUNCTION_DESCRIPTION = new ParseField("functionDescription");
+public static final ParseField FUNCTION_DESCRIPTION = new ParseField("function_description");
 public static final ParseField TYPICAL = new ParseField("typical");
 public static final ParseField ACTUAL = new ParseField("actual");
-public static final ParseField IS_INTERIM = new ParseField("isInterim");
+public static final ParseField IS_INTERIM = new ParseField("is_interim");
 public static final ParseField INFLUENCERS = new ParseField("influencers");
-public static final ParseField BUCKET_SPAN = new ParseField("bucketSpan");
+public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
 public static final ParseField TIMESTAMP = new ParseField("timestamp");

 // Used for QueryPage

@@ -60,27 +60,27 @@ public class AnomalyRecord extends ToXContentToBytes implements Writeable {
 /**
 * Metric Results (including population metrics)
 */
-public static final ParseField FIELD_NAME = new ParseField("fieldName");
+public static final ParseField FIELD_NAME = new ParseField("field_name");

 /**
 * Population results
 */
-public static final ParseField OVER_FIELD_NAME = new ParseField("overFieldName");
-public static final ParseField OVER_FIELD_VALUE = new ParseField("overFieldValue");
+public static final ParseField OVER_FIELD_NAME = new ParseField("over_field_name");
+public static final ParseField OVER_FIELD_VALUE = new ParseField("over_field_value");
 public static final ParseField CAUSES = new ParseField("causes");

 /**
 * Normalisation
 */
-public static final ParseField ANOMALY_SCORE = new ParseField("anomalyScore");
-public static final ParseField NORMALIZED_PROBABILITY = new ParseField("normalizedProbability");
-public static final ParseField INITIAL_NORMALIZED_PROBABILITY = new ParseField("initialNormalizedProbability");
+public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score");
+public static final ParseField NORMALIZED_PROBABILITY = new ParseField("normalized_probability");
+public static final ParseField INITIAL_NORMALIZED_PROBABILITY = new ParseField("initial_normalized_probability");

 public static final ConstructingObjectParser<AnomalyRecord, ParseFieldMatcherSupplier> PARSER =
 new ConstructingObjectParser<>(RESULT_TYPE_VALUE, a -> new AnomalyRecord((String) a[0]));

 static {
-PARSER.declareString(ConstructingObjectParser.constructorArg(), JOB_ID);
+PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
 PARSER.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE);
 PARSER.declareDouble(AnomalyRecord::setProbability, PROBABILITY);
 PARSER.declareDouble(AnomalyRecord::setAnomalyScore, ANOMALY_SCORE);

@@ -242,7 +242,7 @@ public class AnomalyRecord extends ToXContentToBytes implements Writeable {
 @Override
 public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
 builder.startObject();
-builder.field(JOB_ID.getPreferredName(), jobId);
+builder.field(Job.ID.getPreferredName(), jobId);
 builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
 builder.field(PROBABILITY.getPreferredName(), probability);
 builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore);
@@ -39,17 +39,17 @@ public class Bucket extends ToXContentToBytes implements Writeable {
 public static final ParseField JOB_ID = Job.ID;

 public static final ParseField TIMESTAMP = new ParseField("timestamp");
-public static final ParseField ANOMALY_SCORE = new ParseField("anomalyScore");
-public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initialAnomalyScore");
-public static final ParseField MAX_NORMALIZED_PROBABILITY = new ParseField("maxNormalizedProbability");
-public static final ParseField IS_INTERIM = new ParseField("isInterim");
-public static final ParseField RECORD_COUNT = new ParseField("recordCount");
-public static final ParseField EVENT_COUNT = new ParseField("eventCount");
+public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score");
+public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initial_anomaly_score");
+public static final ParseField MAX_NORMALIZED_PROBABILITY = new ParseField("max_normalized_probability");
+public static final ParseField IS_INTERIM = new ParseField("is_interim");
+public static final ParseField RECORD_COUNT = new ParseField("record_count");
+public static final ParseField EVENT_COUNT = new ParseField("event_count");
 public static final ParseField RECORDS = new ParseField("records");
-public static final ParseField BUCKET_INFLUENCERS = new ParseField("bucketInfluencers");
-public static final ParseField BUCKET_SPAN = new ParseField("bucketSpan");
-public static final ParseField PROCESSING_TIME_MS = new ParseField("processingTimeMs");
-public static final ParseField PARTITION_SCORES = new ParseField("partitionScores");
+public static final ParseField BUCKET_INFLUENCERS = new ParseField("bucket_influencers");
+public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
+public static final ParseField PROCESSING_TIME_MS = new ParseField("processing_time_ms");
+public static final ParseField PARTITION_SCORES = new ParseField("partition_scores");

 // Used for QueryPage
 public static final ParseField RESULTS_FIELD = new ParseField("buckets");
@@ -32,13 +32,12 @@ public class BucketInfluencer extends ToXContentToBytes implements Writeable {
 /*
 * Field names
 */
-public static final ParseField JOB_ID = new ParseField("jobId");
-public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencerFieldName");
-public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initialAnomalyScore");
-public static final ParseField ANOMALY_SCORE = new ParseField("anomalyScore");
-public static final ParseField RAW_ANOMALY_SCORE = new ParseField("rawAnomalyScore");
+public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name");
+public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initial_anomaly_score");
+public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score");
+public static final ParseField RAW_ANOMALY_SCORE = new ParseField("raw_anomaly_score");
 public static final ParseField PROBABILITY = new ParseField("probability");
-public static final ParseField IS_INTERIM = new ParseField("isInterim");
+public static final ParseField IS_INTERIM = new ParseField("is_interim");
 public static final ParseField TIMESTAMP = new ParseField("timestamp");

 public static final ConstructingObjectParser<BucketInfluencer, ParseFieldMatcherSupplier> PARSER =
@@ -25,11 +25,11 @@ import java.util.TreeSet;

public class CategoryDefinition extends ToXContentToBytes implements Writeable {

public static final ParseField TYPE = new ParseField("categoryDefinition");
public static final ParseField CATEGORY_ID = new ParseField("categoryId");
public static final ParseField TYPE = new ParseField("category_definition");
public static final ParseField CATEGORY_ID = new ParseField("category_id");
public static final ParseField TERMS = new ParseField("terms");
public static final ParseField REGEX = new ParseField("regex");
public static final ParseField MAX_MATCHING_LENGTH = new ParseField("maxMatchingLength");
public static final ParseField MAX_MATCHING_LENGTH = new ParseField("max_matching_length");
public static final ParseField EXAMPLES = new ParseField("examples");

// Used for QueryPage
@@ -27,8 +27,8 @@ public class Influence extends ToXContentToBytes implements Writeable {
 * Note all publicly exposed field names are "influencer" not "influence"
 */
public static final ParseField INFLUENCER = new ParseField("influencer");
public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencerFieldName");
public static final ParseField INFLUENCER_FIELD_VALUES = new ParseField("influencerFieldValues");
public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name");
public static final ParseField INFLUENCER_FIELD_VALUES = new ParseField("influencer_field_values");

@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<Influence, ParseFieldMatcherSupplier> PARSER = new ConstructingObjectParser<>(
@@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.xpack.prelert.job.Job;
import org.elasticsearch.xpack.prelert.utils.time.TimeUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -31,13 +32,12 @@ public class Influencer extends ToXContentToBytes implements Writeable {
/*
 * Field names
 */
public static final ParseField JOB_ID = new ParseField("jobId");
public static final ParseField PROBABILITY = new ParseField("probability");
public static final ParseField TIMESTAMP = new ParseField("timestamp");
public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencerFieldName");
public static final ParseField INFLUENCER_FIELD_VALUE = new ParseField("influencerFieldValue");
public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initialAnomalyScore");
public static final ParseField ANOMALY_SCORE = new ParseField("anomalyScore");
public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name");
public static final ParseField INFLUENCER_FIELD_VALUE = new ParseField("influencer_field_value");
public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initial_anomaly_score");
public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score");

// Used for QueryPage
public static final ParseField RESULTS_FIELD = new ParseField("influencers");
@@ -46,7 +46,7 @@ public class Influencer extends ToXContentToBytes implements Writeable {
RESULT_TYPE_FIELD.getPreferredName(), a -> new Influencer((String) a[0], (String) a[1], (String) a[2]));

static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), JOB_ID);
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME);
PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUE);
PARSER.declareString((influencer, s) -> {}, Result.RESULT_TYPE);

@@ -117,7 +117,7 @@ public class Influencer extends ToXContentToBytes implements Writeable {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(JOB_ID.getPreferredName(), jobId);
builder.field(Job.ID.getPreferredName(), jobId);
builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), influenceField);
builder.field(INFLUENCER_FIELD_VALUE.getPreferredName(), influenceValue);
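The toXContent side resolves keys through the same constants, so serialized documents switch to snake_case with no further edits. A hedged sketch of the output this now produces, reusing the builder API from the method above (the values are invented):

XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
builder.field(Job.ID.getPreferredName(), "farequote");               // "job_id"
builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), "airline");  // "influencer_field_name"
builder.field(INFLUENCER_FIELD_VALUE.getPreferredName(), "AAL");     // "influencer_field_value"
builder.endObject();
// builder.string() -> {"job_id":"farequote","influencer_field_name":"airline","influencer_field_value":"AAL"}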
@@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.xpack.prelert.job.Job;
import org.elasticsearch.xpack.prelert.utils.time.TimeUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;

@@ -28,26 +29,25 @@ import java.util.Objects;
 */
public class ModelDebugOutput extends ToXContentToBytes implements Writeable
{
public static final ParseField TYPE = new ParseField("modelDebugOutput");
public static final ParseField JOB_ID = new ParseField("jobId");
public static final ParseField TYPE = new ParseField("model_debug_output");
public static final ParseField TIMESTAMP = new ParseField("timestamp");
public static final ParseField PARTITION_FIELD_NAME = new ParseField("partitionFieldName");
public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partitionFieldValue");
public static final ParseField OVER_FIELD_NAME = new ParseField("overFieldName");
public static final ParseField OVER_FIELD_VALUE = new ParseField("overFieldValue");
public static final ParseField BY_FIELD_NAME = new ParseField("byFieldName");
public static final ParseField BY_FIELD_VALUE = new ParseField("byFieldValue");
public static final ParseField DEBUG_FEATURE = new ParseField("debugFeature");
public static final ParseField DEBUG_LOWER = new ParseField("debugLower");
public static final ParseField DEBUG_UPPER = new ParseField("debugUpper");
public static final ParseField DEBUG_MEDIAN = new ParseField("debugMedian");
public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name");
public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value");
public static final ParseField OVER_FIELD_NAME = new ParseField("over_field_name");
public static final ParseField OVER_FIELD_VALUE = new ParseField("over_field_value");
public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name");
public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value");
public static final ParseField DEBUG_FEATURE = new ParseField("debug_feature");
public static final ParseField DEBUG_LOWER = new ParseField("debug_lower");
public static final ParseField DEBUG_UPPER = new ParseField("debug_upper");
public static final ParseField DEBUG_MEDIAN = new ParseField("debug_median");
public static final ParseField ACTUAL = new ParseField("actual");

public static final ConstructingObjectParser<ModelDebugOutput, ParseFieldMatcherSupplier> PARSER =
new ConstructingObjectParser<>(TYPE.getPreferredName(), a -> new ModelDebugOutput((String) a[0]));

static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), JOB_ID);
PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
PARSER.declareField(ModelDebugOutput::setTimestamp, p -> {
if (p.currentToken() == Token.VALUE_NUMBER) {
return new Date(p.longValue());
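The timestamp is declared with a custom value parser so the field accepts either epoch milliseconds or a date string. A sketch of how such a declaration is typically completed — the string branch via TimeUtils is an assumption inferred from the import above, since this hunk cuts off mid-lambda:

PARSER.declareField(ModelDebugOutput::setTimestamp, p -> {
    if (p.currentToken() == Token.VALUE_NUMBER) {
        return new Date(p.longValue());                           // epoch millis
    } else if (p.currentToken() == Token.VALUE_STRING) {
        return new Date(TimeUtils.dateStringToEpoch(p.text()));   // formatted date; assumed helper
    }
    throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "]");
}, TIMESTAMP, ValueType.VALUE);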
@@ -132,7 +132,7 @@ public class ModelDebugOutput extends ToXContentToBytes implements Writeable
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(JOB_ID.getPreferredName(), jobId);
builder.field(Job.ID.getPreferredName(), jobId);
if (timestamp != null) {
builder.field(TIMESTAMP.getPreferredName(), timestamp.getTime());
}
@@ -17,7 +17,7 @@ import java.io.IOException;
import java.util.Objects;

public class PartitionScore extends ToXContentToBytes implements Writeable {
public static final ParseField PARTITION_SCORE = new ParseField("partitionScore");
public static final ParseField PARTITION_SCORE = new ParseField("partition_score");

private String partitionFieldValue;
private String partitionFieldName;
@@ -38,12 +38,12 @@ public class PerPartitionMaxProbabilities extends ToXContentToBytes implements W
/**
 * Result type
 */
public static final String RESULT_TYPE_VALUE = "partitionNormalizedProbs";
public static final String RESULT_TYPE_VALUE = "partition_normalized_probs";

/*
 * Field Names
 */
public static final ParseField PER_PARTITION_MAX_PROBABILITIES = new ParseField("perPartitionMaxProbabilities");
public static final ParseField PER_PARTITION_MAX_PROBABILITIES = new ParseField("per_partition_max_probabilities");

@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<PerPartitionMaxProbabilities, ParseFieldMatcherSupplier> PARSER =
@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.prelert.job.results;

import org.elasticsearch.xpack.prelert.job.DataCounts;
import org.elasticsearch.xpack.prelert.job.Job;
import org.elasticsearch.xpack.prelert.job.ModelSizeStats;
import org.elasticsearch.xpack.prelert.job.ModelSnapshot;
import org.elasticsearch.xpack.prelert.job.quantiles.Quantiles;

@@ -27,7 +28,7 @@ public final class ReservedFieldNames {
/**
 * jobId isn't in this package, so redefine.
 */
private static final String JOB_ID_NAME = "jobId";
private static final String JOB_ID_NAME = Job.ID.getPreferredName();

/**
 * @timestamp isn't in this package, so redefine.
@@ -30,7 +30,7 @@ public class RestPostDataAction extends BaseRestHandler {
public RestPostDataAction(Settings settings, RestController controller, PostDataAction.TransportAction transportPostDataAction) {
super(settings);
this.transportPostDataAction = transportPostDataAction;
controller.registerHandler(RestRequest.Method.POST, PrelertPlugin.BASE_PATH + "data/{jobId}", this);
controller.registerHandler(RestRequest.Method.POST, PrelertPlugin.BASE_PATH + "data/{" + Job.ID.getPreferredName() + "}", this);
}

@Override
@@ -15,13 +15,12 @@ import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.AcknowledgedRestListener;
import org.elasticsearch.xpack.prelert.PrelertPlugin;
import org.elasticsearch.xpack.prelert.action.PostDataCloseAction;
import org.elasticsearch.xpack.prelert.job.Job;

import java.io.IOException;

public class RestPostDataCloseAction extends BaseRestHandler {

private static final ParseField JOB_ID = new ParseField("jobId");

private final PostDataCloseAction.TransportAction transportPostDataCloseAction;

@Inject

@@ -29,12 +28,14 @@ public class RestPostDataCloseAction extends BaseRestHandler {
PostDataCloseAction.TransportAction transportPostDataCloseAction) {
super(settings);
this.transportPostDataCloseAction = transportPostDataCloseAction;
controller.registerHandler(RestRequest.Method.POST, PrelertPlugin.BASE_PATH + "data/{jobId}/_close", this);
controller.registerHandler(RestRequest.Method.POST, PrelertPlugin.BASE_PATH
+ "data/{" + Job.ID.getPreferredName() + "}/_close", this);
}

@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
PostDataCloseAction.Request postDataCloseRequest = new PostDataCloseAction.Request(restRequest.param(JOB_ID.getPreferredName()));
PostDataCloseAction.Request postDataCloseRequest = new PostDataCloseAction.Request(
restRequest.param(Job.ID.getPreferredName()));

return channel -> transportPostDataCloseAction.execute(postDataCloseRequest, new AcknowledgedRestListener<>(channel));
}
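Deriving the route from the ParseField keeps the URL placeholder and the later restRequest.param() lookup in lockstep, because both read the same string. What the registration above evaluates to, assuming BASE_PATH is "/_xpack/prelert/" (the prefix the integration tests further down also use):

String path = PrelertPlugin.BASE_PATH + "data/{" + Job.ID.getPreferredName() + "}/_close";
// path == "/_xpack/prelert/data/{job_id}/_close"
// restRequest.param(Job.ID.getPreferredName()) then reads that {job_id} segment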
@@ -15,14 +15,13 @@ import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.AcknowledgedRestListener;
import org.elasticsearch.xpack.prelert.PrelertPlugin;
import org.elasticsearch.xpack.prelert.action.DeleteModelSnapshotAction;
import org.elasticsearch.xpack.prelert.job.Job;
import org.elasticsearch.xpack.prelert.job.ModelSnapshot;

import java.io.IOException;

public class RestDeleteModelSnapshotAction extends BaseRestHandler {

private static final ParseField JOB_ID = new ParseField("jobId");
private static final ParseField SNAPSHOT_ID = new ParseField("snapshotId");

private final DeleteModelSnapshotAction.TransportAction transportAction;

@Inject

@@ -30,13 +29,15 @@ public class RestDeleteModelSnapshotAction extends BaseRestHandler {
DeleteModelSnapshotAction.TransportAction transportAction) {
super(settings);
this.transportAction = transportAction;
controller.registerHandler(RestRequest.Method.DELETE, PrelertPlugin.BASE_PATH + "modelsnapshots/{jobId}/{snapshotId}", this);
controller.registerHandler(RestRequest.Method.DELETE, PrelertPlugin.BASE_PATH + "modelsnapshots/{"
+ Job.ID.getPreferredName() + "}/{" + ModelSnapshot.SNAPSHOT_ID.getPreferredName() + "}", this);
}

@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
DeleteModelSnapshotAction.Request deleteModelSnapshot = new DeleteModelSnapshotAction.Request(
restRequest.param(JOB_ID.getPreferredName()), restRequest.param(SNAPSHOT_ID.getPreferredName()));
restRequest.param(Job.ID.getPreferredName()),
restRequest.param(ModelSnapshot.SNAPSHOT_ID.getPreferredName()));

return channel -> transportAction.execute(deleteModelSnapshot, new AcknowledgedRestListener<>(channel));
}
@@ -19,21 +19,14 @@ import org.elasticsearch.rest.action.RestActions;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.prelert.PrelertPlugin;
import org.elasticsearch.xpack.prelert.action.GetModelSnapshotsAction;
import org.elasticsearch.xpack.prelert.action.GetModelSnapshotsAction.Request;
import org.elasticsearch.xpack.prelert.job.Job;
import org.elasticsearch.xpack.prelert.job.results.PageParams;

import java.io.IOException;

public class RestGetModelSnapshotsAction extends BaseRestHandler {

private static final ParseField JOB_ID = new ParseField("jobId");
private static final ParseField SORT = new ParseField("sort");
private static final ParseField DESC_ORDER = new ParseField("desc");
private static final ParseField SIZE = new ParseField("size");
private static final ParseField FROM = new ParseField("from");
private static final ParseField START = new ParseField("start");
private static final ParseField END = new ParseField("end");
private static final ParseField DESCRIPTION = new ParseField("description");

// Even though these are null, setting up the defaults in case
// we want to change them later
private final String DEFAULT_SORT = null;

@@ -49,34 +42,37 @@ public class RestGetModelSnapshotsAction extends BaseRestHandler {
GetModelSnapshotsAction.TransportAction transportGetModelSnapshotsAction) {
super(settings);
this.transportGetModelSnapshotsAction = transportGetModelSnapshotsAction;
controller.registerHandler(RestRequest.Method.GET, PrelertPlugin.BASE_PATH + "modelsnapshots/{jobId}", this);
controller.registerHandler(RestRequest.Method.GET, PrelertPlugin.BASE_PATH + "modelsnapshots/{"
+ Job.ID.getPreferredName() + "}", this);
// endpoints that support body parameters must also accept POST
controller.registerHandler(RestRequest.Method.POST, PrelertPlugin.BASE_PATH + "modelsnapshots/{jobId}", this);
controller.registerHandler(RestRequest.Method.POST, PrelertPlugin.BASE_PATH + "modelsnapshots/{"
+ Job.ID.getPreferredName() + "}", this);
}

@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
String jobId = restRequest.param(JOB_ID.getPreferredName());
GetModelSnapshotsAction.Request getModelSnapshots;
String jobId = restRequest.param(Job.ID.getPreferredName());
Request getModelSnapshots;
if (RestActions.hasBodyContent(restRequest)) {
BytesReference bodyBytes = RestActions.getRestContent(restRequest);
XContentParser parser = XContentFactory.xContent(bodyBytes).createParser(bodyBytes);
getModelSnapshots = GetModelSnapshotsAction.Request.parseRequest(jobId, parser, () -> parseFieldMatcher);
getModelSnapshots = Request.parseRequest(jobId, parser, () -> parseFieldMatcher);
} else {
getModelSnapshots = new GetModelSnapshotsAction.Request(jobId);
getModelSnapshots.setSort(restRequest.param(SORT.getPreferredName(), DEFAULT_SORT));
if (restRequest.hasParam(START.getPreferredName())) {
getModelSnapshots.setStart(restRequest.param(START.getPreferredName(), DEFAULT_START));
getModelSnapshots = new Request(jobId);
getModelSnapshots.setSort(restRequest.param(Request.SORT.getPreferredName(), DEFAULT_SORT));
if (restRequest.hasParam(Request.START.getPreferredName())) {
getModelSnapshots.setStart(restRequest.param(Request.START.getPreferredName(), DEFAULT_START));
}
if (restRequest.hasParam(END.getPreferredName())) {
getModelSnapshots.setEnd(restRequest.param(END.getPreferredName(), DEFAULT_END));
if (restRequest.hasParam(Request.END.getPreferredName())) {
getModelSnapshots.setEnd(restRequest.param(Request.END.getPreferredName(), DEFAULT_END));
}
if (restRequest.hasParam(DESCRIPTION.getPreferredName())) {
getModelSnapshots.setDescriptionString(restRequest.param(DESCRIPTION.getPreferredName(), DEFAULT_DESCRIPTION));
if (restRequest.hasParam(Request.DESCRIPTION.getPreferredName())) {
getModelSnapshots.setDescriptionString(restRequest.param(Request.DESCRIPTION.getPreferredName(), DEFAULT_DESCRIPTION));
}
getModelSnapshots.setDescOrder(restRequest.paramAsBoolean(DESC_ORDER.getPreferredName(), DEFAULT_DESC_ORDER));
getModelSnapshots.setPageParams(new PageParams(restRequest.paramAsInt(FROM.getPreferredName(), PageParams.DEFAULT_FROM),
restRequest.paramAsInt(SIZE.getPreferredName(), PageParams.DEFAULT_SIZE)));
getModelSnapshots.setDescOrder(restRequest.paramAsBoolean(Request.DESC.getPreferredName(), DEFAULT_DESC_ORDER));
getModelSnapshots.setPageParams(new PageParams(
restRequest.paramAsInt(PageParams.FROM.getPreferredName(), PageParams.DEFAULT_FROM),
restRequest.paramAsInt(PageParams.SIZE.getPreferredName(), PageParams.DEFAULT_SIZE)));
}

return channel -> transportGetModelSnapshotsAction.execute(getModelSnapshots, new RestToXContentListener<>(channel));
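Registering the handler for POST as well as GET is the usual accommodation for clients that cannot attach a body to a GET; both verbs land in the same prepareRequest, which prefers a body when present and falls back to query parameters otherwise. Once the constant is substituted, both registrations resolve to the same template (BASE_PATH again assumed to be "/_xpack/prelert/"):

String template = PrelertPlugin.BASE_PATH + "modelsnapshots/{" + Job.ID.getPreferredName() + "}";
// template == "/_xpack/prelert/modelsnapshots/{job_id}", registered for GET and POST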
@@ -18,14 +18,16 @@ import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestActions;
import org.elasticsearch.rest.action.RestStatusToXContentListener;
import org.elasticsearch.xpack.prelert.PrelertPlugin;
import org.elasticsearch.xpack.prelert.action.DeleteModelSnapshotAction;
import org.elasticsearch.xpack.prelert.action.PutModelSnapshotDescriptionAction;
import org.elasticsearch.xpack.prelert.action.RevertModelSnapshotAction;
import org.elasticsearch.xpack.prelert.job.Job;
import org.elasticsearch.xpack.prelert.job.ModelSnapshot;

import java.io.IOException;

public class RestPutModelSnapshotDescriptionAction extends BaseRestHandler {

private static final ParseField JOB_ID = new ParseField("jobId");
private static final ParseField SNAPSHOT_ID = new ParseField("snapshotId");

private final PutModelSnapshotDescriptionAction.TransportAction transportAction;

@Inject

@@ -35,7 +37,8 @@ public class RestPutModelSnapshotDescriptionAction extends BaseRestHandler {
this.transportAction = transportAction;

// NORELEASE: should be a POST action
controller.registerHandler(RestRequest.Method.PUT, PrelertPlugin.BASE_PATH + "modelsnapshots/{jobId}/{snapshotId}/description",
controller.registerHandler(RestRequest.Method.PUT, PrelertPlugin.BASE_PATH + "modelsnapshots/{"
+ Job.ID.getPreferredName() + "}/{" + ModelSnapshot.SNAPSHOT_ID.getPreferredName() + "}/description",
this);
}

@@ -44,8 +47,8 @@ public class RestPutModelSnapshotDescriptionAction extends BaseRestHandler {
BytesReference bodyBytes = RestActions.getRestContent(restRequest);
XContentParser parser = XContentFactory.xContent(bodyBytes).createParser(bodyBytes);
PutModelSnapshotDescriptionAction.Request getModelSnapshots = PutModelSnapshotDescriptionAction.Request.parseRequest(
restRequest.param(JOB_ID.getPreferredName()),
restRequest.param(SNAPSHOT_ID.getPreferredName()),
restRequest.param(Job.ID.getPreferredName()),
restRequest.param(ModelSnapshot.SNAPSHOT_ID.getPreferredName()),
parser, () -> parseFieldMatcher
);
@@ -30,9 +30,10 @@ public class RestGetCategoriesAction extends BaseRestHandler {
super(settings);
this.transportAction = transportAction;
controller.registerHandler(RestRequest.Method.GET,
PrelertPlugin.BASE_PATH + "results/{jobId}/categorydefinitions/{categoryId}", this);
PrelertPlugin.BASE_PATH + "results/{" + Job.ID.getPreferredName() + "}/categorydefinitions/{"
+ Request.CATEGORY_ID.getPreferredName() + "}", this);
controller.registerHandler(RestRequest.Method.GET,
PrelertPlugin.BASE_PATH + "results/{jobId}/categorydefinitions", this);
PrelertPlugin.BASE_PATH + "results/{" + Job.ID.getPreferredName() + "}/categorydefinitions", this);
}

@Override
@@ -15,13 +15,12 @@ import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.AcknowledgedRestListener;
import org.elasticsearch.xpack.prelert.PrelertPlugin;
import org.elasticsearch.xpack.prelert.action.StopJobSchedulerAction;
import org.elasticsearch.xpack.prelert.job.Job;

import java.io.IOException;

public class RestStopJobSchedulerAction extends BaseRestHandler {

private static final ParseField JOB_ID = new ParseField("jobId");

private final StopJobSchedulerAction.TransportAction transportJobSchedulerAction;

@Inject

@@ -29,13 +28,14 @@ public class RestStopJobSchedulerAction extends BaseRestHandler {
StopJobSchedulerAction.TransportAction transportJobSchedulerAction) {
super(settings);
this.transportJobSchedulerAction = transportJobSchedulerAction;
controller.registerHandler(RestRequest.Method.POST, PrelertPlugin.BASE_PATH + "schedulers/{jobId}/_stop", this);
controller.registerHandler(RestRequest.Method.POST, PrelertPlugin.BASE_PATH + "schedulers/{"
+ Job.ID.getPreferredName() + "}/_stop", this);
}

@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
StopJobSchedulerAction.Request jobSchedulerRequest = new StopJobSchedulerAction.Request(
restRequest.param(JOB_ID.getPreferredName()));
restRequest.param(Job.ID.getPreferredName()));
return channel -> transportJobSchedulerAction.execute(jobSchedulerRequest, new AcknowledgedRestListener<>(channel));
}
}
@@ -48,35 +48,35 @@ job.cannot.delete.while.scheduler.runs = Cannot delete job ''{0}'' while the sch
job.cannot.pause = Cannot pause job ''{0}'' while its status is {1}
job.cannot.resume = Cannot resume job ''{0}'' while its status is {1}

job.config.byField.incompatible.function = byFieldName cannot be used with function ''{0}''
job.config.byField.needs.another = byFieldName must be used in conjunction with fieldName or function
job.config.byField.incompatible.function = by_field_name cannot be used with function ''{0}''
job.config.byField.needs.another = by_field_name must be used in conjunction with field_name or function
job.config.cannot.encrypt.password = Cannot encrypt password
job.config.categorization.filters.require.categorization.field.name = categorizationFilters require setting categorizationFieldName
job.config.categorization.filters.contains.duplicates = categorizationFilters contain duplicates
job.config.categorization.filter.contains.empty = categorizationFilters are not allowed to contain empty strings
job.config.categorization.filter.contains.invalid.regex = categorizationFilters contains invalid regular expression ''{0}''
job.config.categorization.filters.require.categorization.field.name = categorization_filters require setting categorization_field_name
job.config.categorization.filters.contains.duplicates = categorization_filters contain duplicates
job.config.categorization.filter.contains.empty = categorization_filters are not allowed to contain empty strings
job.config.categorization.filter.contains.invalid.regex = categorization_filters contains invalid regular expression ''{0}''
job.config.condition.invalid.operator = Invalid operator for condition
job.config.condition.invalid.value.null = Invalid condition: the value field cannot be null
job.config.condition.invalid.value.numeric = Invalid condition value: cannot parse a double from string ''{0}''
job.config.condition.invalid.value.regex = Invalid condition value: ''{0}'' is not a valid regular expression
job.config.condition.unknown.operator = Unknown condition operator ''{0}''
job.config.dataformat.requires.transform = When the data format is {0}, transforms are required.
job.config.detectionrule.condition.categorical.invalid.option = Invalid detector rule: a categorical ruleCondition does not support {0}
job.config.detectionrule.condition.categorical.missing.option = Invalid detector rule: a categorical ruleCondition requires {0} to be set
job.config.detectionrule.condition.invalid.fieldname = Invalid detector rule: fieldName has to be one of {0}; actual was ''{1}''
job.config.detectionrule.condition.missing.fieldname = Invalid detector rule: missing fieldName in ruleCondition where fieldValue ''{0}'' is set
job.config.detectionrule.condition.categorical.invalid.option = Invalid detector rule: a categorical rule_condition does not support {0}
job.config.detectionrule.condition.categorical.missing.option = Invalid detector rule: a categorical rule_condition requires {0} to be set
job.config.detectionrule.condition.invalid.fieldname = Invalid detector rule: field_name has to be one of {0}; actual was ''{1}''
job.config.detectionrule.condition.missing.fieldname = Invalid detector rule: missing field_name in rule_condition where field_value ''{0}'' is set
job.config.detectionrule.condition.numerical.invalid.operator = Invalid detector rule: operator ''{0}'' is not allowed
job.config.detectionrule.condition.numerical.invalid.option = Invalid detector rule: a numerical ruleCondition does not support {0}
job.config.detectionrule.condition.numerical.missing.option = Invalid detector rule: a numerical ruleCondition requires {0} to be set
job.config.detectionrule.condition.numerical.with.fieldname.requires.fieldvalue = Invalid detector rule: a numerical ruleCondition with fieldName requires that fieldValue is set
job.config.detectionrule.invalid.targetfieldname = Invalid detector rule: targetFieldName has to be one of {0}; actual was ''{1}''
job.config.detectionrule.missing.targetfieldname = Invalid detector rule: missing targetFieldName where targetFieldValue ''{0}'' is set
job.config.detectionrule.condition.numerical.invalid.option = Invalid detector rule: a numerical rule_condition does not support {0}
job.config.detectionrule.condition.numerical.missing.option = Invalid detector rule: a numerical rule_condition requires {0} to be set
job.config.detectionrule.condition.numerical.with.fieldname.requires.fieldvalue = Invalid detector rule: a numerical rule_condition with field_name requires that field_value is set
job.config.detectionrule.invalid.targetfieldname = Invalid detector rule: target_field_name has to be one of {0}; actual was ''{1}''
job.config.detectionrule.missing.targetfieldname = Invalid detector rule: missing target_field_name where target_field_value ''{0}'' is set
job.config.detectionrule.not.supported.by.function = Invalid detector rule: function {0} does not support rules
job.config.detectionrule.requires.at.least.one.condition = Invalid detector rule: at least one ruleCondition is required
job.config.fieldname.incompatible.function = fieldName cannot be used with function ''{0}''
job.config.function.requires.byfield = byFieldName must be set when the ''{0}'' function is used
job.config.function.requires.fieldname = fieldName must be set when the ''{0}'' function is used
job.config.function.requires.overfield = overFieldName must be set when the ''{0}'' function is used
job.config.detectionrule.requires.at.least.one.condition = Invalid detector rule: at least one rule_condition is required
job.config.fieldname.incompatible.function = field_name cannot be used with function ''{0}''
job.config.function.requires.byfield = by_field_name must be set when the ''{0}'' function is used
job.config.function.requires.fieldname = field_name must be set when the ''{0}'' function is used
job.config.function.requires.overfield = over_field_name must be set when the ''{0}'' function is used
job.config.function.incompatible.presummarized = The ''{0}'' function cannot be used in jobs that will take pre-summarized input
job.config.id.already.taken = The job cannot be created with the Id ''{0}''. The Id is already used.
job.config.id.too.long = The job id cannot contain more than {0,number,integer} characters.

@@ -84,24 +84,24 @@ job.config.invalid.fieldname.chars = Invalid fieldname ''{0}''. Fieldnames inclu
job.config.invalid.jobid.chars = Invalid job id; must be lowercase alphanumeric and may contain hyphens or underscores
job.config.invalid.timeformat = Invalid Time format string ''{0}''
job.config.missing.analysisconfig = Either an AnalysisConfig or job reference id must be set
job.config.model.debug.config.invalid.bounds.percentile = Invalid modelDebugConfig: boundsPercentile must be in the range [0, 100]
job.config.model.debug.config.invalid.bounds.percentile = Invalid modelDebugConfig: bounds_percentile must be in the range [0, 100]
job.config.field.value.too.low = {0} cannot be less than {1,number}. Value = {2,number}
job.config.no.analysis.field = One of function, fieldName, byFieldName or overFieldName must be set
job.config.no.analysis.field.not.count = Unless the function is 'count' one of fieldName, byFieldName or overFieldName must be set
job.config.no.analysis.field = One of function, fieldName, by_field_name or over_field_name must be set
job.config.no.analysis.field.not.count = Unless the function is 'count' one of field_name, by_field_name or over_field_name must be set
job.config.no.detectors = No detectors configured
job.config.overField.incompatible.function = overFieldName cannot be used with function ''{0}''
job.config.overField.needs.another = overFieldName must be used in conjunction with fieldName or function
job.config.overField.incompatible.function = over_field_name cannot be used with function ''{0}''
job.config.overField.needs.another = over_field_name must be used in conjunction with field_name or function
job.config.overlapping.buckets.incompatible.function = Overlapping buckets cannot be used with function ''{0}''
job.config.multiple.bucketspans.require.bucketspan = Multiple bucketSpans require a bucketSpan to be specified
job.config.multiple.bucketspans.must.be.multiple = Multiple bucketSpan ''{0}'' must be a multiple of the main bucketSpan ''{1}''
job.config.multiple.bucketspans.require.bucket_span = Multiple bucket_spans require a bucket_span to be specified
job.config.multiple.bucketspans.must.be.multiple = Multiple bucket_span ''{0}'' must be a multiple of the main bucket_span ''{1}''
job.config.per.partition.normalisation.requires.partition.field = If the job is configured with Per-Partition Normalization enabled a detector must have a partition field
job.config.per.partition.normalisation.cannot.use.influencers = A job configured with Per-Partition Normalization cannot use influencers

job.config.update.analysis.limits.parse.error = JSON parse error reading the update value for analysisLimits
job.config.update.analysis.limits.cannot.be.null = Invalid update value for analysisLimits: null
job.config.update.analysis.limits.model.memory.limit.cannot.be.decreased = Invalid update value for analysisLimits: modelMemoryLimit cannot be decreased; existing is {0}, update had {1}
job.config.update.categorization.filters.invalid = Invalid update value for categorizationFilters: value must be an array of strings; actual was: {0}
job.config.update.custom.settings.invalid = Invalid update value for customSettings: value must be an object
job.config.update.analysis.limits.model.memory.limit.cannot.be.decreased = Invalid update value for analysis_limits: model_memory_limit cannot be decreased; existing is {0}, update had {1}
job.config.update.categorization.filters.invalid = Invalid update value for categorization_filters: value must be an array of strings; actual was: {0}
job.config.update.custom.settings.invalid = Invalid update value for custom_settings: value must be an object
job.config.update.description.invalid = Invalid update value for job description: value must be a string
job.config.update.detectors.invalid = Invalid update value for detectors: value must be an array
job.config.update.detectors.invalid.detector.index = Invalid index: valid range is [{0}, {1}]; actual was: {2}

@@ -110,19 +110,19 @@ job.config.update.detectors.missing.params = Invalid update value for detectors:
job.config.update.detectors.description.should.be.string = Invalid description: string expected; actual was: {0}
job.config.update.detectors.rules.parse.error = JSON parse error reading the update value for detectorRules
job.config.update.failed = Update failed. Please see the logs to trace the cause of the failure.
job.config.update.ignore.downtime.parse.error = Invalid update value for ignoreDowntime: expected one of {0}; actual was: {1}
job.config.update.ignore.downtime.parse.error = Invalid update value for ignore_downtime: expected one of {0}; actual was: {1}
job.config.update.invalid.key = Invalid key ''{0}''
job.config.update.job.is.not.closed = Cannot update key ''{0}'' while job is not closed; current status is {1}
job.config.update.model.debug.config.parse.error = JSON parse error reading the update value for ModelDebugConfig
job.config.update.requires.non.empty.object = Update requires JSON that contains a non-empty object
job.config.update.parse.error = JSON parse error reading the job update
job.config.update.background.persist.interval.invalid = Invalid update value for backgroundPersistInterval: value must be an exact number of seconds no less than 3600
job.config.update.renormalization.window.days.invalid = Invalid update value for renormalizationWindowDays: value must be an exact number of days
job.config.update.model.snapshot.retention.days.invalid = Invalid update value for modelSnapshotRetentionDays: value must be an exact number of days
job.config.update.results.retention.days.invalid = Invalid update value for resultsRetentionDays: value must be an exact number of days
job.config.update.scheduler.config.parse.error = JSON parse error reading the update value for schedulerConfig
job.config.update.scheduler.config.cannot.be.null = Invalid update value for schedulerConfig: null
job.config.update.scheduler.config.data.source.invalid = Invalid update value for schedulerConfig: dataSource cannot be changed; existing is {0}, update had {1}
job.config.update.background.persist.interval.invalid = Invalid update value for background_persist_interval: value must be an exact number of seconds no less than 3600
job.config.update.renormalization.window.days.invalid = Invalid update value for renormalization_window_days: value must be an exact number of days
job.config.update.model.snapshot.retention.days.invalid = Invalid update value for model_snapshot_retention_days: value must be an exact number of days
job.config.update.results.retention.days.invalid = Invalid update value for results_retention_days: value must be an exact number of days
job.config.update.scheduler.config.parse.error = JSON parse error reading the update value for scheduler_config
job.config.update.scheduler.config.cannot.be.null = Invalid update value for scheduler_config: null
job.config.update.scheduler.config.data.source.invalid = Invalid update value for scheduler_config: data_source cannot be changed; existing is {0}, update had {1}

job.config.transform.circular.dependency = Transform type {0} with inputs {1} has a circular dependency
job.config.transform.condition.required = A condition must be defined for transform ''{0}''

@@ -141,9 +141,9 @@ job.config.unknown.function = Unknown function ''{0}''
job.config.scheduler.unknown.datasource = Unknown scheduler dataSource ''{0}''
job.config.scheduler.field.not.supported = Scheduler configuration field {0} not supported for dataSource ''{1}''
job.config.scheduler.invalid.option.value = Invalid {0} value ''{1}'' in scheduler configuration
job.config.scheduler.requires.bucket.span = A job configured with scheduler requires that bucketSpan is specified
job.config.scheduler.requires.bucket.span = A job configured with scheduler requires that bucket_span is specified
job.config.scheduler.elasticsearch.does.not.support.latency = A job configured with an Elasticsearch scheduler cannot support latency
job.config.scheduler.aggregations.requires.summary.count.field = A scheduler job with aggregations for dataSource ''{0}'' must have summaryCountFieldName ''{1}''
job.config.scheduler.aggregations.requires.summary.count.field = A scheduler job with aggregations for dataSource ''{0}'' must have summary_count_field_name ''{1}''
job.config.scheduler.elasticsearch.requires.dataformat.elasticsearch = A job configured with an Elasticsearch scheduler must have dataFormat ''ELASTICSEARCH''
job.config.scheduler.incomplete.credentials = Both username and password must be specified if either is
job.config.scheduler.multiple.passwords = Both password and encryptedPassword were specified - please just specify one
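These values are java.text.MessageFormat patterns, which is why literal apostrophes are doubled. The renamed text flows straight through the lookup that the tests below exercise; for example, with the job.config.field.value.too.low pattern above:

String msg = Messages.getMessage(Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW, "bucket_span", 0, -1);
// pattern: "{0} cannot be less than {1,number}. Value = {2,number}"
// msg == "bucket_span cannot be less than 0. Value = -1"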
@@ -34,12 +34,12 @@ public class PrelertJobIT extends ESRestTestCase {
private static final String RESULT_MAPPING = "{ \"mappings\": {\"result\": { \"properties\": { " +
"\"result_type\": { \"type\" : \"keyword\" }," +
"\"timestamp\": { \"type\" : \"date\" }, " +
"\"anomalyScore\": { \"type\" : \"double\" }, " +
"\"normalizedProbability\": { \"type\" : \"double\" }, " +
"\"overFieldValue\": { \"type\" : \"keyword\" }, " +
"\"partitionFieldValue\": { \"type\" : \"keyword\" }, " +
"\"byFieldValue\": { \"type\" : \"keyword\" }, " +
"\"fieldName\": { \"type\" : \"keyword\" }, " +
"\"anomaly_score\": { \"type\" : \"double\" }, " +
"\"normalized_probability\": { \"type\" : \"double\" }, " +
"\"over_field_value\": { \"type\" : \"keyword\" }, " +
"\"partition_field_value\": { \"type\" : \"keyword\" }, " +
"\"by_field_value\": { \"type\" : \"keyword\" }, " +
"\"field_name\": { \"type\" : \"keyword\" }, " +
"\"function\": { \"type\" : \"keyword\" } " +
"} } } }";
@@ -48,7 +48,7 @@ public class PrelertJobIT extends ESRestTestCase {

assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
String responseAsString = responseEntityToString(response);
assertThat(responseAsString, containsString("\"jobId\":\"farequote\""));
assertThat(responseAsString, containsString("\"job_id\":\"farequote\""));
}

public void testGetJob_GivenNoSuchJob() throws Exception {

@@ -67,7 +67,7 @@ public class PrelertJobIT extends ESRestTestCase {
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
String responseAsString = responseEntityToString(response);
assertThat(responseAsString, containsString("\"count\":1"));
assertThat(responseAsString, containsString("\"jobId\":\"farequote\""));
assertThat(responseAsString, containsString("\"job_id\":\"farequote\""));
}

public void testGetJobs_GivenNegativeFrom() throws Exception {

@@ -102,7 +102,7 @@ public class PrelertJobIT extends ESRestTestCase {
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
String responseAsString = responseEntityToString(response);
assertThat(responseAsString, containsString("\"count\":1"));
assertThat(responseAsString, containsString("\"jobId\":\"farequote\""));
assertThat(responseAsString, containsString("\"job_id\":\"farequote\""));
}

public void testGetJobs_GivenMultipleJobs() throws Exception {

@@ -115,9 +115,9 @@ public class PrelertJobIT extends ESRestTestCase {
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
String responseAsString = responseEntityToString(response);
assertThat(responseAsString, containsString("\"count\":3"));
assertThat(responseAsString, containsString("\"jobId\":\"farequote_1\""));
assertThat(responseAsString, containsString("\"jobId\":\"farequote_2\""));
assertThat(responseAsString, containsString("\"jobId\":\"farequote_3\""));
assertThat(responseAsString, containsString("\"job_id\":\"farequote_1\""));
assertThat(responseAsString, containsString("\"job_id\":\"farequote_2\""));
assertThat(responseAsString, containsString("\"job_id\":\"farequote_3\""));
}

public void testGetJobs_GivenMultipleJobsAndFromIsOne() throws Exception {

@@ -130,9 +130,9 @@ public class PrelertJobIT extends ESRestTestCase {
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
String responseAsString = responseEntityToString(response);
assertThat(responseAsString, containsString("\"count\":3"));
assertThat(responseAsString, not(containsString("\"jobId\":\"farequote_1\"")));
assertThat(responseAsString, containsString("\"jobId\":\"farequote_2\""));
assertThat(responseAsString, containsString("\"jobId\":\"farequote_3\""));
assertThat(responseAsString, not(containsString("\"job_id\":\"farequote_1\"")));
assertThat(responseAsString, containsString("\"job_id\":\"farequote_2\""));
assertThat(responseAsString, containsString("\"job_id\":\"farequote_3\""));
}

public void testGetJobs_GivenMultipleJobsAndSizeIsOne() throws Exception {

@@ -145,9 +145,9 @@ public class PrelertJobIT extends ESRestTestCase {
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
String responseAsString = responseEntityToString(response);
assertThat(responseAsString, containsString("\"count\":3"));
assertThat(responseAsString, containsString("\"jobId\":\"farequote_1\""));
assertThat(responseAsString, not(containsString("\"jobId\":\"farequote_2\"")));
assertThat(responseAsString, not(containsString("\"jobId\":\"farequote_3\"")));
assertThat(responseAsString, containsString("\"job_id\":\"farequote_1\""));
assertThat(responseAsString, not(containsString("\"job_id\":\"farequote_2\"")));
assertThat(responseAsString, not(containsString("\"job_id\":\"farequote_3\"")));
}

public void testGetJobs_GivenMultipleJobsAndFromIsOneAndSizeIsOne() throws Exception {

@@ -160,9 +160,9 @@ public class PrelertJobIT extends ESRestTestCase {
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
String responseAsString = responseEntityToString(response);
assertThat(responseAsString, containsString("\"count\":3"));
assertThat(responseAsString, not(containsString("\"jobId\":\"farequote_1\"")));
assertThat(responseAsString, containsString("\"jobId\":\"farequote_2\""));
assertThat(responseAsString, not(containsString("\"jobId\":\"farequote_3\"")));
assertThat(responseAsString, not(containsString("\"job_id\":\"farequote_1\"")));
assertThat(responseAsString, containsString("\"job_id\":\"farequote_2\""));
assertThat(responseAsString, not(containsString("\"job_id\":\"farequote_3\"")));
}

private Response createFarequoteJob() throws Exception {
@@ -170,11 +170,12 @@ public class PrelertJobIT extends ESRestTestCase {
}

private Response createFarequoteJob(String jobId) throws Exception {
String job = "{\n" + " \"jobId\":\"" + jobId + "\",\n" + " \"description\":\"Analysis of response time by airline\",\n"
+ " \"analysisConfig\" : {\n" + " \"bucketSpan\":3600,\n"
+ " \"detectors\" :[{\"function\":\"metric\",\"fieldName\":\"responsetime\",\"byFieldName\":\"airline\"}]\n"
+ " },\n" + " \"dataDescription\" : {\n" + " \"fieldDelimiter\":\",\",\n" + " \"timeField\":\"time\",\n"
+ " \"timeFormat\":\"yyyy-MM-dd HH:mm:ssX\"\n" + " }\n" + "}";
String job = "{\n" + " \"job_id\":\"" + jobId + "\",\n" + " \"description\":\"Analysis of response time by airline\",\n"
+ " \"analysis_config\" : {\n" + " \"bucket_span\":3600,\n"
+ " \"detectors\" :[{\"function\":\"metric\",\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}]\n"
+ " },\n" + " \"data_description\" : {\n" + " \"field_delimiter\":\",\",\n" + " " +
"\"time_field\":\"time\",\n"
+ " \"time_format\":\"yyyy-MM-dd HH:mm:ssX\"\n" + " }\n" + "}";

return client().performRequest("put", PrelertPlugin.BASE_PATH + "jobs", Collections.emptyMap(), new StringEntity(job));
}
|
|||
Response response = client().performRequest("get", PrelertPlugin.BASE_PATH + "jobs/farequote",
|
||||
Collections.singletonMap("metric", "config,status"));
|
||||
String responseEntityToString = responseEntityToString(response);
|
||||
assertThat(responseEntityToString, containsString("\"ignoreDowntime\":\"ONCE\""));
|
||||
assertThat(responseEntityToString, containsString("\"ignore_downtime\":\"ONCE\""));
|
||||
assertThat(responseEntityToString, containsString("\"status\":\"PAUSED\""));
|
||||
} catch (Exception e1) {
|
||||
fail();
|
||||
|
@ -319,7 +320,7 @@ public class PrelertJobIT extends ESRestTestCase {
|
|||
}
|
||||
|
||||
String bucketResult = String.format(Locale.ROOT,
|
||||
"{\"jobId\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"bucket\", \"bucketSpan\": \"%s\"}",
|
||||
"{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"bucket\", \"bucket_span\": \"%s\"}",
|
||||
jobId, timestamp, bucketSpan);
|
||||
String id = String.format(Locale.ROOT,
|
||||
"%s_%s_%s", jobId, timestamp, bucketSpan);
|
||||
|
@ -337,7 +338,7 @@ public class PrelertJobIT extends ESRestTestCase {
|
|||
}
|
||||
|
||||
String bucketResult =
|
||||
String.format(Locale.ROOT, "{\"jobId\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"record\"}", jobId, timestamp);
|
||||
String.format(Locale.ROOT, "{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"record\"}", jobId, timestamp);
|
||||
return client().performRequest("put", "prelertresults-" + jobId + "/result/" + timestamp,
|
||||
Collections.singletonMap("refresh", "true"), new StringEntity(bucketResult));
|
||||
}
|
||||
|
|
|
@@ -126,25 +126,26 @@ public class ScheduledJobIT extends ESRestTestCase {
}

private Response createNonScheduledJob(String id) throws Exception {
String job = "{\n" + " \"jobId\":\"" + id + "\",\n" + " \"description\":\"Analysis of response time by airline\",\n"
+ " \"analysisConfig\" : {\n" + " \"bucketSpan\":3600,\n"
+ " \"detectors\" :[{\"function\":\"mean\",\"fieldName\":\"responsetime\",\"byFieldName\":\"airline\"}]\n"
+ " },\n" + " \"dataDescription\" : {\n" + " \"fieldDelimiter\":\",\",\n" + " \"timeField\":\"time\",\n"
+ " \"timeFormat\":\"yyyy-MM-dd'T'HH:mm:ssX\"\n" + " }\n" + "}";
String job = "{\n" + " \"job_id\":\"" + id + "\",\n" + " \"description\":\"Analysis of response time by airline\",\n"
+ " \"analysis_config\" : {\n" + " \"bucket_span\":3600,\n"
+ " \"detectors\" :[{\"function\":\"mean\",\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}]\n"
+ " },\n" + " \"data_description\" : {\n" + " \"field_delimiter\":\",\",\n" + " " +
"\"time_field\":\"time\",\n"
+ " \"time_format\":\"yyyy-MM-dd'T'HH:mm:ssX\"\n" + " }\n" + "}";

return client().performRequest("put", PrelertPlugin.BASE_PATH + "jobs", Collections.emptyMap(), new StringEntity(job));
}

private Response createScheduledJob(String id) throws Exception {
HttpHost httpHost = getClusterHosts().get(0);
String job = "{\n" + " \"jobId\":\"" + id + "\",\n" + " \"description\":\"Analysis of response time by airline\",\n"
+ " \"analysisConfig\" : {\n" + " \"bucketSpan\":3600,\n"
+ " \"detectors\" :[{\"function\":\"mean\",\"fieldName\":\"responsetime\",\"byFieldName\":\"airline\"}]\n"
+ " },\n" + " \"dataDescription\" : {\n" + " \"format\":\"ELASTICSEARCH\",\n"
+ " \"timeField\":\"time\",\n" + " \"timeFormat\":\"yyyy-MM-dd'T'HH:mm:ssX\"\n" + " },\n"
+ " \"schedulerConfig\" : {\n" + " \"dataSource\":\"ELASTICSEARCH\",\n"
+ " \"baseUrl\":\"" + httpHost.toURI() + "\",\n" + " \"indexes\":[\"airline-data\"],\n"
+ " \"types\":[\"response\"],\n" + " \"retrieveWholeSource\":true\n" + " }\n" + "}";
String job = "{\n" + " \"job_id\":\"" + id + "\",\n" + " \"description\":\"Analysis of response time by airline\",\n"
+ " \"analysis_config\" : {\n" + " \"bucket_span\":3600,\n"
+ " \"detectors\" :[{\"function\":\"mean\",\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}]\n"
+ " },\n" + " \"data_description\" : {\n" + " \"format\":\"ELASTICSEARCH\",\n"
+ " \"time_field\":\"time\",\n" + " \"time_format\":\"yyyy-MM-dd'T'HH:mm:ssX\"\n" + " },\n"
+ " \"scheduler_config\" : {\n" + " \"data_source\":\"ELASTICSEARCH\",\n"
+ " \"base_url\":\"" + httpHost.toURI() + "\",\n" + " \"indexes\":[\"airline-data\"],\n"
+ " \"types\":[\"response\"],\n" + " \"retrieve_whole_source\":true\n" + " }\n" + "}";

return client().performRequest("put", PrelertPlugin.BASE_PATH + "jobs", Collections.emptyMap(), new StringEntity(job));
}
@@ -206,7 +207,7 @@ public class ScheduledJobIT extends ESRestTestCase {
}

for (Map<String, Object> jobConfig : jobConfigs) {
String jobId = (String) jobConfig.get("jobId");
String jobId = (String) jobConfig.get("job_id");
try {
client.performRequest("POST", "/_xpack/prelert/schedulers/" + jobId + "/_stop");
waitForSchedulerStoppedState(client, jobId);
@@ -449,7 +449,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
new AnalysisConfig.Builder(Collections.singletonList(d)).build();
assertTrue(false); // shouldn't get here
} catch (IllegalArgumentException e) {
assertEquals("Unless the function is 'count' one of fieldName, byFieldName or overFieldName must be set", e.getMessage());
assertEquals("Unless the function is 'count' one of field_name, by_field_name or over_field_name must be set", e.getMessage());
}

// should work now

@@ -481,7 +481,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
acBuilder.build();
assertTrue(false); // shouldn't get here
} catch (IllegalArgumentException e) {
assertEquals("batchSpan cannot be less than 0. Value = -1", e.getMessage());
assertEquals("batch_span cannot be less than 0. Value = -1", e.getMessage());
}

acBuilder.setBatchSpan(10L);

@@ -490,7 +490,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
acBuilder.build();
assertTrue(false); // shouldn't get here
} catch (IllegalArgumentException e) {
assertEquals("bucketSpan cannot be less than 0. Value = -1", e.getMessage());
assertEquals("bucket_span cannot be less than 0. Value = -1", e.getMessage());
}

acBuilder.setBucketSpan(3600L);

@@ -518,7 +518,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon

IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> config.build());

assertEquals(Messages.getMessage(Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW, "bucketSpan", 0, -1), e.getMessage());
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW, "bucket_span", 0, -1), e.getMessage());
}

public void testVerify_GivenNegativeBatchSpan() {

@@ -527,7 +527,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon

IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> analysisConfig.build());

assertEquals(Messages.getMessage(Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW, "batchSpan", 0, -1), e.getMessage());
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW, "batch_span", 0, -1), e.getMessage());
}
@ -176,7 +176,7 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
XContentParser parser = XContentFactory.xContent(json).createParser(json);
ParsingException ex = expectThrows(ParsingException.class,
() -> DataDescription.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT));
assertThat(ex.getMessage(), containsString("[dataDescription] failed to parse field [format]"));
assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [format]"));
Throwable cause = ex.getCause();
assertNotNull(cause);
assertThat(cause, instanceOf(IllegalArgumentException.class));

@ -185,11 +185,11 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
}
public void testInvalidFieldDelimiter() throws Exception {
BytesArray json = new BytesArray("{ \"fieldDelimiter\":\",,\" }");
BytesArray json = new BytesArray("{ \"field_delimiter\":\",,\" }");
XContentParser parser = XContentFactory.xContent(json).createParser(json);
ParsingException ex = expectThrows(ParsingException.class,
() -> DataDescription.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT));
assertThat(ex.getMessage(), containsString("[dataDescription] failed to parse field [fieldDelimiter]"));
assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [field_delimiter]"));
Throwable cause = ex.getCause();
assertNotNull(cause);
assertThat(cause, instanceOf(IllegalArgumentException.class));

@ -198,11 +198,11 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
}
public void testInvalidQuoteCharacter() throws Exception {
BytesArray json = new BytesArray("{ \"quoteCharacter\":\"''\" }");
BytesArray json = new BytesArray("{ \"quote_character\":\"''\" }");
XContentParser parser = XContentFactory.xContent(json).createParser(json);
ParsingException ex = expectThrows(ParsingException.class,
() -> DataDescription.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT));
assertThat(ex.getMessage(), containsString("[dataDescription] failed to parse field [quoteCharacter]"));
assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [quote_character]"));
Throwable cause = ex.getCause();
assertNotNull(cause);
assertThat(cause, instanceOf(IllegalArgumentException.class));
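Aside, not part of this commit: every rename in this change is the same mechanical camelCase-to-snake_case conversion. A self-contained sketch of that mapping, JDK only:

import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public final class SnakeCase {

    // A lower-case letter or digit followed by an upper-case letter marks a word boundary.
    private static final Pattern CAMEL_BOUNDARY = Pattern.compile("([a-z0-9])([A-Z])");

    private SnakeCase() {}

    // e.g. "fieldDelimiter" -> "field_delimiter", "quoteCharacter" -> "quote_character"
    public static String fromCamel(String camel) {
        Matcher m = CAMEL_BOUNDARY.matcher(camel);
        return m.replaceAll("$1_$2").toLowerCase(Locale.ROOT);
    }

    public static void main(String[] args) {
        System.out.println(fromCamel("dataDescription"));          // data_description
        System.out.println(fromCamel("maxNormalizedProbability")); // max_normalized_probability
    }
}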
@ -172,7 +172,7 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
builder.setAnalysisLimits(new AnalysisLimits(42L, null));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> builder.setAnalysisLimits(new AnalysisLimits(41L, null)));
assertEquals("Invalid update value for analysisLimits: modelMemoryLimit cannot be decreased; existing is 42, update had 41",
assertEquals("Invalid update value for analysis_limits: model_memory_limit cannot be decreased; existing is 42, update had 41",
e.getMessage());
}
@ -17,7 +17,7 @@ public class ModelSizeStatsTests extends AbstractSerializingTestCase<ModelSizeSt
public void testDefaultConstructor() {
ModelSizeStats stats = new ModelSizeStats.Builder("foo").build();
assertEquals("modelSizeStats", stats.getId());
assertEquals("model_size_stats", stats.getId());
assertEquals(0, stats.getModelBytes());
assertEquals(0, stats.getTotalByFieldCount());
assertEquals(0, stats.getTotalOverFieldCount());

@ -41,7 +41,7 @@ public class ModelSizeStatsTests extends AbstractSerializingTestCase<ModelSizeSt
NullPointerException ex = expectThrows(NullPointerException.class, () -> stats.setMemoryStatus(null));
assertEquals("[memoryStatus] must not be null", ex.getMessage());
assertEquals("[memory_status] must not be null", ex.getMessage());
}
@ -102,12 +102,12 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
public void testAnalysisConfigRequiredFields() throws IOException {
Logger logger = Loggers.getLogger(SchedulerConfigTests.class);
String jobConfigStr = "{" + "\"jobId\":\"farequote\"," + "\"schedulerConfig\" : {" + "\"dataSource\":\"ELASTICSEARCH\","
+ "\"baseUrl\":\"http://localhost:9200/\"," + "\"indexes\":[\"farequote\"]," + "\"types\":[\"farequote\"],"
+ "\"query\":{\"match_all\":{} }" + "}," + "\"analysisConfig\" : {" + "\"bucketSpan\":3600,"
+ "\"detectors\" :[{\"function\":\"metric\",\"fieldName\":\"responsetime\",\"byFieldName\":\"airline\"}],"
+ "\"influencers\" :[\"airline\"]" + "}," + "\"dataDescription\" : {" + "\"format\":\"ELASTICSEARCH\","
+ "\"timeField\":\"@timestamp\"," + "\"timeFormat\":\"epoch_ms\"" + "}" + "}";
String jobConfigStr = "{" + "\"job_id\":\"farequote\"," + "\"scheduler_config\" : {" + "\"data_source\":\"ELASTICSEARCH\","
+ "\"base_url\":\"http://localhost:9200/\"," + "\"indexes\":[\"farequote\"]," + "\"types\":[\"farequote\"],"
+ "\"query\":{\"match_all\":{} }" + "}," + "\"analysis_config\" : {" + "\"bucket_span\":3600,"
+ "\"detectors\" :[{\"function\":\"metric\",\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}],"
+ "\"influencers\" :[\"airline\"]" + "}," + "\"data_description\" : {" + "\"format\":\"ELASTICSEARCH\","
+ "\"time_field\":\"@timestamp\"," + "\"time_format\":\"epoch_ms\"" + "}" + "}";
XContentParser parser = XContentFactory.xContent(jobConfigStr).createParser(jobConfigStr);
Job jobConfig = Job.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT).build();

@ -139,16 +139,16 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
public void testAggsParse() throws IOException {
Logger logger = Loggers.getLogger(SchedulerConfigTests.class);
String jobConfigStr = "{" + "\"jobId\":\"farequote\"," + "\"schedulerConfig\" : {" + "\"dataSource\":\"ELASTICSEARCH\","
+ "\"baseUrl\":\"http://localhost:9200/\"," + "\"indexes\":[\"farequote\"]," + "\"types\":[\"farequote\"],"
String jobConfigStr = "{" + "\"job_id\":\"farequote\"," + "\"scheduler_config\" : {" + "\"data_source\":\"ELASTICSEARCH\","
+ "\"base_url\":\"http://localhost:9200/\"," + "\"indexes\":[\"farequote\"]," + "\"types\":[\"farequote\"],"
+ "\"query\":{\"match_all\":{} }," + "\"aggs\" : {" + "\"top_level_must_be_time\" : {" + "\"histogram\" : {"
+ "\"field\" : \"@timestamp\"," + "\"interval\" : 3600000" + "}," + "\"aggs\" : {" + "\"by_field_in_the_middle\" : { "
+ "\"terms\" : {" + "\"field\" : \"airline\"," + "\"size\" : 0" + "}," + "\"aggs\" : {" + "\"stats_last\" : {"
+ "\"avg\" : {" + "\"field\" : \"responsetime\"" + "}" + "}" + "} " + "}" + "}" + "}" + "}" + "},"
+ "\"analysisConfig\" : {" + "\"summaryCountFieldName\":\"doc_count\"," + "\"bucketSpan\":3600,"
+ "\"detectors\" :[{\"function\":\"avg\",\"fieldName\":\"responsetime\",\"byFieldName\":\"airline\"}],"
+ "\"influencers\" :[\"airline\"]" + "}," + "\"dataDescription\" : {" + "\"format\":\"ELASTICSEARCH\","
+ "\"timeField\":\"@timestamp\"," + "\"timeFormat\":\"epoch_ms\"" + "}" + "}";
+ "\"analysis_config\" : {" + "\"summary_count_field_name\":\"doc_count\"," + "\"bucket_span\":3600,"
+ "\"detectors\" :[{\"function\":\"avg\",\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}],"
+ "\"influencers\" :[\"airline\"]" + "}," + "\"data_description\" : {" + "\"format\":\"ELASTICSEARCH\","
+ "\"time_field\":\"@timestamp\"," + "\"time_format\":\"epoch_ms\"" + "}" + "}";
XContentParser parser = XContentFactory.xContent(jobConfigStr).createParser(jobConfigStr);
Job jobConfig = Job.PARSER.parse(parser, () -> ParseFieldMatcher.STRICT).build();

@ -354,14 +354,14 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
public void testCheckValidFile_NoPath() {
SchedulerConfig.Builder conf = new SchedulerConfig.Builder(DataSource.FILE);
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_SCHEDULER_INVALID_OPTION_VALUE, "filePath", "null"), e.getMessage());
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_SCHEDULER_INVALID_OPTION_VALUE, "file_path", "null"), e.getMessage());
}
public void testCheckValidFile_EmptyPath() {
SchedulerConfig.Builder conf = new SchedulerConfig.Builder(DataSource.FILE);
conf.setFilePath("");
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_SCHEDULER_INVALID_OPTION_VALUE, "filePath", ""), e.getMessage());
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_SCHEDULER_INVALID_OPTION_VALUE, "file_path", ""), e.getMessage());
}
public void testCheckValidFile_InappropriateField() {

@ -369,7 +369,7 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
conf.setFilePath("myfile.csv");
conf.setBaseUrl("http://localhost:9200/");
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_SCHEDULER_FIELD_NOT_SUPPORTED, "baseUrl", DataSource.FILE), e.getMessage());
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_SCHEDULER_FIELD_NOT_SUPPORTED, "base_url", DataSource.FILE), e.getMessage());
}
public void testCheckValidElasticsearch_AllOk() throws IOException {

@ -457,7 +457,7 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
conf.setQuery(parser.map());
conf.setTailFile(true);
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_SCHEDULER_FIELD_NOT_SUPPORTED, "tailFile", DataSource.ELASTICSEARCH),
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_SCHEDULER_FIELD_NOT_SUPPORTED, "tail_file", DataSource.ELASTICSEARCH),
e.getMessage());
}

@ -529,7 +529,7 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
public void testCheckValidElasticsearch_GivenNegativeQueryDelay() throws IOException {
SchedulerConfig.Builder conf = new SchedulerConfig.Builder(DataSource.ELASTICSEARCH);
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> conf.setQueryDelay(-10L));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_SCHEDULER_INVALID_OPTION_VALUE, "queryDelay", -10L), e.getMessage());
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_SCHEDULER_INVALID_OPTION_VALUE, "query_delay", -10L), e.getMessage());
}
public void testCheckValidElasticsearch_GivenZeroFrequency() throws IOException {

@ -547,7 +547,7 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
public void testCheckValidElasticsearch_GivenNegativeScrollSize() throws IOException {
SchedulerConfig.Builder conf = new SchedulerConfig.Builder(DataSource.ELASTICSEARCH);
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> conf.setScrollSize(-1000));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_SCHEDULER_INVALID_OPTION_VALUE, "scrollSize", -1000L), e.getMessage());
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_SCHEDULER_INVALID_OPTION_VALUE, "scroll_size", -1000L), e.getMessage());
}
public void testCheckValidElasticsearch_GivenBothAggregationsAndAggsAreSet() {
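Aside, not part of this commit: the concatenated strings above build a complete snake_case job configuration. A minimal sketch of the same shape, using Jackson (an assumption; it is not the production parser) just to show the nesting:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class SnakeCaseConfigSketch {
    public static void main(String[] args) throws Exception {
        String jobConfig = "{"
                + "\"job_id\":\"farequote\","
                + "\"analysis_config\":{\"bucket_span\":3600,"
                + "\"detectors\":[{\"function\":\"metric\",\"field_name\":\"responsetime\"}]},"
                + "\"data_description\":{\"time_field\":\"@timestamp\"}"
                + "}";
        JsonNode root = new ObjectMapper().readTree(jobConfig);
        // Every key is snake_case; the strict ParseFieldMatcher used in the tests
        // above would reject the old camelCase spellings.
        System.out.println(root.get("analysis_config").get("bucket_span").asLong()); // 3600
    }
}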
@ -98,9 +98,9 @@ public class AuditorTests extends ESTestCase {
IndexRequestBuilder indexRequestBuilder = Mockito.mock(IndexRequestBuilder.class);
when(indexRequestBuilder.setSource(jsonCaptor.capture())).thenReturn(indexRequestBuilder);
when(indexRequestBuilder.execute()).thenReturn(indexResponse);
when(client.prepareIndex(indexCaptor.capture(), eq("auditMessage")))
when(client.prepareIndex(indexCaptor.capture(), eq("audit_message")))
.thenReturn(indexRequestBuilder);
when(client.prepareIndex(indexCaptor.capture(), eq("auditActivity")))
when(client.prepareIndex(indexCaptor.capture(), eq("audit_activity")))
.thenReturn(indexRequestBuilder);
}
@ -115,68 +115,68 @@ public class RuleConditionTests extends AbstractSerializingTestCase<RuleConditio
Condition condition = new Condition(Operator.MATCH, "text");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> new RuleCondition(RuleConditionType.CATEGORICAL, null, null, condition, null));
assertEquals("Invalid detector rule: a categorical ruleCondition does not support condition", e.getMessage());
assertEquals("Invalid detector rule: a categorical rule_condition does not support condition", e.getMessage());
}
public void testVerify_GivenCategoricalWithFieldValue() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> new RuleCondition(RuleConditionType.CATEGORICAL, "metric", "CPU", null, null));
assertEquals("Invalid detector rule: a categorical ruleCondition does not support fieldValue", e.getMessage());
assertEquals("Invalid detector rule: a categorical rule_condition does not support field_value", e.getMessage());
}
public void testVerify_GivenCategoricalWithoutValueList() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> new RuleCondition(RuleConditionType.CATEGORICAL, null, null, null, null));
assertEquals("Invalid detector rule: a categorical ruleCondition requires valueList to be set", e.getMessage());
assertEquals("Invalid detector rule: a categorical rule_condition requires value_list to be set", e.getMessage());
}
public void testVerify_GivenNumericalActualWithValueList() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, null, null, null, "myList"));
assertEquals("Invalid detector rule: a numerical ruleCondition does not support valueList", e.getMessage());
assertEquals("Invalid detector rule: a numerical rule_condition does not support value_list", e.getMessage());
}
public void testVerify_GivenNumericalActualWithoutCondition() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, null, null, null, null));
assertEquals("Invalid detector rule: a numerical ruleCondition requires condition to be set", e.getMessage());
assertEquals("Invalid detector rule: a numerical rule_condition requires condition to be set", e.getMessage());
}
public void testVerify_GivenNumericalActualWithFieldNameButNoFieldValue() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, "metric", null, new Condition(Operator.LT, "5"), null));
assertEquals("Invalid detector rule: a numerical ruleCondition with fieldName requires that fieldValue is set", e.getMessage());
assertEquals("Invalid detector rule: a numerical rule_condition with field_name requires that field_value is set", e.getMessage());
}
public void testVerify_GivenNumericalTypicalWithValueList() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, null, null, null, "myList"));
assertEquals("Invalid detector rule: a numerical ruleCondition does not support valueList", e.getMessage());
assertEquals("Invalid detector rule: a numerical rule_condition does not support value_list", e.getMessage());
}
public void testVerify_GivenNumericalTypicalWithoutCondition() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, null, null, null, null));
assertEquals("Invalid detector rule: a numerical ruleCondition requires condition to be set", e.getMessage());
assertEquals("Invalid detector rule: a numerical rule_condition requires condition to be set", e.getMessage());
}
public void testVerify_GivenNumericalDiffAbsWithValueList() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_DIFF_ABS, null, null, null, "myList"));
assertEquals("Invalid detector rule: a numerical ruleCondition does not support valueList", e.getMessage());
assertEquals("Invalid detector rule: a numerical rule_condition does not support value_list", e.getMessage());
}
public void testVerify_GivenNumericalDiffAbsWithoutCondition() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_DIFF_ABS, null, null, null, null));
assertEquals("Invalid detector rule: a numerical ruleCondition requires condition to be set", e.getMessage());
assertEquals("Invalid detector rule: a numerical rule_condition requires condition to be set", e.getMessage());
}
public void testVerify_GivenFieldValueWithoutFieldName() {
Condition condition = new Condition(Operator.LTE, "5");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_DIFF_ABS, null, "foo", condition, null));
assertEquals("Invalid detector rule: missing fieldName in ruleCondition where fieldValue 'foo' is set", e.getMessage());
assertEquals("Invalid detector rule: missing field_name in rule_condition where field_value 'foo' is set", e.getMessage());
}
public void testVerify_GivenNumericalAndOperatorEquals() {
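Aside, not part of this commit and using hypothetical types: the validation matrix these messages describe, condensed into one method so the rules are visible in one place:

enum RuleConditionType { CATEGORICAL, NUMERICAL_ACTUAL, NUMERICAL_TYPICAL, NUMERICAL_DIFF_ABS }

final class RuleConditionSketch {
    // Mirrors the constraints asserted above, message for message.
    static void verify(RuleConditionType type, String fieldName, String fieldValue,
                       Object condition, String valueList) {
        if (type == RuleConditionType.CATEGORICAL) {
            if (condition != null) {
                throw new IllegalArgumentException(
                        "Invalid detector rule: a categorical rule_condition does not support condition");
            }
            if (fieldValue != null) {
                throw new IllegalArgumentException(
                        "Invalid detector rule: a categorical rule_condition does not support field_value");
            }
            if (valueList == null) {
                throw new IllegalArgumentException(
                        "Invalid detector rule: a categorical rule_condition requires value_list to be set");
            }
        } else { // all numerical variants share these rules
            if (valueList != null) {
                throw new IllegalArgumentException(
                        "Invalid detector rule: a numerical rule_condition does not support value_list");
            }
            if (condition == null) {
                throw new IllegalArgumentException(
                        "Invalid detector rule: a numerical rule_condition requires condition to be set");
            }
            if (fieldName != null && fieldValue == null) {
                throw new IllegalArgumentException(
                        "Invalid detector rule: a numerical rule_condition with field_name requires that field_value is set");
            }
        }
    }
}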
@ -35,7 +35,7 @@ public class BucketsQueryBuilderTests extends ESTestCase {
.start("1000")
.end("2000")
.partitionValue("foo")
.sortField("anomalyScore")
.sortField("anomaly_score")
.sortDescending(true)
.build();

@ -48,7 +48,7 @@ public class BucketsQueryBuilderTests extends ESTestCase {
assertEquals("1000", query.getStart());
assertEquals("2000", query.getEnd());
assertEquals("foo", query.getPartitionValue());
assertEquals("anomalyScore", query.getSortField());
assertEquals("anomaly_score", query.getSortField());
assertTrue(query.isSortDescending());
}
@ -40,13 +40,13 @@ public class ElasticsearchDotNotationReverserTests extends ESTestCase {
private ElasticsearchDotNotationReverser createReverser() {
ElasticsearchDotNotationReverser reverser = new ElasticsearchDotNotationReverser();
// This should get ignored as it's a reserved field name
reverser.add("bucketSpan", "3600");
reverser.add("bucket_span", "3600");
reverser.add("simple", "simon");
reverser.add("cpu.user", "10");
reverser.add("cpu.system", "5");
reverser.add("cpu.wait", "1");
// This should get ignored as one of its segments is a reserved field name
reverser.add("foo.bucketSpan", "3600");
reverser.add("foo.bucket_span", "3600");
reverser.add("complex.nested.structure.first", "x");
reverser.add("complex.nested.structure.second", "y");
reverser.add("complex.nested.value", "z");
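Aside, a hypothetical re-implementation sketch (JDK only, not the production class) of the behaviour the test above fixes the field names for: dotted keys become nested maps, and any key containing a reserved result field segment, now spelled snake_case, is dropped:

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

public class DotNotationSketch {

    // Illustrative subset of the reserved result field names.
    private static final Set<String> RESERVED = Set.of("bucket_span", "anomaly_score");

    @SuppressWarnings("unchecked")
    static void add(Map<String, Object> root, String dottedKey, String value) {
        String[] parts = dottedKey.split("\\.");
        for (String part : parts) {
            if (RESERVED.contains(part)) {
                return; // any reserved segment disqualifies the whole key
            }
        }
        Map<String, Object> node = root;
        for (int i = 0; i < parts.length - 1; i++) {
            node = (Map<String, Object>) node.computeIfAbsent(parts[i], k -> new HashMap<String, Object>());
        }
        node.put(parts[parts.length - 1], value);
    }

    public static void main(String[] args) {
        Map<String, Object> root = new HashMap<>();
        add(root, "bucket_span", "3600");        // ignored: reserved
        add(root, "cpu.user", "10");
        add(root, "complex.nested.value", "z");
        System.out.println(root);                // {cpu={user=10}, complex={nested={value=z}}}
    }
}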
@ -91,7 +91,7 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
public void testGetQuantiles_GivenQuantilesHaveNonEmptyState() throws Exception {
Map<String, Object> source = new HashMap<>();
source.put(Quantiles.JOB_ID.getPreferredName(), "foo");
source.put(Job.ID.getPreferredName(), "foo");
source.put(Quantiles.TIMESTAMP.getPreferredName(), 0L);
source.put(Quantiles.QUANTILE_STATE.getPreferredName(), "state");
GetResponse getResponse = createGetResponse(true, source);

@ -110,7 +110,7 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
public void testGetQuantiles_GivenQuantilesHaveEmptyState() throws Exception {
Map<String, Object> source = new HashMap<>();
source.put(Quantiles.JOB_ID.getPreferredName(), "foo");
source.put(Job.ID.getPreferredName(), "foo");
source.put(Quantiles.TIMESTAMP.getPreferredName(), new Date(0L).getTime());
source.put(Quantiles.QUANTILE_STATE.getPreferredName(), "");
GetResponse getResponse = createGetResponse(true, source);

@ -211,9 +211,9 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
List<Map<String, Object>> source = new ArrayList<>();
Map<String, Object> map = new HashMap<>();
map.put("jobId", "foo");
map.put("job_id", "foo");
map.put("timestamp", now.getTime());
map.put("bucketSpan", 22);
map.put("bucket_span", 22);
source.add(map);
ArgumentCaptor<QueryBuilder> queryBuilder = ArgumentCaptor.forClass(QueryBuilder.class);

@ -235,7 +235,8 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
QueryBuilder query = queryBuilder.getValue();
String queryString = query.toString();
assertTrue(
queryString.matches("(?s).*maxNormalizedProbability[^}]*from. : 1\\.0.*must_not[^}]*term[^}]*isInterim.*value. : .true.*"));
queryString.matches("(?s).*max_normalized_probability[^}]*from. : 1\\.0.*must_not[^}]*term[^}]*is_interim.*value. : .true" +
".*"));
}
public void testBuckets_OneBucketInterim()

@ -245,9 +246,9 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
List<Map<String, Object>> source = new ArrayList<>();
Map<String, Object> map = new HashMap<>();
map.put("jobId", "foo");
map.put("job_id", "foo");
map.put("timestamp", now.getTime());
map.put("bucketSpan", 22);
map.put("bucket_span", 22);
source.add(map);
ArgumentCaptor<QueryBuilder> queryBuilder = ArgumentCaptor.forClass(QueryBuilder.class);

@ -268,9 +269,9 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
assertEquals(1L, buckets.count());
QueryBuilder query = queryBuilder.getValue();
String queryString = query.toString();
assertTrue(queryString.matches("(?s).*maxNormalizedProbability[^}]*from. : 10\\.9.*"));
assertTrue(queryString.matches("(?s).*anomalyScore[^}]*from. : 5\\.1.*"));
assertFalse(queryString.matches("(?s).*isInterim.*"));
assertTrue(queryString.matches("(?s).*max_normalized_probability[^}]*from. : 10\\.9.*"));
assertTrue(queryString.matches("(?s).*anomaly_score[^}]*from. : 5\\.1.*"));
assertFalse(queryString.matches("(?s).*is_interim.*"));
}
public void testBuckets_UsingBuilder()

@ -280,9 +281,9 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
List<Map<String, Object>> source = new ArrayList<>();
Map<String, Object> map = new HashMap<>();
map.put("jobId", "foo");
map.put("job_id", "foo");
map.put("timestamp", now.getTime());
map.put("bucketSpan", 22);
map.put("bucket_span", 22);
source.add(map);
ArgumentCaptor<QueryBuilder> queryBuilder = ArgumentCaptor.forClass(QueryBuilder.class);

@ -307,9 +308,9 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
assertEquals(1L, buckets.count());
QueryBuilder query = queryBuilder.getValue();
String queryString = query.toString();
assertTrue(queryString.matches("(?s).*maxNormalizedProbability[^}]*from. : 10\\.9.*"));
assertTrue(queryString.matches("(?s).*anomalyScore[^}]*from. : 5\\.1.*"));
assertFalse(queryString.matches("(?s).*isInterim.*"));
assertTrue(queryString.matches("(?s).*max_normalized_probability[^}]*from. : 10\\.9.*"));
assertTrue(queryString.matches("(?s).*anomaly_score[^}]*from. : 5\\.1.*"));
assertFalse(queryString.matches("(?s).*is_interim.*"));
}
public void testBucket_NoBucketNoExpandNoInterim()
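Aside, not part of this commit: the query assertions above use (?s) DOTALL regexes with '.' standing in for the quote characters that vary between XContent renderings. A pure-JDK sketch of the same probe against an illustrative query fragment:

public class QueryRegexSketch {
    public static void main(String[] args) {
        // Illustrative fragment shaped like the rendered range query in the tests above.
        String queryJson = "{\n  \"range\" : {\n    \"max_normalized_probability\" : {\n      \"from\" : 1.0\n    }\n  }\n}";
        // (?s) lets '.' cross newlines; [^}]* keeps the match inside one JSON object.
        System.out.println(queryJson.matches("(?s).*max_normalized_probability[^}]*from. : 1\\.0.*")); // true
    }
}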
@ -335,7 +336,7 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
BucketQueryBuilder bq = new BucketQueryBuilder(Long.toString(timestamp));
ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class,
() ->provider.bucket(jobId, bq.build()));
() -> provider.bucket(jobId, bq.build()));
}
public void testBucket_OneBucketNoExpandNoInterim()

@ -345,9 +346,9 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
List<Map<String, Object>> source = new ArrayList<>();
Map<String, Object> map = new HashMap<>();
map.put("jobId", "foo");
map.put("job_id", "foo");
map.put("timestamp", now.getTime());
map.put("bucketSpan", 22);
map.put("bucket_span", 22);
source.add(map);
ArgumentCaptor<QueryBuilder> queryBuilder = ArgumentCaptor.forClass(QueryBuilder.class);

@ -374,10 +375,10 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
List<Map<String, Object>> source = new ArrayList<>();
Map<String, Object> map = new HashMap<>();
map.put("jobId", "foo");
map.put("job_id", "foo");
map.put("timestamp", now.getTime());
map.put("bucketSpan", 22);
map.put("isInterim", true);
map.put("bucket_span", 22);
map.put("is_interim", true);
source.add(map);
ArgumentCaptor<QueryBuilder> queryBuilder = ArgumentCaptor.forClass(QueryBuilder.class);

@ -392,7 +393,7 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
BucketQueryBuilder bq = new BucketQueryBuilder(Long.toString(now.getTime()));
ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class,
() ->provider.bucket(jobId, bq.build()));
() -> provider.bucket(jobId, bq.build()));
}
public void testRecords() throws InterruptedException, ExecutionException, IOException {

@ -401,19 +402,19 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
List<Map<String, Object>> source = new ArrayList<>();
Map<String, Object> recordMap1 = new HashMap<>();
recordMap1.put("jobId", "foo");
recordMap1.put("job_id", "foo");
recordMap1.put("typical", 22.4);
recordMap1.put("actual", 33.3);
recordMap1.put("timestamp", now.getTime());
recordMap1.put("function", "irritable");
recordMap1.put("bucketSpan", 22);
recordMap1.put("bucket_span", 22);
Map<String, Object> recordMap2 = new HashMap<>();
recordMap2.put("jobId", "foo");
recordMap2.put("job_id", "foo");
recordMap2.put("typical", 1122.4);
recordMap2.put("actual", 933.3);
recordMap2.put("timestamp", now.getTime());
recordMap2.put("function", "irrascible");
recordMap2.put("bucketSpan", 22);
recordMap2.put("bucket_span", 22);
source.add(recordMap1);
source.add(recordMap2);

@ -451,19 +452,19 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
List<Map<String, Object>> source = new ArrayList<>();
Map<String, Object> recordMap1 = new HashMap<>();
recordMap1.put("jobId", "foo");
recordMap1.put("job_id", "foo");
recordMap1.put("typical", 22.4);
recordMap1.put("actual", 33.3);
recordMap1.put("timestamp", now.getTime());
recordMap1.put("function", "irritable");
recordMap1.put("bucketSpan", 22);
recordMap1.put("bucket_span", 22);
Map<String, Object> recordMap2 = new HashMap<>();
recordMap2.put("jobId", "foo");
recordMap2.put("job_id", "foo");
recordMap2.put("typical", 1122.4);
recordMap2.put("actual", 933.3);
recordMap2.put("timestamp", now.getTime());
recordMap2.put("function", "irrascible");
recordMap2.put("bucketSpan", 22);
recordMap2.put("bucket_span", 22);
source.add(recordMap1);
source.add(recordMap2);
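Aside, not part of this commit: the recordMap blocks on either side of this point repeat the same snake_case keys; a hypothetical helper of this shape would express what each block builds:

import java.util.Date;
import java.util.HashMap;
import java.util.Map;

final class RecordMaps {
    // Builds one anomaly-record source map with the renamed snake_case keys.
    static Map<String, Object> record(String jobId, double typical, double actual,
                                      Date timestamp, String function, int bucketSpan) {
        Map<String, Object> map = new HashMap<>();
        map.put("job_id", jobId);
        map.put("typical", typical);
        map.put("actual", actual);
        map.put("timestamp", timestamp.getTime());
        map.put("function", function);
        map.put("bucket_span", bucketSpan);
        return map;
    }
}

Usage would then read source.add(RecordMaps.record("foo", 22.4, 33.3, now, "irritable", 22));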
@ -508,19 +509,19 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
List<Map<String, Object>> source = new ArrayList<>();
Map<String, Object> recordMap1 = new HashMap<>();
recordMap1.put("jobId", "foo");
recordMap1.put("job_id", "foo");
recordMap1.put("typical", 22.4);
recordMap1.put("actual", 33.3);
recordMap1.put("timestamp", now.getTime());
recordMap1.put("function", "irritable");
recordMap1.put("bucketSpan", 22);
recordMap1.put("bucket_span", 22);
Map<String, Object> recordMap2 = new HashMap<>();
recordMap2.put("jobId", "foo");
recordMap2.put("job_id", "foo");
recordMap2.put("typical", 1122.4);
recordMap2.put("actual", 933.3);
recordMap2.put("timestamp", now.getTime());
recordMap2.put("function", "irrascible");
recordMap2.put("bucketSpan", 22);
recordMap2.put("bucket_span", 22);
source.add(recordMap1);
source.add(recordMap2);

@ -556,12 +557,12 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
List<Map<String, Object>> source = new ArrayList<>();
for (int i = 0; i < 400; i++) {
Map<String, Object> recordMap = new HashMap<>();
recordMap.put("jobId", "foo");
recordMap.put("job_id", "foo");
recordMap.put("typical", 22.4 + i);
recordMap.put("actual", 33.3 + i);
recordMap.put("timestamp", now.getTime());
recordMap.put("function", "irritable");
recordMap.put("bucketSpan", 22);
recordMap.put("bucket_span", 22);
source.add(recordMap);
}

@ -587,12 +588,12 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
List<Map<String, Object>> source = new ArrayList<>();
for (int i = 0; i < 600; i++) {
Map<String, Object> recordMap = new HashMap<>();
recordMap.put("jobId", "foo");
recordMap.put("job_id", "foo");
recordMap.put("typical", 22.4 + i);
recordMap.put("actual", 33.3 + i);
recordMap.put("timestamp", now.getTime());
recordMap.put("function", "irritable");
recordMap.put("bucketSpan", 22);
recordMap.put("bucket_span", 22);
source.add(recordMap);
}

@ -619,8 +620,8 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
List<Map<String, Object>> source = new ArrayList<>();
Map<String, Object> map = new HashMap<>();
map.put("jobId", "foo");
map.put("categoryId", String.valueOf(map.hashCode()));
map.put("job_id", "foo");
map.put("category_id", String.valueOf(map.hashCode()));
map.put("terms", terms);
source.add(map);

@ -647,8 +648,8 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
Map<String, Object> source = new HashMap<>();
String categoryId = String.valueOf(source.hashCode());
source.put("jobId", "foo");
source.put("categoryId", categoryId);
source.put("job_id", "foo");
source.put("category_id", categoryId);
source.put("terms", terms);
GetResponse getResponse = createGetResponse(true, source);

@ -671,21 +672,21 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
List<Map<String, Object>> source = new ArrayList<>();
Map<String, Object> recordMap1 = new HashMap<>();
recordMap1.put("jobId", "foo");
recordMap1.put("job_id", "foo");
recordMap1.put("probability", 0.555);
recordMap1.put("influencerFieldName", "Builder");
recordMap1.put("influencer_field_name", "Builder");
recordMap1.put("timestamp", now.getTime());
recordMap1.put("influencerFieldValue", "Bob");
recordMap1.put("initialAnomalyScore", 22.2);
recordMap1.put("anomalyScore", 22.6);
recordMap1.put("influencer_field_value", "Bob");
recordMap1.put("initial_anomaly_score", 22.2);
recordMap1.put("anomaly_score", 22.6);
Map<String, Object> recordMap2 = new HashMap<>();
recordMap2.put("jobId", "foo");
recordMap2.put("job_id", "foo");
recordMap2.put("probability", 0.99);
recordMap2.put("influencerFieldName", "Builder");
recordMap2.put("influencer_field_name", "Builder");
recordMap2.put("timestamp", now.getTime());
recordMap2.put("influencerFieldValue", "James");
recordMap2.put("initialAnomalyScore", 5.0);
recordMap2.put("anomalyScore", 5.0);
recordMap2.put("influencer_field_value", "James");
recordMap2.put("initial_anomaly_score", 5.0);
recordMap2.put("anomaly_score", 5.0);
source.add(recordMap1);
source.add(recordMap2);

@ -706,7 +707,7 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
assertEquals(2L, page.count());
String queryString = queryBuilder.getValue().toString();
assertTrue(queryString.matches("(?s).*must_not[^}]*term[^}]*isInterim.*value. : .true.*"));
assertTrue(queryString.matches("(?s).*must_not[^}]*term[^}]*is_interim.*value. : .true.*"));
List<Influencer> records = page.results();
assertEquals("foo", records.get(0).getJobId());

@ -732,21 +733,21 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
List<Map<String, Object>> source = new ArrayList<>();
Map<String, Object> recordMap1 = new HashMap<>();
recordMap1.put("jobId", "foo");
recordMap1.put("job_id", "foo");
recordMap1.put("probability", 0.555);
recordMap1.put("influencerFieldName", "Builder");
recordMap1.put("influencer_field_name", "Builder");
recordMap1.put("timestamp", now.getTime());
recordMap1.put("influencerFieldValue", "Bob");
recordMap1.put("initialAnomalyScore", 22.2);
recordMap1.put("anomalyScore", 22.6);
recordMap1.put("influencer_field_value", "Bob");
recordMap1.put("initial_anomaly_score", 22.2);
recordMap1.put("anomaly_score", 22.6);
Map<String, Object> recordMap2 = new HashMap<>();
recordMap2.put("jobId", "foo");
recordMap2.put("job_id", "foo");
recordMap2.put("probability", 0.99);
recordMap2.put("influencerFieldName", "Builder");
recordMap2.put("influencer_field_name", "Builder");
recordMap2.put("timestamp", now.getTime());
recordMap2.put("influencerFieldValue", "James");
recordMap2.put("initialAnomalyScore", 5.0);
recordMap2.put("anomalyScore", 5.0);
recordMap2.put("influencer_field_value", "James");
recordMap2.put("initial_anomaly_score", 5.0);
recordMap2.put("anomaly_score", 5.0);
source.add(recordMap1);
source.add(recordMap2);

@ -809,21 +810,21 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
List<Map<String, Object>> source = new ArrayList<>();
Map<String, Object> recordMap1 = new HashMap<>();
recordMap1.put("jobId", "foo");
recordMap1.put("job_id", "foo");
recordMap1.put("description", "snapshot1");
recordMap1.put("restorePriority", 1);
recordMap1.put("restore_priority", 1);
recordMap1.put("timestamp", now.getTime());
recordMap1.put("snapshotDocCount", 5);
recordMap1.put("latestRecordTimeStamp", now.getTime());
recordMap1.put("latestResultTimeStamp", now.getTime());
recordMap1.put("snapshot_doc_count", 5);
recordMap1.put("latest_record_time_stamp", now.getTime());
recordMap1.put("latest_result_time_stamp", now.getTime());
Map<String, Object> recordMap2 = new HashMap<>();
recordMap2.put("jobId", "foo");
recordMap2.put("job_id", "foo");
recordMap2.put("description", "snapshot2");
recordMap2.put("restorePriority", 999);
recordMap2.put("restore_priority", 999);
recordMap2.put("timestamp", now.getTime());
recordMap2.put("snapshotDocCount", 6);
recordMap2.put("latestRecordTimeStamp", now.getTime());
recordMap2.put("latestResultTimeStamp", now.getTime());
recordMap2.put("snapshot_doc_count", 6);
recordMap2.put("latest_record_time_stamp", now.getTime());
recordMap2.put("latest_result_time_stamp", now.getTime());
source.add(recordMap1);
source.add(recordMap2);

@ -865,21 +866,21 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
List<Map<String, Object>> source = new ArrayList<>();
Map<String, Object> recordMap1 = new HashMap<>();
recordMap1.put("jobId", "foo");
recordMap1.put("job_id", "foo");
recordMap1.put("description", "snapshot1");
recordMap1.put("restorePriority", 1);
recordMap1.put("restore_priority", 1);
recordMap1.put("timestamp", now.getTime());
recordMap1.put("snapshotDocCount", 5);
recordMap1.put("latestRecordTimeStamp", now.getTime());
recordMap1.put("latestResultTimeStamp", now.getTime());
recordMap1.put("snapshot_doc_count", 5);
recordMap1.put("latest_record_time_stamp", now.getTime());
recordMap1.put("latest_result_time_stamp", now.getTime());
Map<String, Object> recordMap2 = new HashMap<>();
recordMap2.put("jobId", "foo");
recordMap2.put("job_id", "foo");
recordMap2.put("description", "snapshot2");
recordMap2.put("restorePriority", 999);
recordMap2.put("restore_priority", 999);
recordMap2.put("timestamp", now.getTime());
recordMap2.put("snapshotDocCount", 6);
recordMap2.put("latestRecordTimeStamp", now.getTime());
recordMap2.put("latestResultTimeStamp", now.getTime());
recordMap2.put("snapshot_doc_count", 6);
recordMap2.put("latest_record_time_stamp", now.getTime());
recordMap2.put("latest_result_time_stamp", now.getTime());
source.add(recordMap1);
source.add(recordMap2);

@ -914,7 +915,7 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
assertEquals(6, snapshots.get(1).getSnapshotDocCount());
String queryString = queryBuilder.getValue().toString();
assertTrue(queryString.matches("(?s).*snapshotId.*value. : .snappyId.*description.*value. : .description1.*"));
assertTrue(queryString.matches("(?s).*snapshot_id.*value. : .snappyId.*description.*value. : .description1.*"));
}
public void testMergePartitionScoresIntoBucket() throws InterruptedException, ExecutionException {

@ -1009,8 +1010,8 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
.addIndicesExistsResponse(ElasticsearchJobProvider.PRELERT_USAGE_INDEX, true)
.prepareGet(INDEX_NAME, CategorizerState.TYPE, "1", categorizerStateGetResponse1)
.prepareGet(INDEX_NAME, CategorizerState.TYPE, "2", categorizerStateGetResponse2)
.prepareGet(INDEX_NAME, ModelState.TYPE, "123_1", modelStateGetResponse1)
.prepareGet(INDEX_NAME, ModelState.TYPE, "123_2", modelStateGetResponse2);
.prepareGet(INDEX_NAME, ModelState.TYPE.getPreferredName(), "123_1", modelStateGetResponse1)
.prepareGet(INDEX_NAME, ModelState.TYPE.getPreferredName(), "123_2", modelStateGetResponse2);
ElasticsearchJobProvider provider = createProvider(clientBuilder.build());
@ -92,7 +92,7 @@ public class ElasticsearchMappingsTests extends ESTestCase {
overridden.add(Job.TYPE);
overridden.add(ListDocument.TYPE.getPreferredName());
overridden.add(ModelDebugOutput.TYPE.getPreferredName());
overridden.add(ModelState.TYPE);
overridden.add(ModelState.TYPE.getPreferredName());
overridden.add(ModelSnapshot.TYPE.getPreferredName());
overridden.add(ModelSizeStats.TYPE.getPreferredName());
overridden.add(Quantiles.TYPE.getPreferredName());
@ -31,7 +31,7 @@ public class InfluencersQueryBuilderTests extends ESTestCase {
.anomalyScoreThreshold(50.0d)
.start("1000")
.end("2000")
.sortField("anomalyScore")
.sortField("anomaly_score")
.sortDescending(true)
.build();

@ -41,7 +41,7 @@ public class InfluencersQueryBuilderTests extends ESTestCase {
assertEquals(50.0d, query.getAnomalyScoreFilter(), 0.00001);
assertEquals("1000", query.getStart());
assertEquals("2000", query.getEnd());
assertEquals("anomalyScore", query.getSortField());
assertEquals("anomaly_score", query.getSortField());
assertTrue(query.isSortDescending());
}
@ -69,20 +69,20 @@ public class JobResultsPersisterTests extends ESTestCase {
assertEquals(2, list.size());
String s = list.get(0).string();
assertTrue(s.matches(".*anomalyScore.:99\\.9.*"));
assertTrue(s.matches(".*initialAnomalyScore.:88\\.8.*"));
assertTrue(s.matches(".*maxNormalizedProbability.:42\\.0.*"));
assertTrue(s.matches(".*recordCount.:1.*"));
assertTrue(s.matches(".*eventCount.:57.*"));
assertTrue(s.matches(".*bucketSpan.:123456.*"));
assertTrue(s.matches(".*processingTimeMs.:8888.*"));
assertTrue(s.matches(".*anomaly_score.:99\\.9.*"));
assertTrue(s.matches(".*initial_anomaly_score.:88\\.8.*"));
assertTrue(s.matches(".*max_normalized_probability.:42\\.0.*"));
assertTrue(s.matches(".*record_count.:1.*"));
assertTrue(s.matches(".*event_count.:57.*"));
assertTrue(s.matches(".*bucket_span.:123456.*"));
assertTrue(s.matches(".*processing_time_ms.:8888.*"));
s = list.get(1).string();
assertTrue(s.matches(".*probability.:0\\.0054.*"));
assertTrue(s.matches(".*influencerFieldName.:.biOne.*"));
assertTrue(s.matches(".*initialAnomalyScore.:18\\.12.*"));
assertTrue(s.matches(".*anomalyScore.:14\\.15.*"));
assertTrue(s.matches(".*rawAnomalyScore.:19\\.19.*"));
assertTrue(s.matches(".*influencer_field_name.:.biOne.*"));
assertTrue(s.matches(".*initial_anomaly_score.:18\\.12.*"));
assertTrue(s.matches(".*anomaly_score.:14\\.15.*"));
assertTrue(s.matches(".*raw_anomaly_score.:19\\.19.*"));
}
public void testPersistRecords() throws IOException {

@ -127,24 +127,24 @@ public class JobResultsPersisterTests extends ESTestCase {
assertEquals(1, captured.size());
String s = captured.get(0).string();
assertTrue(s.matches(".*detectorIndex.:3.*"));
assertTrue(s.matches(".*detector_index.:3.*"));
assertTrue(s.matches(".*\"probability\":0\\.1.*"));
assertTrue(s.matches(".*\"anomalyScore\":99\\.8.*"));
assertTrue(s.matches(".*\"normalizedProbability\":0\\.005.*"));
assertTrue(s.matches(".*initialNormalizedProbability.:23.4.*"));
assertTrue(s.matches(".*bucketSpan.:42.*"));
assertTrue(s.matches(".*byFieldName.:.byName.*"));
assertTrue(s.matches(".*byFieldValue.:.byValue.*"));
assertTrue(s.matches(".*correlatedByFieldValue.:.testCorrelations.*"));
assertTrue(s.matches(".*\"anomaly_score\":99\\.8.*"));
assertTrue(s.matches(".*\"normalized_probability\":0\\.005.*"));
assertTrue(s.matches(".*initial_normalized_probability.:23.4.*"));
assertTrue(s.matches(".*bucket_span.:42.*"));
assertTrue(s.matches(".*by_field_name.:.byName.*"));
assertTrue(s.matches(".*by_field_value.:.byValue.*"));
assertTrue(s.matches(".*correlated_by_field_value.:.testCorrelations.*"));
assertTrue(s.matches(".*typical.:.0\\.44,998765\\.3.*"));
assertTrue(s.matches(".*actual.:.5\\.0,5\\.1.*"));
assertTrue(s.matches(".*fieldName.:.testFieldName.*"));
assertTrue(s.matches(".*field_name.:.testFieldName.*"));
assertTrue(s.matches(".*function.:.testFunction.*"));
assertTrue(s.matches(".*functionDescription.:.testDescription.*"));
assertTrue(s.matches(".*partitionFieldName.:.partName.*"));
assertTrue(s.matches(".*partitionFieldValue.:.partValue.*"));
assertTrue(s.matches(".*overFieldName.:.overName.*"));
assertTrue(s.matches(".*overFieldValue.:.overValue.*"));
assertTrue(s.matches(".*function_description.:.testDescription.*"));
assertTrue(s.matches(".*partition_field_name.:.partName.*"));
assertTrue(s.matches(".*partition_field_value.:.partValue.*"));
assertTrue(s.matches(".*over_field_name.:.overName.*"));
assertTrue(s.matches(".*over_field_value.:.overValue.*"));
}
public void testPersistInfluencers() throws IOException {

@ -170,9 +170,9 @@ public class JobResultsPersisterTests extends ESTestCase {
String s = captured.get(0).string();
assertTrue(s.matches(".*probability.:0\\.4.*"));
assertTrue(s.matches(".*influencerFieldName.:.infName1.*"));
assertTrue(s.matches(".*influencerFieldValue.:.infValue1.*"));
assertTrue(s.matches(".*initialAnomalyScore.:55\\.5.*"));
assertTrue(s.matches(".*anomalyScore.:16\\.0.*"));
assertTrue(s.matches(".*influencer_field_name.:.infName1.*"));
assertTrue(s.matches(".*influencer_field_value.:.infValue1.*"));
assertTrue(s.matches(".*initial_anomaly_score.:55\\.5.*"));
assertTrue(s.matches(".*anomaly_score.:16\\.0.*"));
}
}
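Aside, not part of this commit: the persister assertions above probe the serialized result with regexes keyed on the new snake_case field names. A pure-JDK sketch of the same probe against an illustrative sample string:

public class PersistedJsonRegexSketch {
    public static void main(String[] args) {
        // Illustrative fragment shaped like the serialized bucket in the tests above.
        String s = "{\"anomaly_score\":99.9,\"initial_anomaly_score\":88.8,\"bucket_span\":123456}";
        // '.' stands in for the quote character, as in the assertions above.
        System.out.println(s.matches(".*anomaly_score.:99\\.9.*")); // true
        System.out.println(s.matches(".*bucket_span.:123456.*"));   // true
    }
}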
@ -102,7 +102,7 @@ public class AutodetectCommunicatorTests extends ESTestCase {
Job.Builder builder = new Job.Builder("foo");
DataDescription.Builder dd = new DataDescription.Builder();
dd.setTimeField("timeField");
dd.setTimeField("time_field");
Detector.Builder detector = new Detector.Builder("metric", "value");
detector.setByFieldName("host-metric");
@ -29,186 +29,201 @@ import java.util.stream.Collectors;
public class AutodetectResultsParserTests extends ESTestCase {
private static final double EPSILON = 0.000001;
public static final String METRIC_OUTPUT_SAMPLE = "[{\"bucket\": {\"jobId\":\"foo\",\"timestamp\":1359450000000,"
+ "\"bucketSpan\":22, \"records\":[],"
+ "\"maxNormalizedProbability\":0, \"anomalyScore\":0,\"recordCount\":0,\"eventCount\":806,\"bucketInfluencers\":["
+ "{\"jobId\":\"foo\",\"anomalyScore\":0, \"probability\":0.0, \"influencerFieldName\":\"bucketTime\","
+ "\"initialAnomalyScore\":0.0}]}},{\"quantiles\": {\"jobId\":\"foo\", \"quantileState\":\"[normaliser 1.1, normaliser 2.1]\"}}"
+ ",{\"bucket\": {\"jobId\":\"foo\",\"timestamp\":1359453600000,\"bucketSpan\":22,"
+ "\"records\":[{\"jobId\":\"foo\",\"probability\":0.0637541,"
+ "\"byFieldName\":\"airline\",\"byFieldValue\":\"JZA\", \"typical\":[1020.08],\"actual\":[1042.14],"
+ "\"fieldName\":\"responsetime\",\"function\":\"max\",\"partitionFieldName\":\"\",\"partitionFieldValue\":\"\"},"
+ "{\"jobId\":\"foo\",\"probability\":0.00748292,\"byFieldName\":\"airline\",\"byFieldValue\":\"AMX\", "
+ "\"typical\":[20.2137],\"actual\":[22.8855],\"fieldName\":\"responsetime\",\"function\":\"max\",\"partitionFieldName\":\"\","
+ " \"partitionFieldValue\":\"\"},{\"jobId\":\"foo\",\"probability\":0.023494,\"byFieldName\":\"airline\","
+ "\"byFieldValue\":\"DAL\", \"typical\":[382.177],\"actual\":[358.934],\"fieldName\":\"responsetime\",\"function\":\"min\","
+ "\"partitionFieldName\":\"\", \"partitionFieldValue\":\"\"},{\"jobId\":\"foo\",\"probability\":0.0473552,"
+ "\"byFieldName\":\"airline\",\"byFieldValue\":\"SWA\", \"typical\":[152.148],\"actual\":[96.6425],"
+ "\"fieldName\":\"responsetime\",\"function\":\"min\",\"partitionFieldName\":\"\",\"partitionFieldValue\":\"\"}],"
+ "\"initialAnomalyScore\":0.0140005, \"anomalyScore\":20.22688, \"maxNormalizedProbability\":10.5688, \"recordCount\":4,"
+ "\"eventCount\":820,\"bucketInfluencers\":[{\"jobId\":\"foo\", \"rawAnomalyScore\":"
+ "0.0140005, \"probability\":0.01,\"influencerFieldName\":\"bucketTime\",\"initialAnomalyScore\":20.22688"
+ ",\"anomalyScore\":20.22688} ,{\"jobId\":\"foo\",\"rawAnomalyScore\":0.005, \"probability\":0.03,"
+ "\"influencerFieldName\":\"foo\",\"initialAnomalyScore\":10.5,\"anomalyScore\":10.5}]}},{\"quantiles\": {\"jobId\":\"foo\","
+ "\"quantileState\":\"[normaliser 1.2, normaliser 2.2]\"}} ,{\"flush\": {\"id\":\"testing1\"}} ,"
+ "{\"quantiles\": {\"jobId\":\"foo\", \"quantileState\":\"[normaliser 1.3, normaliser 2.3]\"}} ]";
public static final String METRIC_OUTPUT_SAMPLE = "[{\"bucket\": {\"job_id\":\"foo\",\"timestamp\":1359450000000,"
+ "\"bucket_span\":22, \"records\":[],"
+ "\"max_normalized_probability\":0, \"anomaly_score\":0,\"record_count\":0,\"event_count\":806,\"bucket_influencers\":["
+ "{\"job_id\":\"foo\",\"anomaly_score\":0, \"probability\":0.0, \"influencer_field_name\":\"bucket_time\","
+ "\"initial_anomaly_score\":0.0}]}},{\"quantiles\": {\"job_id\":\"foo\", \"quantile_state\":\"[normaliser 1.1, normaliser 2" +
".1]\"}}"
+ ",{\"bucket\": {\"job_id\":\"foo\",\"timestamp\":1359453600000,\"bucket_span\":22,"
+ "\"records\":[{\"job_id\":\"foo\",\"probability\":0.0637541,"
+ "\"by_field_name\":\"airline\",\"by_field_value\":\"JZA\", \"typical\":[1020.08],\"actual\":[1042.14],"
+ "\"field_name\":\"responsetime\",\"function\":\"max\",\"partition_field_name\":\"\",\"partition_field_value\":\"\"},"
+ "{\"job_id\":\"foo\",\"probability\":0.00748292,\"by_field_name\":\"airline\",\"by_field_value\":\"AMX\", "
+ "\"typical\":[20.2137],\"actual\":[22.8855],\"field_name\":\"responsetime\",\"function\":\"max\"," +
"\"partition_field_name\":\"\","
+ " \"partition_field_value\":\"\"},{\"job_id\":\"foo\",\"probability\":0.023494,\"by_field_name\":\"airline\","
+ "\"by_field_value\":\"DAL\", \"typical\":[382.177],\"actual\":[358.934],\"field_name\":\"responsetime\",\"function\":\"min\","
+ "\"partition_field_name\":\"\", \"partition_field_value\":\"\"},{\"job_id\":\"foo\",\"probability\":0.0473552,"
+ "\"by_field_name\":\"airline\",\"by_field_value\":\"SWA\", \"typical\":[152.148],\"actual\":[96.6425],"
+ "\"field_name\":\"responsetime\",\"function\":\"min\",\"partition_field_name\":\"\",\"partition_field_value\":\"\"}],"
+ "\"initial_anomaly_score\":0.0140005, \"anomaly_score\":20.22688, \"max_normalized_probability\":10.5688, \"record_count\":4,"
+ "\"event_count\":820,\"bucket_influencers\":[{\"job_id\":\"foo\", \"raw_anomaly_score\":"
+ "0.0140005, \"probability\":0.01,\"influencer_field_name\":\"bucket_time\",\"initial_anomaly_score\":20.22688"
+ ",\"anomaly_score\":20.22688} ,{\"job_id\":\"foo\",\"raw_anomaly_score\":0.005, \"probability\":0.03,"
+ "\"influencer_field_name\":\"foo\",\"initial_anomaly_score\":10.5,\"anomaly_score\":10.5}]}},{\"quantiles\": " +
"{\"job_id\":\"foo\","
+ "\"quantile_state\":\"[normaliser 1.2, normaliser 2.2]\"}} ,{\"flush\": {\"id\":\"testing1\"}} ,"
+ "{\"quantiles\": {\"job_id\":\"foo\", \"quantile_state\":\"[normaliser 1.3, normaliser 2.3]\"}} ]";
public static final String POPULATION_OUTPUT_SAMPLE = "[{\"timestamp\":1379590200,\"records\":[{\"probability\":1.38951e-08,"
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"mail.google.com\",\"function\":\"max\","
+ "\"causes\":[{\"probability\":1.38951e-08,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"mail.google.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[9.19027e+07]}],"
+ "\"normalizedProbability\":100,\"anomalyScore\":44.7324},{\"probability\":3.86587e-07,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"armmf.adobe.com\",\"function\":\"max\",\"causes\":[{"
+ "\"probability\":3.86587e-07,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"armmf.adobe.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[3.20093e+07]}],"
+ "\"normalizedProbability\":89.5834,\"anomalyScore\":44.7324},{\"probability\":0.00500083,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"0.docs.google.com\",\"function\":\"max\",\"causes\":[{"
+ "\"probability\":0.00500083,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"0.docs.google.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[6.61812e+06]}],"
+ "\"normalizedProbability\":1.19856,\"anomalyScore\":44.7324},{\"probability\":0.0152333,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"emea.salesforce.com\",\"function\":\"max\",\"causes\":[{"
+ "\"probability\":0.0152333,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"emea.salesforce.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[5.36373e+06]}],"
+ "\"normalizedProbability\":0.303996,\"anomalyScore\":44.7324}],\"rawAnomalyScore\":1.30397,\"anomalyScore\":44.7324,"
+ "\"maxNormalizedProbability\":100,\"recordCount\":4,\"eventCount\":1235}" + ",{\"flush\":\"testing2\"}"
+ ",{\"timestamp\":1379590800,\"records\":[{\"probability\":1.9008e-08,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"mail.google.com\",\"function\":\"max\",\"causes\":[{"
+ "\"probability\":1.9008e-08,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"mail.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.1498e+08]}],"
+ "\"normalizedProbability\":93.6213,\"anomalyScore\":1.19192},{\"probability\":1.01013e-06,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"armmf.adobe.com\",\"function\":\"max\",\"causes\":[{"
+ "\"probability\":1.01013e-06,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"armmf.adobe.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[3.25808e+07]}],"
+ "\"normalizedProbability\":86.5825,\"anomalyScore\":1.19192},{\"probability\":0.000386185,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"0.docs.google.com\",\"function\":\"max\",\"causes\":[{"
+ "\"probability\":0.000386185,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"0.docs.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[3.22855e+06]}],"
+ "\"normalizedProbability\":17.1179,\"anomalyScore\":1.19192},{\"probability\":0.00208033,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"docs.google.com\",\"function\":\"max\",\"causes\":[{"
+ "\"probability\":0.00208033,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"docs.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.43328e+06]}],"
+ "\"normalizedProbability\":3.0692,\"anomalyScore\":1.19192},{\"probability\":0.00312988,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"booking2.airasia.com\",\"function\":\"max\",\"causes\":[{"
+ "\"probability\":0.00312988,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"booking2.airasia.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.15764e+06]}],"
+ "\"normalizedProbability\":1.99532,\"anomalyScore\":1.19192},{\"probability\":0.00379229,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"www.facebook.com\",\"function\":\"max\",\"causes\":["
+ "{\"probability\":0.00379229,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"www.facebook.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.0443e+06]}],"
+ "\"normalizedProbability\":1.62352,\"anomalyScore\":1.19192},{\"probability\":0.00623576,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"www.airasia.com\",\"function\":\"max\",\"causes\":["
+ "{\"probability\":0.00623576,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"www.airasia.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[792699]}],"
+ "\"normalizedProbability\":0.935134,\"anomalyScore\":1.19192},{\"probability\":0.00665308,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"www.google.com\",\"function\":\"max\",\"causes\":["
+ "{\"probability\":0.00665308,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"www.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[763985]}],"
+ "\"normalizedProbability\":0.868119,\"anomalyScore\":1.19192},{\"probability\":0.00709315,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"0.drive.google.com\",\"function\":\"max\",\"causes\":[{"
+ "\"probability\":0.00709315,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"0.drive.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[736442]}],"
+ "\"normalizedProbability\":0.805994,\"anomalyScore\":1.19192},{\"probability\":0.00755789,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"resources2.news.com.au\",\"function\":\"max\",\"causes\":[{"
+ "\"probability\":0.00755789,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"resources2.news.com.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[709962]}],"
+ "\"normalizedProbability\":0.748239,\"anomalyScore\":1.19192},{\"probability\":0.00834974,\"fieldName\":"
+ "\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"www.calypso.net.au\",\"function\":\"max\","
+ "\"causes\":[{\"probability\":0.00834974,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"www.calypso.net.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[669968]}],"
+ "\"normalizedProbability\":0.664644,\"anomalyScore\":1.19192},{\"probability\":0.0107711,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"ad.yieldmanager.com\",\"function\":\"max\",\"causes\":[{"
+ "\"probability\":0.0107711,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"ad.yieldmanager.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[576067]}],"
+ "\"normalizedProbability\":0.485277,\"anomalyScore\":1.19192},{\"probability\":0.0123367,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"www.google-analytics.com\",\"function\":\"max\",\"causes\":[{"
+ "\"probability\":0.0123367,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
+ "\"overFieldValue\":\"www.google-analytics.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[530594]}],"
+ "\"normalizedProbability\":0.406783,\"anomalyScore\":1.19192},{\"probability\":0.0125647,\"fieldName\":\"sum_cs_bytes_\","
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"bs.serving-sys.com\",\"function\":\"max\",\"causes\":[{"
+ "\"probability\":0.0125647,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"bs.serving-sys.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[524690]}],"
|
||||
+ "\"normalizedProbability\":0.396986,\"anomalyScore\":1.19192},{\"probability\":0.0141652,\"fieldName\":\"sum_cs_bytes_\","
|
||||
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"www.google.com.au\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.0141652,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"www.google.com.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[487328]}],"
|
||||
+ "\"normalizedProbability\":0.337075,\"anomalyScore\":1.19192},{\"probability\":0.0141742,\"fieldName\":\"sum_cs_bytes_\","
|
||||
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"resources1.news.com.au\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.0141742,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"resources1.news.com.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[487136]}],"
|
||||
+ "\"normalizedProbability\":0.336776,\"anomalyScore\":1.19192},{\"probability\":0.0145263,\"fieldName\":\"sum_cs_bytes_\","
|
||||
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"b.mail.google.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.0145263,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"b.mail.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[479766]}],"
|
||||
+ "\"normalizedProbability\":0.325385,\"anomalyScore\":1.19192},{\"probability\":0.0151447,\"fieldName\":\"sum_cs_bytes_\","
|
||||
+ "\"overFieldName\":\"cs_host\",\"overFieldValue\":\"www.rei.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.0151447,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"www.rei.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[467450]}],\"normalizedProbability\":0.306657,\"anomalyScore\":1.19192},"
|
||||
+ "{\"probability\":0.0164073,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"s3.amazonaws.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0164073,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"s3.amazonaws.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[444511]}],\"normalizedProbability\":0.272805,\"anomalyScore\":1.19192},"
|
||||
+ "{\"probability\":0.0201927,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"0-p-06-ash2.channel.facebook.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0201927,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"0-p-06-ash2.channel.facebook.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[389243]}],\"normalizedProbability\":0.196685,\"anomalyScore\":1.19192},"
|
||||
+ "{\"probability\":0.0218721,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"booking.airasia.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0218721,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"booking.airasia.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[369509]}],\"normalizedProbability\":0.171353,"
|
||||
+ "\"anomalyScore\":1.19192},{\"probability\":0.0242411,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"www.yammer.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0242411,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"www.yammer.com\",\"function\":\"max\","
|
||||
+ "\"typical\":[31356],\"actual\":[345295]}],\"normalizedProbability\":0.141585,\"anomalyScore\":1.19192},"
|
||||
+ "{\"probability\":0.0258232,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"safebrowsing-cache.google.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0258232,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"safebrowsing-cache.google.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[331051]}],\"normalizedProbability\":0.124748,\"anomalyScore\":1.19192},"
|
||||
+ "{\"probability\":0.0259695,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"fbcdn-profile-a.akamaihd.net\",\"function\":\"max\",\"causes\":[{\"probability\":0.0259695,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"fbcdn-profile-a.akamaihd.net\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[329801]}],\"normalizedProbability\":0.123294,\"anomalyScore\":1.19192},"
|
||||
+ "{\"probability\":0.0268874,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"www.oag.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0268874,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"www.oag.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[322200]}],\"normalizedProbability\":0.114537,"
|
||||
+ "\"anomalyScore\":1.19192},{\"probability\":0.0279146,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"booking.qatarairways.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0279146,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"booking.qatarairways.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[314153]}],\"normalizedProbability\":0.105419,\"anomalyScore\":1.19192},"
|
||||
+ "{\"probability\":0.0309351,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"resources3.news.com.au\",\"function\":\"max\",\"causes\":[{\"probability\":0.0309351,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"resources3.news.com.au\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[292918]}],\"normalizedProbability\":0.0821156,\"anomalyScore\":1.19192}"
|
||||
+ ",{\"probability\":0.0335204,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"resources0.news.com.au\",\"function\":\"max\",\"causes\":[{\"probability\":0.0335204,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"resources0.news.com.au\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[277136]}],\"normalizedProbability\":0.0655063,\"anomalyScore\":1.19192}"
|
||||
+ ",{\"probability\":0.0354927,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"www.southwest.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0354927,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"www.southwest.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[266310]}],\"normalizedProbability\":0.0544615,"
|
||||
+ "\"anomalyScore\":1.19192},{\"probability\":0.0392043,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"syndication.twimg.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0392043,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"syndication.twimg.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[248276]}],\"normalizedProbability\":0.0366913,\"anomalyScore\":1.19192}"
|
||||
+ ",{\"probability\":0.0400853,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\""
|
||||
+ ",\"overFieldValue\":\"mts0.google.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0400853,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"mts0.google.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[244381]}],\"normalizedProbability\":0.0329562,"
|
||||
+ "\"anomalyScore\":1.19192},{\"probability\":0.0407335,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"www.onthegotours.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0407335,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"www.onthegotours.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[241600]}],\"normalizedProbability\":0.0303116,"
|
||||
+ "\"anomalyScore\":1.19192},{\"probability\":0.0470889,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"chatenabled.mail.google.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0470889,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"chatenabled.mail.google.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[217573]}],\"normalizedProbability\":0.00823738,"
|
||||
+ "\"anomalyScore\":1.19192},{\"probability\":0.0491243,\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\","
|
||||
+ "\"overFieldValue\":\"googleads.g.doubleclick.net\",\"function\":\"max\",\"causes\":[{\"probability\":0.0491243,"
|
||||
+ "\"fieldName\":\"sum_cs_bytes_\",\"overFieldName\":\"cs_host\",\"overFieldValue\":\"googleads.g.doubleclick.net\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[210926]}],\"normalizedProbability\":0.00237509,"
|
||||
+ "\"anomalyScore\":1.19192}],\"rawAnomalyScore\":1.26918,\"anomalyScore\":1.19192,\"maxNormalizedProbability\":93.6213,"
|
||||
+ "\"recordCount\":34,\"eventCount\":1159}" + "]";
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"mail.google.com\"," +
|
||||
"\"function\":\"max\","
|
||||
+ "\"causes\":[{\"probability\":1.38951e-08,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"mail.google.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[9.19027e+07]}],"
|
||||
+ "\"normalized_probability\":100,\"anomaly_score\":44.7324},{\"probability\":3.86587e-07,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"armmf.adobe.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":3.86587e-07,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"armmf.adobe.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[3.20093e+07]}],"
|
||||
+ "\"normalized_probability\":89.5834,\"anomaly_score\":44.7324},{\"probability\":0.00500083,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"0.docs.google.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.00500083,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"0.docs.google.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[6.61812e+06]}],"
|
||||
+ "\"normalized_probability\":1.19856,\"anomaly_score\":44.7324},{\"probability\":0.0152333,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"emea.salesforce.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.0152333,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"emea.salesforce.com\",\"function\":\"max\",\"typical\":[101534],\"actual\":[5.36373e+06]}],"
|
||||
+ "\"normalized_probability\":0.303996,\"anomaly_score\":44.7324}],\"raw_anomaly_score\":1.30397,\"anomaly_score\":44.7324,"
|
||||
+ "\"max_normalized_probability\":100,\"record_count\":4,\"event_count\":1235}" + ",{\"flush\":\"testing2\"}"
|
||||
+ ",{\"timestamp\":1379590800,\"records\":[{\"probability\":1.9008e-08,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"mail.google.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":1.9008e-08,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"mail.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.1498e+08]}],"
|
||||
+ "\"normalized_probability\":93.6213,\"anomaly_score\":1.19192},{\"probability\":1.01013e-06,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"armmf.adobe.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":1.01013e-06,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"armmf.adobe.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[3.25808e+07]}],"
|
||||
+ "\"normalized_probability\":86.5825,\"anomaly_score\":1.19192},{\"probability\":0.000386185,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"0.docs.google.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.000386185,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"0.docs.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[3.22855e+06]}],"
|
||||
+ "\"normalized_probability\":17.1179,\"anomaly_score\":1.19192},{\"probability\":0.00208033,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"docs.google.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.00208033,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"docs.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.43328e+06]}],"
|
||||
+ "\"normalized_probability\":3.0692,\"anomaly_score\":1.19192},{\"probability\":0.00312988,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"booking2.airasia.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.00312988,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"booking2.airasia.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.15764e+06]}],"
|
||||
+ "\"normalized_probability\":1.99532,\"anomaly_score\":1.19192},{\"probability\":0.00379229,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.facebook.com\",\"function\":\"max\",\"causes\":["
|
||||
+ "{\"probability\":0.00379229,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"www.facebook.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[1.0443e+06]}],"
|
||||
+ "\"normalized_probability\":1.62352,\"anomaly_score\":1.19192},{\"probability\":0.00623576,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.airasia.com\",\"function\":\"max\",\"causes\":["
|
||||
+ "{\"probability\":0.00623576,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"www.airasia.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[792699]}],"
|
||||
+ "\"normalized_probability\":0.935134,\"anomaly_score\":1.19192},{\"probability\":0.00665308,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.google.com\",\"function\":\"max\",\"causes\":["
|
||||
+ "{\"probability\":0.00665308,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"www.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[763985]}],"
|
||||
+ "\"normalized_probability\":0.868119,\"anomaly_score\":1.19192},{\"probability\":0.00709315,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"0.drive.google.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.00709315,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"0.drive.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[736442]}],"
|
||||
+ "\"normalized_probability\":0.805994,\"anomaly_score\":1.19192},{\"probability\":0.00755789,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"resources2.news.com.au\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.00755789,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"resources2.news.com.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[709962]}],"
|
||||
+ "\"normalized_probability\":0.748239,\"anomaly_score\":1.19192},{\"probability\":0.00834974,\"field_name\":"
|
||||
+ "\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.calypso.net.au\",\"function\":\"max\","
|
||||
+ "\"causes\":[{\"probability\":0.00834974,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"www.calypso.net.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[669968]}],"
|
||||
+ "\"normalized_probability\":0.664644,\"anomaly_score\":1.19192},{\"probability\":0.0107711,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"ad.yieldmanager.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.0107711,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"ad.yieldmanager.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[576067]}],"
|
||||
+ "\"normalized_probability\":0.485277,\"anomaly_score\":1.19192},{\"probability\":0.0123367,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.google-analytics.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.0123367,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"www.google-analytics.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[530594]}],"
|
||||
+ "\"normalized_probability\":0.406783,\"anomaly_score\":1.19192},{\"probability\":0.0125647,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"bs.serving-sys.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.0125647,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"bs.serving-sys.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[524690]}],"
|
||||
+ "\"normalized_probability\":0.396986,\"anomaly_score\":1.19192},{\"probability\":0.0141652,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.google.com.au\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.0141652,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"www.google.com.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[487328]}],"
|
||||
+ "\"normalized_probability\":0.337075,\"anomaly_score\":1.19192},{\"probability\":0.0141742,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"resources1.news.com.au\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.0141742,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"resources1.news.com.au\",\"function\":\"max\",\"typical\":[31356],\"actual\":[487136]}],"
|
||||
+ "\"normalized_probability\":0.336776,\"anomaly_score\":1.19192},{\"probability\":0.0145263,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"b.mail.google.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.0145263,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"b.mail.google.com\",\"function\":\"max\",\"typical\":[31356],\"actual\":[479766]}],"
|
||||
+ "\"normalized_probability\":0.325385,\"anomaly_score\":1.19192},{\"probability\":0.0151447,\"field_name\":\"sum_cs_bytes_\","
|
||||
+ "\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.rei.com\",\"function\":\"max\",\"causes\":[{"
|
||||
+ "\"probability\":0.0151447,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.rei" +
|
||||
".com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[467450]}],\"normalized_probability\":0.306657,\"anomaly_score\":1" +
|
||||
".19192},"
|
||||
+ "{\"probability\":0.0164073,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"s3.amazonaws.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0164073,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"s3.amazonaws.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[444511]}],\"normalized_probability\":0.272805,\"anomaly_score\":1" +
|
||||
".19192},"
|
||||
+ "{\"probability\":0.0201927,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"0-p-06-ash2.channel.facebook.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0201927,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"0-p-06-ash2.channel.facebook.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[389243]}],\"normalized_probability\":0.196685,\"anomaly_score\":1" +
|
||||
".19192},"
|
||||
+ "{\"probability\":0.0218721,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"booking.airasia.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0218721,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"booking.airasia.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[369509]}],\"normalized_probability\":0.171353,"
|
||||
+ "\"anomaly_score\":1.19192},{\"probability\":0.0242411,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"www.yammer.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0242411,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.yammer.com\"," +
|
||||
"\"function\":\"max\","
|
||||
+ "\"typical\":[31356],\"actual\":[345295]}],\"normalized_probability\":0.141585,\"anomaly_score\":1.19192},"
|
||||
+ "{\"probability\":0.0258232,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"safebrowsing-cache.google.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0258232,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"safebrowsing-cache.google.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[331051]}],\"normalized_probability\":0.124748,\"anomaly_score\":1" +
|
||||
".19192},"
|
||||
+ "{\"probability\":0.0259695,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"fbcdn-profile-a.akamaihd.net\",\"function\":\"max\",\"causes\":[{\"probability\":0.0259695,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"fbcdn-profile-a.akamaihd.net\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[329801]}],\"normalized_probability\":0.123294,\"anomaly_score\":1" +
|
||||
".19192},"
|
||||
+ "{\"probability\":0.0268874,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"www.oag.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0268874,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.oag.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[322200]}],\"normalized_probability\":0.114537,"
|
||||
+ "\"anomaly_score\":1.19192},{\"probability\":0.0279146,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"booking.qatarairways.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0279146,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"booking.qatarairways.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[314153]}],\"normalized_probability\":0.105419,\"anomaly_score\":1" +
|
||||
".19192},"
|
||||
+ "{\"probability\":0.0309351,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"resources3.news.com.au\",\"function\":\"max\",\"causes\":[{\"probability\":0.0309351,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"resources3.news.com.au\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[292918]}],\"normalized_probability\":0.0821156,\"anomaly_score\":1" +
|
||||
".19192}"
|
||||
+ ",{\"probability\":0.0335204,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"resources0.news.com.au\",\"function\":\"max\",\"causes\":[{\"probability\":0.0335204,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"resources0.news.com.au\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[277136]}],\"normalized_probability\":0.0655063,\"anomaly_score\":1" +
|
||||
".19192}"
|
||||
+ ",{\"probability\":0.0354927,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"www.southwest.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0354927,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.southwest.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[266310]}],\"normalized_probability\":0.0544615,"
|
||||
+ "\"anomaly_score\":1.19192},{\"probability\":0.0392043,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"syndication.twimg.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0392043,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"syndication.twimg.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[248276]}],\"normalized_probability\":0.0366913,\"anomaly_score\":1" +
|
||||
".19192}"
|
||||
+ ",{\"probability\":0.0400853,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\""
|
||||
+ ",\"over_field_value\":\"mts0.google.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0400853,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"mts0.google.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[244381]}],\"normalized_probability\":0.0329562,"
|
||||
+ "\"anomaly_score\":1.19192},{\"probability\":0.0407335,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"www.onthegotours.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0407335,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"www.onthegotours.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[241600]}],\"normalized_probability\":0.0303116,"
|
||||
+ "\"anomaly_score\":1.19192},{\"probability\":0.0470889,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"chatenabled.mail.google.com\",\"function\":\"max\",\"causes\":[{\"probability\":0.0470889,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"chatenabled.mail.google.com\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[217573]}],\"normalized_probability\":0.00823738,"
|
||||
+ "\"anomaly_score\":1.19192},{\"probability\":0.0491243,\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\","
|
||||
+ "\"over_field_value\":\"googleads.g.doubleclick.net\",\"function\":\"max\",\"causes\":[{\"probability\":0.0491243,"
|
||||
+ "\"field_name\":\"sum_cs_bytes_\",\"over_field_name\":\"cs_host\",\"over_field_value\":\"googleads.g.doubleclick.net\","
|
||||
+ "\"function\":\"max\",\"typical\":[31356],\"actual\":[210926]}],\"normalized_probability\":0.00237509,"
|
||||
+ "\"anomaly_score\":1.19192}],\"raw_anomaly_score\":1.26918,\"anomaly_score\":1.19192,\"max_normalized_probability\":93.6213,"
|
||||
+ "\"record_count\":34,\"event_count\":1159}" + "]";
|
||||
|
||||
public void testParser() throws IOException {
InputStream inputStream = new ByteArrayInputStream(METRIC_OUTPUT_SAMPLE.getBytes(StandardCharsets.UTF_8));

@@ -230,7 +245,7 @@ public class AutodetectResultsParserTests extends ESTestCase {
assertEquals(0.0, bucketInfluencers.get(0).getRawAnomalyScore(), EPSILON);
assertEquals(0.0, bucketInfluencers.get(0).getAnomalyScore(), EPSILON);
assertEquals(0.0, bucketInfluencers.get(0).getProbability(), EPSILON);
assertEquals("bucketTime", bucketInfluencers.get(0).getInfluencerFieldName());
assertEquals("bucket_time", bucketInfluencers.get(0).getInfluencerFieldName());

assertEquals(new Date(1359453600000L), buckets.get(1).getTimestamp());
assertEquals(4, buckets.get(1).getRecordCount());

@@ -241,7 +256,7 @@ public class AutodetectResultsParserTests extends ESTestCase {
assertEquals(0.0140005, bucketInfluencers.get(0).getRawAnomalyScore(), EPSILON);
assertEquals(20.22688, bucketInfluencers.get(0).getAnomalyScore(), EPSILON);
assertEquals(0.01, bucketInfluencers.get(0).getProbability(), EPSILON);
assertEquals("bucketTime", bucketInfluencers.get(0).getInfluencerFieldName());
assertEquals("bucket_time", bucketInfluencers.get(0).getInfluencerFieldName());
assertEquals(0.005, bucketInfluencers.get(1).getRawAnomalyScore(), EPSILON);
assertEquals(10.5, bucketInfluencers.get(1).getAnomalyScore(), EPSILON);
assertEquals(0.03, bucketInfluencers.get(1).getProbability(), EPSILON);

@@ -340,7 +355,7 @@ public class AutodetectResultsParserTests extends ESTestCase {
}

public void testParse_GivenModelSizeStats() throws ElasticsearchParseException, IOException {
String json = "[{\"modelSizeStats\": {\"jobId\": \"foo\", \"modelBytes\":300}}]";
String json = "[{\"model_size_stats\": {\"job_id\": \"foo\", \"model_bytes\":300}}]";
InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8));

AutodetectResultsParser parser = new AutodetectResultsParser(Settings.EMPTY, () -> ParseFieldMatcher.STRICT);

@@ -352,7 +367,7 @@ public class AutodetectResultsParserTests extends ESTestCase {
}

public void testParse_GivenCategoryDefinition() throws IOException {
String json = "[{\"categoryDefinition\": {\"jobId\":\"foo\", \"categoryId\":18}}]";
String json = "[{\"category_definition\": {\"job_id\":\"foo\", \"category_id\":18}}]";
InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8));
AutodetectResultsParser parser = new AutodetectResultsParser(Settings.EMPTY, () -> ParseFieldMatcher.STRICT);
List<AutodetectResult> results = new ArrayList<>();
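Every rename in the fixtures and assertions above follows the same mechanical rule: lower-case the identifier and insert an underscore before each formerly capital letter. As a rough illustration only (this helper is not part of the commit, and the name toSnakeCase is invented here), the transformation amounts to:

// Illustrative only: the camelCase -> snake_case rule applied throughout this
// commit, e.g. "maxNormalizedProbability" -> "max_normalized_probability".
static String toSnakeCase(String camelCase) {
    StringBuilder snake = new StringBuilder(camelCase.length() + 4);
    for (char c : camelCase.toCharArray()) {
        if (Character.isUpperCase(c)) {
            snake.append('_').append(Character.toLowerCase(c)); // split at a word boundary
        } else {
            snake.append(c);
        }
    }
    return snake.toString();
}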
@@ -63,7 +63,7 @@ public class AbstractDataToProcessWriterTests extends ESTestCase {

public void testInputFields_MulitpleInputsSingleOutput() throws IOException {
DataDescription.Builder dd = new DataDescription.Builder();
dd.setTimeField("timeField");
dd.setTimeField("time_field");

Detector.Builder detector = new Detector.Builder("metric", "value");
detector.setByFieldName("host-metric");

@@ -81,12 +81,12 @@ public class AbstractDataToProcessWriterTests extends ESTestCase {

Set<String> inputFields = new HashSet<>(writer.inputFields());
assertEquals(4, inputFields.size());
assertTrue(inputFields.contains("timeField"));
assertTrue(inputFields.contains("time_field"));
assertTrue(inputFields.contains("value"));
assertTrue(inputFields.contains("host"));
assertTrue(inputFields.contains("metric"));

String[] header = { "timeField", "metric", "host", "value" };
String[] header = { "time_field", "metric", "host", "value" };
writer.buildTransformsAndWriteHeader(header);
List<Transform> trs = writer.postDateTransforms;
assertEquals(1, trs.size());

@@ -101,14 +101,14 @@ public class AbstractDataToProcessWriterTests extends ESTestCase {

Map<String, Integer> inputIndexes = writer.getInputFieldIndexes();
assertEquals(4, inputIndexes.size());
Assert.assertEquals(new Integer(0), inputIndexes.get("timeField"));
Assert.assertEquals(new Integer(0), inputIndexes.get("time_field"));
Assert.assertEquals(new Integer(1), inputIndexes.get("metric"));
Assert.assertEquals(new Integer(2), inputIndexes.get("host"));
Assert.assertEquals(new Integer(3), inputIndexes.get("value"));

Map<String, Integer> outputIndexes = writer.getOutputFieldIndexes();
assertEquals(4, outputIndexes.size());
Assert.assertEquals(new Integer(0), outputIndexes.get("timeField"));
Assert.assertEquals(new Integer(0), outputIndexes.get("time_field"));
Assert.assertEquals(new Integer(1), outputIndexes.get("host-metric"));
Assert.assertEquals(new Integer(2), outputIndexes.get("value"));
Assert.assertEquals(new Integer(3), outputIndexes.get(LengthEncodedWriter.CONTROL_FIELD_NAME));

@@ -121,7 +121,7 @@ public class AbstractDataToProcessWriterTests extends ESTestCase {

public void testInputFields_SingleInputMulitpleOutputs() throws IOException {
DataDescription.Builder dd = new DataDescription.Builder();
dd.setTimeField("timeField");
dd.setTimeField("time_field");

Detector.Builder detector = new Detector.Builder("metric", "value");
detector.setByFieldName(TransformType.DOMAIN_SPLIT.defaultOutputNames().get(0));

@@ -139,18 +139,18 @@ public class AbstractDataToProcessWriterTests extends ESTestCase {
Set<String> inputFields = new HashSet<>(writer.inputFields());

assertEquals(3, inputFields.size());
assertTrue(inputFields.contains("timeField"));
assertTrue(inputFields.contains("time_field"));
assertTrue(inputFields.contains("value"));
assertTrue(inputFields.contains("domain"));

String[] header = { "timeField", "domain", "value" };
String[] header = { "time_field", "domain", "value" };
writer.buildTransformsAndWriteHeader(header);
List<Transform> trs = writer.postDateTransforms;
assertEquals(1, trs.size());

Map<String, Integer> inputIndexes = writer.getInputFieldIndexes();
assertEquals(3, inputIndexes.size());
Assert.assertEquals(new Integer(0), inputIndexes.get("timeField"));
Assert.assertEquals(new Integer(0), inputIndexes.get("time_field"));
Assert.assertEquals(new Integer(1), inputIndexes.get("domain"));
Assert.assertEquals(new Integer(2), inputIndexes.get("value"));

@@ -161,7 +161,7 @@ public class AbstractDataToProcessWriterTests extends ESTestCase {
Collections.sort(allOutputs); // outputs are in alphabetical order

assertEquals(5, outputIndexes.size()); // time + control field + outputs
Assert.assertEquals(new Integer(0), outputIndexes.get("timeField"));
Assert.assertEquals(new Integer(0), outputIndexes.get("time_field"));

int count = 1;
for (String f : allOutputs) {

@@ -191,7 +191,7 @@ public class AbstractDataToProcessWriterTests extends ESTestCase {

public void testInputFields_SingleInputMulitpleOutputs_OnlyOneOutputUsed() throws IOException {
DataDescription.Builder dd = new DataDescription.Builder();
dd.setTimeField("timeField");
dd.setTimeField("time_field");

Detector.Builder detector = new Detector.Builder("metric", "value");
detector.setByFieldName(TransformType.DOMAIN_SPLIT.defaultOutputNames().get(0));

@@ -208,18 +208,18 @@ public class AbstractDataToProcessWriterTests extends ESTestCase {
Set<String> inputFields = new HashSet<>(writer.inputFields());

assertEquals(3, inputFields.size());
assertTrue(inputFields.contains("timeField"));
assertTrue(inputFields.contains("time_field"));
assertTrue(inputFields.contains("value"));
assertTrue(inputFields.contains("domain"));

String[] header = { "timeField", "domain", "value" };
String[] header = { "time_field", "domain", "value" };
writer.buildTransformsAndWriteHeader(header);
List<Transform> trs = writer.postDateTransforms;
assertEquals(1, trs.size());

Map<String, Integer> inputIndexes = writer.getInputFieldIndexes();
assertEquals(3, inputIndexes.size());
Assert.assertEquals(new Integer(0), inputIndexes.get("timeField"));
Assert.assertEquals(new Integer(0), inputIndexes.get("time_field"));
Assert.assertEquals(new Integer(1), inputIndexes.get("domain"));
Assert.assertEquals(new Integer(2), inputIndexes.get("value"));

@@ -231,7 +231,7 @@ public class AbstractDataToProcessWriterTests extends ESTestCase {
Collections.sort(allOutputs); // outputs are in alphabetical order

assertEquals(4, outputIndexes.size()); // time + control field + outputs
Assert.assertEquals(new Integer(0), outputIndexes.get("timeField"));
Assert.assertEquals(new Integer(0), outputIndexes.get("time_field"));

int count = 1;
for (String f : allOutputs) {
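The writer assertions above pin each renamed header column to a fixed position. A minimal sketch of that header-to-index mapping (the method and variable names here are illustrative, not the writer's internals):

import java.util.HashMap;
import java.util.Map;

static Map<String, Integer> headerIndexes() {
    // Same column order the tests assert on:
    // "time_field" -> 0, "metric" -> 1, "host" -> 2, "value" -> 3.
    String[] header = { "time_field", "metric", "host", "value" };
    Map<String, Integer> indexes = new HashMap<>();
    for (int i = 0; i < header.length; i++) {
        indexes.put(header[i], i);
    }
    return indexes;
}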
@@ -227,7 +227,7 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
}

public void testEquals_GivenEqualBuckets() {
AnomalyRecord record = new AnomalyRecord("jobId");
AnomalyRecord record = new AnomalyRecord("job_id");
BucketInfluencer bucketInfluencer = new BucketInfluencer("foo");
Date date = new Date();
@@ -14,11 +14,11 @@ public class PutModelSnapshotDescriptionTests extends ESTestCase {
public void testUpdateDescription_GivenMissingArg() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> new PutModelSnapshotDescriptionAction.Request(null, "foo", "bar"));
assertEquals("[jobId] must not be null.", e.getMessage());
assertEquals("[job_id] must not be null.", e.getMessage());

e = expectThrows(IllegalArgumentException.class,
() -> new PutModelSnapshotDescriptionAction.Request("foo", null, "bar"));
assertEquals("[snapshotId] must not be null.", e.getMessage());
assertEquals("[snapshot_id] must not be null.", e.getMessage());

e = expectThrows(IllegalArgumentException.class,
() -> new PutModelSnapshotDescriptionAction.Request("foo", "foo", null));
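A minimal sketch (assumed shape, not the actual ExceptionsHelper source) of the null check these assertions exercise; passing a ParseField's preferred name is what makes the message read "[job_id] must not be null." rather than "[jobId] must not be null.":

// Assumed shape of the helper under test; only the message format is taken
// from the assertions above.
static <T> T requireNonNull(T value, String paramName) {
    if (value == null) {
        throw new IllegalArgumentException("[" + paramName + "] must not be null.");
    }
    return value;
}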
@@ -16,21 +16,21 @@
}
},
"params": {
"calcInterim": {
"calc_interim": {
"type": "boolean",
"description": "Calculates interim results for the most recent bucket or all buckets within the latency period"
},
"start": {
"type": "string",
"description": "When used in conjunction with calcInterim, specifies the range of buckets on which to calculate interim results"
"description": "When used in conjunction with calc_interim, specifies the range of buckets on which to calculate interim results"
},
"end": {
"type": "string",
"description": "When used in conjunction with calcInterim, specifies the range of buckets on which to calculate interim results"
"description": "When used in conjunction with calc_interim, specifies the range of buckets on which to calculate interim results"
},
"advanceTime": {
"advance_time": {
"type": "string",
"description": "Setting this tells the Engine API that no data prior to advanceTime is expected"
"description": "Setting this tells the Engine API that no data prior to advance_time is expected"
}
}
},
@@ -23,11 +23,11 @@
"type": "boolean",
"description" : "Include anomaly records"
},
"includeInterim": {
"include_interim": {
"type": "boolean",
"description" : "Include interim results"
},
"partitionValue": {
"partition_value": {
"type": "string",
"description": "If expand is true filter anomaly records by the partitionFieldValue"
},

@@ -47,11 +47,11 @@
"type": "string",
"description" : "End time filter for buckets"
},
"anomalyScore": {
"anomaly_score": {
"type": "double",
"description": "Filter for the most anomalous buckets"
},
"maxNormalizedProbability": {
"max_normalized_probability": {
"type": "double",
"description": "Filter for buckets containing the most anomalous records"
}
@@ -11,7 +11,7 @@
}
},
"params": {
"includeInterim": {
"include_interim": {
"type": "boolean",
"description" : "Include interim results"
},

@@ -31,7 +31,7 @@
"type": "string",
"description": "end timestamp for the requested influencers"
},
"anomalyScore": {
"anomaly_score": {
"type": "double",
"description": "anomaly score threshold for the requested influencers"
},
@@ -14,7 +14,7 @@
"expand": {
"type": "boolean"
},
"includeInterim": {
"include_interim": {
"type": "boolean",
"description" : "Include interim results"
},

@@ -34,13 +34,13 @@
"type": "string",
"description" : "End time filter for records"
},
"anomalyScore": {
"anomaly_score": {
"type": "double"
},
"maxNormalizedProbability": {
"max_normalized_probability": {
"type": "double"
},
"partitionValue": {
"partition_value": {
"type": "string",
"description": "Filter anomaly records by the partitionFieldValue"
},
@@ -12,15 +12,15 @@
}
},
"params": {
"ignoreDowntime": {
"ignore_downtime": {
"type": "boolean",
"description": "Controls if gaps in data are treated as anomalous or as a maintenance window after a job re-start"
},
"resetStart": {
"reset_start": {
"type": "string",
"description": "Optional parameter to specify the start of the bucket resetting range"
},
"resetEnd": {
"reset_end": {
"type": "string",
"description": "Optional parameter to specify the end of the bucket resetting range"
}
@@ -16,7 +16,7 @@
"type": "date",
"description": "revert to a snapshot with a timestamp no later than this time"
},
"snapshotId": {
"snapshot_id": {
"type": "string",
"description": "the snapshot ID of the snapshot to revert to"
},

@@ -24,7 +24,7 @@
"type": "string",
"description": "the description of the snapshot to revert to"
},
"deleteInterveningResults": {
"delete_intervening_results": {
"type": "boolean",
"description": "should we reset the results back to the time of the snapshot?"
}
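A rough sketch, assuming the ParseField/ObjectParser wiring these request classes use elsewhere (the setter name setCalcInterim is hypothetical): declaring the snake_case string once keeps the Java request parser and the REST spec entries above in agreement.

// Hypothetical wiring; only the "calc_interim" name comes from the spec above.
public static final ParseField CALC_INTERIM = new ParseField("calc_interim");
private static final ObjectParser<Request, ParseFieldMatcherSupplier> PARSER = new ObjectParser<>(NAME, Request::new);
static {
    PARSER.declareBoolean(Request::setCalcInterim, CALC_INTERIM);
}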
@@ -3,40 +3,40 @@ setup:
xpack.prelert.put_job:
body: >
{
"jobId":"foo",
"job_id": "foo",
"description":"Analysis of response time by airline",
"analysisConfig" : {
"bucketSpan":3600,
"detectors" :[{"function":"metric","fieldName":"responsetime","byFieldName":"airline"}]
"analysis_config" : {
"bucket_span" : 3600,
"detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
},
"dataDescription" : {
"fieldDelimiter":",",
"timeField":"time",
"timeFormat":"yyyy-MM-dd HH:mm:ssX"
"data_description" : {
"field_delimiter":",",
"time_field":"time",
"time_format":"yyyy-MM-dd HH:mm:ssX"
}
}

- do:
index:
index: prelertresults-foo
type: modelSnapshot
type: model_snapshot
id: "foo1"
body: >
{
"jobId": "foo",
"job_id": "foo",
"timestamp": "2016-06-02T00:00:00Z",
"restorePriority": "1",
"snapshotId": "foo1",
"restore_priority": "1",
"snapshot_id": "foo1",
"description": "first",
"latestRecordTimeStamp": "2016-06-02T00:00:00Z",
"latestResultTimeStamp": "2016-06-02T00:00:00Z",
"snapshotDocCount": 2
"latest_record_time_stamp": "2016-06-02T00:00:00Z",
"latest_result_time_stamp": "2016-06-02T00:00:00Z",
"snapshot_doc_count": 2
}

- do:
index:
index: prelertresults-foo
type: modelState
type: model_state
id: "foo1_0"
body: >
{

@@ -46,7 +46,7 @@ setup:
- do:
index:
index: prelertresults-foo
type: modelState
type: model_state
id: "foo1_1"
body: >
{

@@ -56,17 +56,17 @@ setup:
- do:
index:
index: prelertresults-foo
type: modelSnapshot
type: model_snapshot
id: "foo2"
body: >
{
"jobId": "foo",
"job_id": "foo",
"timestamp": "2016-06-01T00:00:00Z",
"restorePriority": "2",
"snapshotId": "foo2",
"restore_priority": "2",
"snapshot_id": "foo2",
"description": "second",
"latestRecordTimeStamp": "2016-06-01T00:00:00Z",
"latestResultTimeStamp": "2016-06-01T00:00:00Z"
"latest_record_time_stamp": "2016-06-01T00:00:00Z",
"latest_result_time_stamp": "2016-06-01T00:00:00Z"
}

- do:

@@ -85,7 +85,7 @@ setup:
- do:
catch: param
xpack.prelert.delete_model_snapshot:
snapshotId: "foo"
snapshot_id: "foo"

---
"Test valid delete snapshot":

@@ -97,7 +97,7 @@ setup:
- do:
count:
index: prelertresults-foo
type: modelState
type: model_state

- match: { count: 2 }

@@ -115,12 +115,12 @@ setup:
xpack.prelert.get_model_snapshots:
job_id: "foo"
- match: { count: 1 }
- match: { modelSnapshots.0.snapshotId: "foo2"}
- match: { model_snapshots.0.snapshot_id: "foo2"}

- do:
count:
index: prelertresults-foo
type: modelState
type: model_state

- match: { count: 0 }

@@ -129,7 +129,7 @@ setup:
- do:
xpack.prelert.revert_model_snapshot:
job_id: "foo"
snapshotId: "foo2"
snapshot_id: "foo2"

- do:
catch: request
@@ -4,25 +4,25 @@ setup:
index: prelertresults-foo
body:
mappings:
modelSnapshot:
model_snapshot:
properties:
"timestamp":
type: date
"restorePriority":
"restore_priority":
type: integer
- do:
index:
index: prelertresults-foo
type: modelSnapshot
type: model_snapshot
id: "1"
body: { "jobId": "foo", "timestamp": "2016-06-02T00:00:00Z", "restorePriority": "1" }
body: { "job_id": "foo", "timestamp": "2016-06-02T00:00:00Z", "restore_priority": "1" }

- do:
index:
index: prelertresults-foo
type: modelSnapshot
type: model_snapshot
id: "2"
body: { "jobId": "foo", "timestamp": "2016-06-01T00:00:00Z", "restorePriority": "2" }
body: { "job_id": "foo", "timestamp": "2016-06-01T00:00:00Z", "restore_priority": "2" }

- do:
indices.refresh:

@@ -36,8 +36,8 @@ setup:

- match: { count: 2 }
- match: { modelSnapshots.0.restorePriority: 2 }
- match: { modelSnapshots.0.timestamp: 1464739200000 }
- match: { model_snapshots.0.restore_priority: 2 }
- match: { model_snapshots.0.timestamp: 1464739200000 }

---
"Test get model snapshots API with start/end":

@@ -49,8 +49,8 @@ setup:

- match: { count: 2 }
- match: { modelSnapshots.0.restorePriority: 2 }
- match: { modelSnapshots.0.timestamp: 1464739200000 }
- match: { model_snapshots.0.restore_priority: 2 }
- match: { model_snapshots.0.timestamp: 1464739200000 }

---
"Test get model snapshots API with ascending":

@@ -61,8 +61,8 @@ setup:

- match: { count: 2 }
- match: { modelSnapshots.0.restorePriority: 1 }
- match: { modelSnapshots.0.timestamp: 1464825600000 }
- match: { model_snapshots.0.restore_priority: 1 }
- match: { model_snapshots.0.timestamp: 1464825600000 }

---
"Test get model snapshots API with size":

@@ -73,9 +73,9 @@ setup:

- match: { count: 2 }
- match: { modelSnapshots.0.restorePriority: 2 }
- match: { modelSnapshots.0.timestamp: 1464739200000 }
- length: { modelSnapshots: 1 }
- match: { model_snapshots.0.restore_priority: 2 }
- match: { model_snapshots.0.timestamp: 1464739200000 }
- length: { model_snapshots: 1 }

---
"Test get model snapshots API with from":

@@ -86,6 +86,6 @@ setup:

- match: { count: 2 }
- match: { modelSnapshots.0.restorePriority: 1 }
- match: { modelSnapshots.0.timestamp: 1464825600000 }
- length: { modelSnapshots: 1 }
- match: { model_snapshots.0.restore_priority: 1 }
- match: { model_snapshots.0.timestamp: 1464825600000 }
- length: { model_snapshots: 1 }
@@ -4,19 +4,19 @@
       xpack.prelert.put_job:
         body: >
           {
-            "jobId":"farequote",
+            "job_id":"farequote",
             "description":"Analysis of response time by airline",
-            "analysisConfig" : {
-                "bucketSpan":3600,
-                "detectors" :[{"function":"metric","fieldName":"responsetime","byFieldName":"airline"}]
+            "analysis_config" : {
+                "bucket_span":3600,
+                "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
             },
-            "dataDescription" : {
-                "fieldDelimiter":",",
-                "timeField":"time",
-                "timeFormat":"yyyy-MM-dd HH:mm:ssX"
+            "data_description" : {
+                "field_delimiter":",",
+                "time_field":"time",
+                "time_format":"yyyy-MM-dd HH:mm:ssX"
             }
           }
-  - match: { jobId: "farequote" }
+  - match: { job_id: "farequote" }

   - do:
       indices.get:

@@ -28,13 +28,13 @@
         from: 0
         size: 100
   - match: { count: 1 }
-  - match: { jobs.0.config.jobId: "farequote" }
+  - match: { jobs.0.config.job_id: "farequote" }

   - do:
       xpack.prelert.get_jobs:
         job_id: "farequote"
   - match: { count: 1 }
-  - match: { jobs.0.config.jobId: "farequote" }
+  - match: { jobs.0.config.job_id: "farequote" }

   - do:
       xpack.prelert.delete_job:

@@ -64,34 +64,34 @@
       xpack.prelert.put_job:
         body: >
           {
-            "jobId":"farequote",
+            "job_id":"farequote",
             "description":"Analysis of response time by airline",
-            "analysisConfig" : {
-                "bucketSpan":3600,
-                "detectors" :[{"function":"metric","fieldName":"responsetime","byFieldName":"airline"}]
+            "analysis_config" : {
+                "bucket_span":3600,
+                "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
             },
-            "dataDescription" : {
-                "fieldDelimiter":",",
-                "timeField":"time",
-                "timeFormat":"yyyy-MM-dd HH:mm:ssX"
+            "data_description" : {
+                "field_delimiter":",",
+                "time_field":"time",
+                "time_format":"yyyy-MM-dd HH:mm:ssX"
             }
           }
-  - match: { jobId: "farequote" }
+  - match: { job_id: "farequote" }

   - do:
       catch: /resource_already_exists_exception/
       xpack.prelert.put_job:
         body: >
           {
-            "jobId":"farequote",
+            "job_id":"farequote",
             "description":"Analysis of response time by airline",
-            "analysisConfig" : {
-                "bucketSpan":3600,
-                "detectors" :[{"function":"metric","fieldName":"responsetime","byFieldName":"airline"}]
+            "analysis_config" : {
+                "bucket_span":3600,
+                "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
             },
-            "dataDescription" : {
-                "fieldDelimiter":",",
-                "timeField":"time",
-                "timeFormat":"yyyy-MM-dd HH:mm:ssX"
+            "data_description" : {
+                "field_delimiter":",",
+                "time_field":"time",
+                "time_format":"yyyy-MM-dd HH:mm:ssX"
             }
          }
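Read together, the added lines above give the job configuration in its new snake_case form. Consolidated here for readability (a sketch assembled from the + lines, using the same fixture values as the tests):

    {
      "job_id": "farequote",
      "description": "Analysis of response time by airline",
      "analysis_config": {
        "bucket_span": 3600,
        "detectors": [{"function": "metric", "field_name": "responsetime", "by_field_name": "airline"}]
      },
      "data_description": {
        "field_delimiter": ",",
        "time_field": "time",
        "time_format": "yyyy-MM-dd HH:mm:ssX"
      }
    }
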
@@ -6,7 +6,7 @@ setup:
           mappings:
             results:
               properties:
-                "jobId":
+                "job_id":
                   type: keyword
                 "result_type":
                   type: keyword

@@ -18,14 +18,14 @@ setup:
         index: prelertresults-farequote
         type: result
         id: "farequote_1464739200000_1"
-        body: { "jobId": "farequote", "result_type": "bucket", "timestamp": "2016-06-01T00:00:00Z", "bucketSpan":1 }
+        body: { "job_id": "farequote", "result_type": "bucket", "timestamp": "2016-06-01T00:00:00Z", "bucket_span":1 }

   - do:
       index:
         index: prelertresults-farequote
         type: result
         id: "farequote_1464739200000_2"
-        body: { "jobId": "farequote", "result_type": "bucket", "timestamp": "2016-08-01T00:00:00Z", "bucketSpan":1 }
+        body: { "job_id": "farequote", "result_type": "bucket", "timestamp": "2016-08-01T00:00:00Z", "bucket_span":1 }

   - do:
       indices.refresh:

@@ -41,7 +41,7 @@ setup:

   - match: { count: 1 }
   - match: { buckets.0.timestamp: 1464739200000 }
-  - match: { buckets.0.jobId: farequote}
+  - match: { buckets.0.job_id: farequote}
   - match: { buckets.0.result_type: bucket}

 ---

@@ -52,10 +52,10 @@ setup:

   - match: { count: 2 }
   - match: { buckets.0.timestamp: 1464739200000 }
-  - match: { buckets.0.jobId: farequote}
+  - match: { buckets.0.job_id: farequote}
   - match: { buckets.0.result_type: bucket}
   - match: { buckets.1.timestamp: 1470009600000 }
-  - match: { buckets.1.jobId: farequote}
+  - match: { buckets.1.job_id: farequote}
   - match: { buckets.1.result_type: bucket}

 ---

@@ -66,5 +66,5 @@ setup:
         timestamp: "2016-06-01T00:00:00Z"

   - match: { buckets.0.timestamp: 1464739200000}
-  - match: { buckets.0.jobId: farequote }
+  - match: { buckets.0.job_id: farequote }
   - match: { buckets.0.result_type: bucket}

@@ -2,21 +2,21 @@ setup:
   - do:
       index:
         index: prelertresults-farequote
-        type: categoryDefinition
+        type: category_definition
         id: 1
-        body: { "jobId": "farequote", "categoryId": 1 }
+        body: { "job_id": "farequote", "category_id": 1 }
   - do:
       index:
         index: prelertresults-farequote
-        type: categoryDefinition
+        type: category_definition
         id: 2
-        body: { "jobId": "farequote", "categoryId": 2 }
+        body: { "job_id": "farequote", "category_id": 2 }
   - do:
       index:
         index: prelertresults-unrelated
-        type: categoryDefinition
+        type: category_definition
         id: 3
-        body: { "jobId": "unrelated", "categoryId": 1 }
+        body: { "job_id": "unrelated", "category_id": 1 }

   - do:
       indices.refresh:

@@ -29,10 +29,10 @@ setup:
         job_id: "farequote"

   - match: { count: 2 }
-  - match: { categories.0.jobId: farequote }
-  - match: { categories.0.categoryId: 1 }
-  - match: { categories.1.jobId: farequote }
-  - match: { categories.1.categoryId: 2 }
+  - match: { categories.0.job_id: farequote }
+  - match: { categories.0.category_id: 1 }
+  - match: { categories.1.job_id: farequote }
+  - match: { categories.1.category_id: 2 }

 ---
 "Test result category api":

@@ -41,5 +41,5 @@ setup:
         job_id: "farequote"
         category_id: "1"

-  - match: { categories.0.jobId: farequote }
-  - match: { categories.0.categoryId: 1 }
+  - match: { categories.0.job_id: farequote }
+  - match: { categories.0.category_id: 1 }

@@ -6,11 +6,11 @@ setup:
           mappings:
             result:
               properties:
-                "jobId":
+                "job_id":
                   type: keyword
                 "timestamp":
                   type: date
-                "anomalyScore":
+                "anomaly_score":
                   type: float
                 "result_type":
                   type: keyword

@@ -21,11 +21,11 @@ setup:
         id: 1
         body:
           {
-            "jobId": "farequote",
+            "job_id": "farequote",
             "timestamp": "2016-06-01T00:00:00Z",
-            "influencerFieldName": "foo",
-            "influencerFieldValue": "bar",
-            "anomalyScore": 80.0,
+            "influencer_field_name": "foo",
+            "influencer_field_value": "bar",
+            "anomaly_score": 80.0,
             "result_type" : "influencer"
           }

@@ -36,11 +36,11 @@ setup:
         id: 2
         body:
           {
-            "jobId": "farequote",
+            "job_id": "farequote",
             "timestamp": "2016-06-02T00:00:00Z",
-            "influencerFieldName": "foo",
-            "influencerFieldValue": "zoo",
-            "anomalyScore": 50.0,
+            "influencer_field_name": "foo",
+            "influencer_field_value": "zoo",
+            "anomaly_score": 50.0,
             "result_type" : "influencer"
           }
   - do:

@@ -6,23 +6,23 @@ setup:
           mappings:
             record:
               properties:
-                "jobId":
+                "job_id":
                   type: keyword
                 "result_type":
                   type: keyword
                 "timestamp":
                   type: date
-                "normalizedProbability":
+                "normalized_probability":
                   type: float
-                "anomalyScore":
+                "anomaly_score":
                   type: float
-                "overFieldValue":
+                "over_field_value":
                   type: keyword
-                "partitionFieldValue":
+                "partition_field_value":
                   type: keyword
-                "byFieldValue":
+                "by_field_value":
                   type: keyword
-                "fieldName":
+                "field_name":
                   type: keyword
                 "function":
                   type: keyword

@@ -34,10 +34,10 @@ setup:
         id: 2
         body:
           {
-            "jobId": "farequote",
+            "job_id": "farequote",
             "result_type": "record",
             "timestamp": "2016-06-01T00:00:00Z",
-            "anomalyScore": 60.0
+            "anomaly_score": 60.0
           }

   - do:

@@ -47,10 +47,10 @@ setup:
         id: 3
         body:
           {
-            "jobId": "farequote",
+            "job_id": "farequote",
             "result_type": "record",
             "timestamp": "2016-06-02T00:00:00Z",
-            "anomalyScore": 80.0
+            "anomaly_score": 80.0
           }

   - do:

@@ -65,10 +65,10 @@ setup:

   - match: { count: 2 }
   - match: { records.0.timestamp: 1464825600000 }
-  - match: { records.0.jobId: farequote}
+  - match: { records.0.job_id: farequote}
   - match: { records.0.result_type: record}
   - match: { records.1.timestamp: 1464739200000 }
-  - match: { records.1.jobId: farequote}
+  - match: { records.1.job_id: farequote}
   - match: { records.1.result_type: record}

 ---

@@ -81,5 +81,5 @@ setup:

   - match: { count: 1 }
   - match: { records.0.timestamp: 1464739200000 }
-  - match: { records.0.jobId: farequote}
+  - match: { records.0.job_id: farequote}
   - match: { records.0.result_type: record}

@@ -3,16 +3,16 @@ setup:
       xpack.prelert.put_job:
         body: >
           {
-            "jobId":"job-stats-test",
+            "job_id":"job-stats-test",
             "description":"Analysis of response time by airline",
-            "analysisConfig" : {
-                "bucketSpan":3600,
-                "detectors" :[{"function":"metric","fieldName":"responsetime","byFieldName":"airline"}]
+            "analysis_config" : {
+                "bucket_span":3600,
+                "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
             },
-            "dataDescription" : {
+            "data_description" : {
                 "format":"JSON",
-                "timeField":"time",
-                "timeFormat":"epoch"
+                "time_field":"time",
+                "time_format":"epoch"
             }
           }

@@ -20,23 +20,23 @@ setup:
       xpack.prelert.put_job:
         body: >
           {
-            "jobId":"scheduled-job",
+            "job_id":"scheduled-job",
             "description":"A job with a scheduler",
-            "analysisConfig" : {
-                "bucketSpan":3600,
-                "detectors" :[{"function":"metric","fieldName":"responsetime","byFieldName":"airline"}]
+            "analysis_config" : {
+                "bucket_span":3600,
+                "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
             },
-            "dataDescription" : {
+            "data_description" : {
                 "format" : "ELASTICSEARCH",
-                "timeField":"time",
-                "timeFormat":"yyyy-MM-dd'T'HH:mm:ssX"
+                "time_field":"time",
+                "time_format":"yyyy-MM-dd'T'HH:mm:ssX"
             },
-            "schedulerConfig": {
-                "dataSource":"ELASTICSEARCH",
-                "baseUrl":"http://marple:9202",
+            "scheduler_config": {
+                "data_source":"ELASTICSEARCH",
+                "base_url":"http://marple:9202",
                 "indexes":["farequote"],
                 "types":["response"],
-                "retrieveWholeSource":true
+                "retrieve_whole_source":true
             }
           }
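The scheduler block is renamed along with its members. For readability, the new form read off the added lines above (values are the test fixtures, including the test-only base_url):

    "scheduler_config": {
      "data_source": "ELASTICSEARCH",
      "base_url": "http://marple:9202",
      "indexes": ["farequote"],
      "types": ["response"],
      "retrieve_whole_source": true
    }
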
@@ -3,16 +3,16 @@ setup:
       xpack.prelert.put_job:
         body: >
           {
-            "jobId":"farequote",
+            "job_id":"farequote",
             "description":"Analysis of response time by airline",
-            "analysisConfig" : {
-                "bucketSpan":3600,
-                "detectors" :[{"function":"metric","fieldName":"responsetime","byFieldName":"airline"}]
+            "analysis_config" : {
+                "bucket_span":3600,
+                "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
             },
-            "dataDescription" : {
+            "data_description" : {
                 "format":"JSON",
-                "timeField":"time",
-                "timeFormat":"epoch"
+                "time_field":"time",
+                "time_format":"epoch"
             }
           }

@@ -54,7 +54,7 @@ setup:
   - do:
       get:
        index: prelertresults-farequote
-        type: dataCounts
+        type: data_counts
        id: farequote-data-counts

   - match: { _source.processed_record_count: 2 }

@@ -73,7 +73,7 @@ setup:
       catch: /parse_exception/
       xpack.prelert.post_data:
         job_id: foo
-        resetStart: not_a_date
+        reset_start: not_a_date
       body: >
         {"airline":"AAL","responsetime":"132.2046","sourcetype":"farequote","time":"1403481600"}
         {"airline":"JZA","responsetime":"990.4628","sourcetype":"farequote","time":"1403481600"}

@@ -82,7 +82,7 @@ setup:
       catch: /parse_exception/
       xpack.prelert.post_data:
         job_id: foo
-        resetEnd: end_not_a_date
+        reset_end: end_not_a_date
       body: >
         {"airline":"AAL","responsetime":"132.2046","sourcetype":"farequote","time":"1403481600"}
         {"airline":"JZA","responsetime":"990.4628","sourcetype":"farequote","time":"1403481600"}

@@ -105,4 +105,4 @@ setup:
       catch: /parse_exception/
       xpack.prelert.flush_data:
         job_id: foo
-        advanceTime: advance_time_not_a_date
+        advance_time: advance_time_not_a_date
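Consolidated, the renamed time parameters on post_data and flush_data now read as follows (a sketch using the same negative-test fixtures; the old names were resetStart, resetEnd and advanceTime):

    - do:
        catch: /parse_exception/
        xpack.prelert.post_data:
          job_id: foo
          reset_start: not_a_date                  # formerly resetStart; reset_end likewise replaces resetEnd
    - do:
        catch: /parse_exception/
        xpack.prelert.flush_data:
          job_id: foo
          advance_time: advance_time_not_a_date    # formerly advanceTime
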
@@ -4,36 +4,36 @@ setup:
         index: prelertresults-foo
         body:
           mappings:
-            modelSnapshot:
+            model_snapshot:
               properties:
                 "timestamp":
                   type: date
-                "restorePriority":
+                "restore_priority":
                   type: integer
   - do:
       index:
         index: prelertresults-foo
-        type: modelSnapshot
+        type: model_snapshot
         id: "1"
         body: >
           {
-            "jobId" : "foo",
+            "job_id" : "foo",
             "timestamp": "2016-06-02T00:00:00Z",
-            "restorePriority": "1",
-            "snapshotId": "foo"
+            "restore_priority": "1",
+            "snapshot_id": "foo"
           }

   - do:
       index:
         index: prelertresults-foo
-        type: modelSnapshot
+        type: model_snapshot
         id: "2"
         body: >
           {
-            "jobId": "foo",
+            "job_id": "foo",
             "timestamp": "2016-06-01T00:00:00Z",
-            "restorePriority": "2",
-            "snapshotId": "bar",
+            "restore_priority": "2",
+            "snapshot_id": "bar",
             "description": "bar"
           }

@@ -61,7 +61,7 @@ setup:
         description: "new_description"

   - match: { count: 0 }
-  - length: { modelSnapshots: 0 }
+  - length: { model_snapshots: 0 }

   - do:
       xpack.prelert.put_model_snapshot_description:

@@ -85,8 +85,8 @@ setup:
         description: "new_description"

   - match: { count: 1 }
-  - match: { modelSnapshots.0.restorePriority: 1 }
-  - match: { modelSnapshots.0.timestamp: 1464825600000 }
+  - match: { model_snapshots.0.restore_priority: 1 }
+  - match: { model_snapshots.0.timestamp: 1464825600000 }

 ---
 "Test with conflict against existing description":

@@ -96,9 +96,9 @@ setup:
         description: "bar"

   - match: { count: 1 }
-  - length: { modelSnapshots: 1 }
-  - match: { modelSnapshots.0.jobId: "foo" }
-  - match: { modelSnapshots.0.description: "bar" }
+  - length: { model_snapshots: 1 }
+  - match: { model_snapshots.0.job_id: "foo" }
+  - match: { model_snapshots.0.description: "bar" }

   - do:
       catch: request

@@ -3,48 +3,48 @@ setup:
       xpack.prelert.put_job:
         body: >
           {
-            "jobId":"foo",
+            "job_id":"foo",
             "description":"Analysis of response time by airline",
-            "analysisConfig" : {
-                "bucketSpan":3600,
-                "detectors" :[{"function":"metric","fieldName":"responsetime","byFieldName":"airline"}]
+            "analysis_config" : {
+                "bucket_span":3600,
+                "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
             },
-            "dataDescription" : {
-                "fieldDelimiter":",",
-                "timeField":"time",
-                "timeFormat":"yyyy-MM-dd HH:mm:ssX"
+            "data_description" : {
+                "field_delimiter":",",
+                "time_field":"time",
+                "time_format":"yyyy-MM-dd HH:mm:ssX"
             }
           }
   - do:
       index:
         index: prelertresults-foo
-        type: modelSnapshot
+        type: model_snapshot
         id: "1"
         body: >
           {
-            "jobId": "foo",
+            "job_id": "foo",
             "timestamp": "2016-06-02T00:00:00Z",
-            "restorePriority": "1",
-            "snapshotId": "foo1",
+            "restore_priority": "1",
+            "snapshot_id": "foo1",
             "description": "first",
-            "latestRecordTimeStamp": "2016-06-02T00:00:00Z",
-            "latestResultTimeStamp": "2016-06-02T00:00:00Z"
+            "latest_record_time_stamp": "2016-06-02T00:00:00Z",
+            "latest_result_time_stamp": "2016-06-02T00:00:00Z"
           }

   - do:
       index:
         index: prelertresults-foo
-        type: modelSnapshot
+        type: model_snapshot
         id: "2"
         body: >
           {
-            "jobId": "foo",
+            "job_id": "foo",
             "timestamp": "2016-06-01T00:00:00Z",
-            "restorePriority": "2",
-            "snapshotId": "foo2",
+            "restore_priority": "2",
+            "snapshot_id": "foo2",
             "description": "second",
-            "latestRecordTimeStamp": "2016-06-01T00:00:00Z",
-            "latestResultTimeStamp": "2016-06-01T00:00:00Z"
+            "latest_record_time_stamp": "2016-06-01T00:00:00Z",
+            "latest_result_time_stamp": "2016-06-01T00:00:00Z"
           }

   - do:

@@ -52,35 +52,35 @@ setup:
         index: prelertresults-foo
         type: result
         id: "foo_1464825600000_1"
-        body: { "jobId": "foo", "result_type": "bucket", "timestamp": "2016-06-02T00:00:00Z", "bucketSpan":1 }
+        body: { "job_id": "foo", "result_type": "bucket", "timestamp": "2016-06-02T00:00:00Z", "bucket_span":1 }

   - do:
       index:
         index: prelertresults-foo
         type: result
         id: "foo_1464782400000_1"
-        body: { "jobId": "foo", "result_type": "bucket", "timestamp": "2016-06-01T12:00:00Z", "bucketSpan":1 }
+        body: { "job_id": "foo", "result_type": "bucket", "timestamp": "2016-06-01T12:00:00Z", "bucket_span":1 }

   - do:
       index:
         index: prelertresults-foo
         type: result
         id: "foo_1462060800000_1"
-        body: { "jobId": "foo", "result_type": "bucket", "timestamp": "2016-05-01T00:00:00Z", "bucketSpan":1 }
+        body: { "job_id": "foo", "result_type": "bucket", "timestamp": "2016-05-01T00:00:00Z", "bucket_span":1 }

   - do:
       index:
         index: prelertresults-foo
         type: result
         id: "foo_1464825600000_1_record"
-        body: { "jobId": "foo", "result_type": "record", "timestamp": "2016-06-02T00:00:00Z" }
+        body: { "job_id": "foo", "result_type": "record", "timestamp": "2016-06-02T00:00:00Z" }

   - do:
       index:
         index: prelertresults-foo
         type: result
         id: "foo_1462060800000_1_record"
-        body: { "jobId": "foo", "result_type": "record", "timestamp": "2016-05-01T00:00:00Z" }
+        body: { "job_id": "foo", "result_type": "record", "timestamp": "2016-05-01T00:00:00Z" }

   - do:
       index:

@@ -88,12 +88,12 @@ setup:
         type: result
         id: "foo_1464825600000_1_influencer"
         body: {
-          "jobId": "foo",
+          "job_id": "foo",
           "result_type": "influencer",
           "timestamp": "2016-06-02T00:00:00Z",
-          "influencerFieldName": "foo",
-          "influencerFieldValue": "zoo",
-          "anomalyScore": 50.0
+          "influencer_field_name": "foo",
+          "influencer_field_value": "zoo",
+          "anomaly_score": 50.0
         }

   - do:

@@ -103,12 +103,12 @@ setup:
         id: "foo_1462060800000_1_influencer"
         body:
           {
-            "jobId": "foo",
+            "job_id": "foo",
             "result_type": "influencer",
             "timestamp": "2016-05-01T00:00:00Z",
-            "influencerFieldName": "foo",
-            "influencerFieldValue": "zoo",
-            "anomalyScore": 50.0
+            "influencer_field_name": "foo",
+            "influencer_field_value": "zoo",
+            "anomaly_score": 50.0
           }

   - do:

@@ -136,7 +136,7 @@ setup:
       catch: /resource_not_found_exception/
       xpack.prelert.revert_model_snapshot:
         job_id: "foo"
-        snapshotId: "not_exist"
+        snapshot_id: "not_exist"

 ---
 "Test revert model with invalid description":

@@ -151,26 +151,26 @@ setup:
   - do:
       xpack.prelert.revert_model_snapshot:
         job_id: "foo"
-        snapshotId: "foo1"
+        snapshot_id: "foo1"

   - match: { acknowledged: true }
-  - match: { model.jobId: "foo" }
+  - match: { model.job_id: "foo" }
   - match: { model.timestamp: 1464825600000 }
-  - match: { model.restorePriority: 1 }
-  - match: { model.snapshotId: "foo1" }
-  - match: { model.snapshotDocCount: 0 }
+  - match: { model.restore_priority: 1 }
+  - match: { model.snapshot_id: "foo1" }
+  - match: { model.snapshot_doc_count: 0 }

   - do:
       xpack.prelert.revert_model_snapshot:
         job_id: "foo"
-        snapshotId: "foo2"
+        snapshot_id: "foo2"

   - match: { acknowledged: true }
-  - match: { model.jobId: "foo" }
+  - match: { model.job_id: "foo" }
   - match: { model.timestamp: 1464739200000 }
-  - match: { model.restorePriority: 2 }
-  - match: { model.snapshotId: "foo2" }
-  - match: { model.snapshotDocCount: 0 }
+  - match: { model.restore_priority: 2 }
+  - match: { model.snapshot_id: "foo2" }
+  - match: { model.snapshot_doc_count: 0 }

 ---
 "Test revert model with valid time":

@@ -180,11 +180,11 @@ setup:
         time: "2016-06-02T01:00:00Z"

   - match: { acknowledged: true }
-  - match: { model.jobId: "foo" }
+  - match: { model.job_id: "foo" }
   - match: { model.timestamp: 1464825600000 }
-  - match: { model.restorePriority: 1 }
-  - match: { model.snapshotId: "foo1" }
-  - match: { model.snapshotDocCount: 0 }
+  - match: { model.restore_priority: 1 }
+  - match: { model.snapshot_id: "foo1" }
+  - match: { model.snapshot_doc_count: 0 }

   - do:
       xpack.prelert.revert_model_snapshot:

@@ -192,11 +192,11 @@ setup:
         time: "2016-06-01T01:00:00Z"

   - match: { acknowledged: true }
-  - match: { model.jobId: "foo" }
+  - match: { model.job_id: "foo" }
   - match: { model.timestamp: 1464739200000 }
-  - match: { model.restorePriority: 2 }
-  - match: { model.snapshotId: "foo2" }
-  - match: { model.snapshotDocCount: 0 }
+  - match: { model.restore_priority: 2 }
+  - match: { model.snapshot_id: "foo2" }
+  - match: { model.snapshot_doc_count: 0 }

 ---
 "Test revert model with valid description":

@@ -206,11 +206,11 @@ setup:
         description: "first"

   - match: { acknowledged: true }
-  - match: { model.jobId: "foo" }
+  - match: { model.job_id: "foo" }
   - match: { model.timestamp: 1464825600000 }
-  - match: { model.restorePriority: 1 }
-  - match: { model.snapshotId: "foo1" }
-  - match: { model.snapshotDocCount: 0 }
+  - match: { model.restore_priority: 1 }
+  - match: { model.snapshot_id: "foo1" }
+  - match: { model.snapshot_doc_count: 0 }

   - do:
       xpack.prelert.revert_model_snapshot:

@@ -218,11 +218,11 @@ setup:
         description: "second"

   - match: { acknowledged: true }
-  - match: { model.jobId: "foo" }
+  - match: { model.job_id: "foo" }
   - match: { model.timestamp: 1464739200000 }
-  - match: { model.restorePriority: 2 }
-  - match: { model.snapshotId: "foo2" }
-  - match: { model.snapshotDocCount: 0 }
+  - match: { model.restore_priority: 2 }
+  - match: { model.snapshot_id: "foo2" }
+  - match: { model.snapshot_doc_count: 0 }

 ---
 "Test revert model with deleteInterveningResults":

@@ -237,8 +237,8 @@ setup:
   - do:
       xpack.prelert.revert_model_snapshot:
         job_id: "foo"
-        snapshotId: "foo2"
-        deleteInterveningResults: true
+        snapshot_id: "foo2"
+        delete_intervening_results: true

   - do:
       indices.refresh:

@@ -251,7 +251,7 @@ setup:
         end: "2016-12-01T00:00:00Z"

   - match: { count: 1 }
-  - match: { buckets.0.jobId: "foo" }
+  - match: { buckets.0.job_id: "foo" }
   - match: { buckets.0.timestamp: 1462060800000 }

   - do:

@@ -261,7 +261,7 @@ setup:
         end: "2016-12-01T00:00:00Z"

   - match: { count: 1 }
-  - match: { records.0.jobId: "foo" }
+  - match: { records.0.job_id: "foo" }
   - match: { records.0.timestamp: 1462060800000 }

   - do:

@@ -271,7 +271,7 @@ setup:
         end: "2016-12-01T01:00:00Z"

   - match: { count: 1 }
-  - match: { influencers.0.jobId: "foo" }
+  - match: { influencers.0.job_id: "foo" }
   - match: { influencers.0.timestamp: 1462060800000 }

   - do:
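After the rename, a revert call exercising every touched parameter reads as follows (assembled from the added lines above; the old names were snapshotId and deleteInterveningResults):

    - do:
        xpack.prelert.revert_model_snapshot:
          job_id: "foo"
          snapshot_id: "foo2"
          delete_intervening_results: true
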
@@ -5,17 +5,17 @@
         body: >
           {
             "function":"count",
-            "byFieldName":"airline"
+            "by_field_name":"airline"
           }
   - match: { acknowledged: true }

 ---
 "Test invalid detector":
   - do:
-      catch: /fieldName must be set when the 'mean' function is used/
+      catch: /field_name must be set when the 'mean' function is used/
       xpack.prelert.validate_detector:
         body: >
           {
             "function":"mean",
-            "byFieldName":"airline"
+            "by_field_name":"airline"
           }
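Consolidated, the passing validate_detector invocation in the new snake_case form (read off the added lines of the first hunk):

    - do:
        xpack.prelert.validate_detector:
          body: >
            {
              "function": "count",
              "by_field_name": "airline"
            }
    - match: { acknowledged: true }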