[ML] Change the root_cause of error responses to be more informative (elastic/x-pack-elasticsearch#1598)
When an error response contains multiple layers of errors, Kibana displays the one labelled root_cause, which is defined as the most deeply nested ElasticsearchException. It is therefore of great benefit to the UI if our config validation reports the actual problem in an ElasticsearchException rather than an IllegalArgumentException. This commit also adds an extra validation check that catches, at an earlier stage, the case of a single job config containing both a field x.y and a field x. Previously this was only caught when we tried to create the results mappings, and the error suggested that using a dedicated results index would help, which it clearly will not for a clash within a single job config.

Fixes elastic/x-pack-kibana#1387
Fixes elastic/prelert-legacy#349
Original commit: elastic/x-pack-elasticsearch@7d1b7def6c
parent ef25e2b604
commit 6484f812c0
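To make the motivation concrete, here is a minimal sketch (not part of the commit; the outer wrapper message is hypothetical) of how the exception type determines what Kibana shows. Roughly, the REST error rendering reports the most deeply nested ElasticsearchException in the cause chain as root_cause, so a validation message carried by an ElasticsearchStatusException surfaces, whereas the same message inside a bare IllegalArgumentException stays hidden behind the outer wrapper:

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.rest.RestStatus;

public class RootCauseSketch {
    public static void main(String[] args) {
        // What ExceptionsHelper.badRequestException(msg) builds: a 400 carrying the validation message.
        ElasticsearchStatusException validationError = new ElasticsearchStatusException(
                "ML requires date_histogram.time_zone to be UTC", RestStatus.BAD_REQUEST);

        // Hypothetical wrapper added by a higher layer before the response is rendered.
        ElasticsearchStatusException wrapper = new ElasticsearchStatusException(
                "unable to create datafeed", RestStatus.BAD_REQUEST, validationError);

        // guessRootCauses() mirrors how root_cause is derived: the innermost
        // ElasticsearchException wins, so the validation message above is what the UI displays.
        System.out.println(wrapper.guessRootCauses()[0].getMessage());
    }
}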
@@ -240,11 +240,11 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
     /**
      * Returns the date histogram interval as epoch millis if valid, or throws
-     * an {@link IllegalArgumentException} with the validation error
+     * an {@link ElasticsearchException} with the validation error
      */
     private static long validateAndGetDateHistogramInterval(DateHistogramAggregationBuilder dateHistogram) {
         if (dateHistogram.timeZone() != null && dateHistogram.timeZone().equals(DateTimeZone.UTC) == false) {
-            throw new IllegalArgumentException("ML requires date_histogram.time_zone to be UTC");
+            throw ExceptionsHelper.badRequestException("ML requires date_histogram.time_zone to be UTC");
         }

         if (dateHistogram.dateHistogramInterval() != null) {
@@ -277,21 +277,21 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
                 case MONTH_OF_YEAR:
                 case YEAR_OF_CENTURY:
                 case QUARTER:
-                    throw new IllegalArgumentException(invalidDateHistogramCalendarIntervalMessage(calendarInterval));
+                    throw ExceptionsHelper.badRequestException(invalidDateHistogramCalendarIntervalMessage(calendarInterval));
                 default:
-                    throw new IllegalArgumentException("Unexpected dateTimeUnit [" + dateTimeUnit + "]");
+                    throw ExceptionsHelper.badRequestException("Unexpected dateTimeUnit [" + dateTimeUnit + "]");
             }
         } else {
             interval = TimeValue.parseTimeValue(calendarInterval, "date_histogram.interval");
         }
         if (interval.days() > 7) {
-            throw new IllegalArgumentException(invalidDateHistogramCalendarIntervalMessage(calendarInterval));
+            throw ExceptionsHelper.badRequestException(invalidDateHistogramCalendarIntervalMessage(calendarInterval));
         }
         return interval.millis();
     }

     private static String invalidDateHistogramCalendarIntervalMessage(String interval) {
-        throw new IllegalArgumentException("When specifying a date_histogram calendar interval ["
+        throw ExceptionsHelper.badRequestException("When specifying a date_histogram calendar interval ["
                 + interval + "], ML does not accept intervals longer than a week because of " +
                 "variable lengths of periods greater than a week");
     }
@@ -512,7 +512,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
         if (scrollSize < 0) {
             String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE,
                     DatafeedConfig.SCROLL_SIZE.getPreferredName(), scrollSize);
-            throw new IllegalArgumentException(msg);
+            throw ExceptionsHelper.badRequestException(msg);
         }
         this.scrollSize = scrollSize;
     }
@@ -529,7 +529,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
         ExceptionsHelper.requireNonNull(id, ID.getPreferredName());
         ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
         if (!MlStrings.isValidId(id)) {
-            throw new IllegalArgumentException(Messages.getMessage(Messages.INVALID_ID, ID.getPreferredName()));
+            throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.INVALID_ID, ID.getPreferredName()));
         }
         if (indices == null || indices.isEmpty() || indices.contains(null) || indices.contains("")) {
             throw invalidOptionValue(INDICES.getPreferredName(), indices);
@@ -548,23 +548,24 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
             return;
         }
         if (scriptFields != null && !scriptFields.isEmpty()) {
-            throw new IllegalArgumentException(Messages.getMessage(Messages.DATAFEED_CONFIG_CANNOT_USE_SCRIPT_FIELDS_WITH_AGGS));
+            throw ExceptionsHelper.badRequestException(
+                    Messages.getMessage(Messages.DATAFEED_CONFIG_CANNOT_USE_SCRIPT_FIELDS_WITH_AGGS));
         }
         List<AggregationBuilder> aggregatorFactories = aggregations.getAggregatorFactories();
         if (aggregatorFactories.isEmpty()) {
-            throw new IllegalArgumentException(Messages.DATAFEED_AGGREGATIONS_REQUIRES_DATE_HISTOGRAM);
+            throw ExceptionsHelper.badRequestException(Messages.DATAFEED_AGGREGATIONS_REQUIRES_DATE_HISTOGRAM);
         }
         AggregationBuilder topLevelAgg = aggregatorFactories.get(0);
         if (topLevelAgg instanceof HistogramAggregationBuilder) {
             if (((HistogramAggregationBuilder) topLevelAgg).interval() <= 0) {
-                throw new IllegalArgumentException(Messages.DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO);
+                throw ExceptionsHelper.badRequestException(Messages.DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO);
             }
         } else if (topLevelAgg instanceof DateHistogramAggregationBuilder) {
             if (validateAndGetDateHistogramInterval((DateHistogramAggregationBuilder) topLevelAgg) <= 0) {
-                throw new IllegalArgumentException(Messages.DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO);
+                throw ExceptionsHelper.badRequestException(Messages.DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO);
             }
         } else {
-            throw new IllegalArgumentException(Messages.DATAFEED_AGGREGATIONS_REQUIRES_DATE_HISTOGRAM);
+            throw ExceptionsHelper.badRequestException(Messages.DATAFEED_AGGREGATIONS_REQUIRES_DATE_HISTOGRAM);
         }
     }

@@ -582,7 +583,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements

     private static ElasticsearchException invalidOptionValue(String fieldName, Object value) {
         String msg = Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, fieldName, value);
-        throw new IllegalArgumentException(msg);
+        throw ExceptionsHelper.badRequestException(msg);
     }
 }
 }
@@ -186,7 +186,7 @@ public class JobManager extends AbstractComponent {
             public void onFailure(Exception e) {
                 if (e instanceof IllegalArgumentException
                         && e.getMessage().matches("mapper \\[.*\\] of different type, current_type \\[.*\\], merged_type \\[.*\\]")) {
-                    actionListener.onFailure(new IllegalArgumentException(Messages.JOB_CONFIG_MAPPING_TYPE_CLASH, e));
+                    actionListener.onFailure(ExceptionsHelper.badRequestException(Messages.JOB_CONFIG_MAPPING_TYPE_CLASH, e));
                 } else {
                     actionListener.onFailure(e);
                 }
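The JobManager change above keys off the message produced by a mapping merge conflict. A self-contained sketch of that detection (the sample message below is hypothetical, shaped to match the pattern the code checks):

public class MappingClashSketch {
    // Same pattern as in JobManager.onFailure above.
    private static final String TYPE_CLASH_PATTERN =
            "mapper \\[.*\\] of different type, current_type \\[.*\\], merged_type \\[.*\\]";

    public static void main(String[] args) {
        // Hypothetical message of the kind a mapping merge produces on a type clash.
        String message = "mapper [metric.max] of different type, current_type [double], merged_type [keyword]";
        Exception e = new IllegalArgumentException(message);

        boolean isTypeClash = e instanceof IllegalArgumentException && e.getMessage().matches(TYPE_CLASH_PATTERN);
        // With this commit, such a clash is reported via ExceptionsHelper.badRequestException(...)
        // rather than a bare IllegalArgumentException, so the REST response carries a 400 status.
        System.out.println("type clash detected: " + isTypeClash);
    }
}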
@@ -16,15 +16,17 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.ml.job.messages.Messages;
+import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
 import org.elasticsearch.xpack.ml.utils.time.TimeUtils;

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Function;
@@ -53,7 +55,7 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {
     private static final ParseField ANALYSIS_CONFIG = new ParseField("analysis_config");
     private static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
     private static final ParseField CATEGORIZATION_FIELD_NAME = new ParseField("categorization_field_name");
-    public static final ParseField CATEGORIZATION_FILTERS = new ParseField("categorization_filters");
+    static final ParseField CATEGORIZATION_FILTERS = new ParseField("categorization_filters");
     private static final ParseField LATENCY = new ParseField("latency");
     private static final ParseField SUMMARY_COUNT_FIELD_NAME = new ParseField("summary_count_field_name");
     private static final ParseField DETECTORS = new ParseField("detectors");
@@ -65,7 +67,7 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {
     private static final ParseField USER_PER_PARTITION_NORMALIZATION = new ParseField("use_per_partition_normalization");

     private static final String ML_CATEGORY_FIELD = "mlcategory";
-    public static final Set<String> AUTO_CREATED_FIELDS = new HashSet<>(Arrays.asList(ML_CATEGORY_FIELD));
+    public static final Set<String> AUTO_CREATED_FIELDS = new HashSet<>(Collections.singletonList(ML_CATEGORY_FIELD));

     public static final long DEFAULT_RESULT_FINALIZATION_WINDOW = 2L;

@@ -229,11 +231,15 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {
      * @return Set of term fields - never <code>null</code>
      */
     public Set<String> termFields() {
-        Set<String> termFields = new TreeSet<>();
+        return termFields(getDetectors(), getInfluencers());
+    }

-        getDetectors().stream().forEach(d -> termFields.addAll(d.getByOverPartitionTerms()));
+    static SortedSet<String> termFields(List<Detector> detectors, List<String> influencers) {
+        SortedSet<String> termFields = new TreeSet<>();

-        for (String i : getInfluencers()) {
+        detectors.forEach(d -> termFields.addAll(d.getByOverPartitionTerms()));
+
+        for (String i : influencers) {
             addIfNotNull(termFields, i);
         }

@@ -301,7 +307,7 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {
     }

     public List<String> fields() {
-        return collectNonNullAndNonEmptyDetectorFields(d -> d.getFieldName());
+        return collectNonNullAndNonEmptyDetectorFields(Detector::getFieldName);
     }

     private List<String> collectNonNullAndNonEmptyDetectorFields(
@@ -319,16 +325,16 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {
     }

     public List<String> byFields() {
-        return collectNonNullAndNonEmptyDetectorFields(d -> d.getByFieldName());
+        return collectNonNullAndNonEmptyDetectorFields(Detector::getByFieldName);
     }

     public List<String> overFields() {
-        return collectNonNullAndNonEmptyDetectorFields(d -> d.getOverFieldName());
+        return collectNonNullAndNonEmptyDetectorFields(Detector::getOverFieldName);
     }


     public List<String> partitionFields() {
-        return collectNonNullAndNonEmptyDetectorFields(d -> d.getPartitionFieldName());
+        return collectNonNullAndNonEmptyDetectorFields(Detector::getPartitionFieldName);
     }

     @Override
@@ -360,7 +366,7 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {
         }
         if (multipleBucketSpans != null) {
             builder.field(MULTIPLE_BUCKET_SPANS.getPreferredName(),
-                    multipleBucketSpans.stream().map(s -> s.getStringRep()).collect(Collectors.toList()));
+                    multipleBucketSpans.stream().map(TimeValue::getStringRep).collect(Collectors.toList()));
         }
         if (usePerPartitionNormalization) {
             builder.field(USER_PER_PARTITION_NORMALIZATION.getPreferredName(), usePerPartitionNormalization);
@@ -399,7 +405,7 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {

     public static class Builder {

-        public static final TimeValue DEFAULT_BUCKET_SPAN = TimeValue.timeValueMinutes(5);
+        static final TimeValue DEFAULT_BUCKET_SPAN = TimeValue.timeValueMinutes(5);

         private List<Detector> detectors;
         private TimeValue bucketSpan = DEFAULT_BUCKET_SPAN;
@@ -516,6 +522,8 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {
                 checkNoInfluencersAreSet(influencers);
             }

+            verifyNoInconsistentNestedFieldNames();
+
             return new AnalysisConfig(bucketSpan, categorizationFieldName, categorizationFilters,
                     latency, summaryCountFieldName, detectors, influencers, overlappingBuckets,
                     resultFinalizationWindow, multivariateByFields, multipleBucketSpans, usePerPartitionNormalization);
@@ -524,26 +532,49 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {
         private static void checkFieldIsNotNegativeIfSpecified(String fieldName, Long value) {
             if (value != null && value < 0) {
                 String msg = Messages.getMessage(Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW, fieldName, 0, value);
-                throw new IllegalArgumentException(msg);
+                throw ExceptionsHelper.badRequestException(msg);
             }
         }

         private void verifyDetectorAreDefined() {
             if (detectors == null || detectors.isEmpty()) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_NO_DETECTORS));
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_NO_DETECTORS));
             }
         }

+        private void verifyNoInconsistentNestedFieldNames() {
+            SortedSet<String> termFields = termFields(detectors, influencers);
+            // We want to outlaw nested fields where a less nested field clashes with one of the nested levels.
+            // For example, this is not allowed:
+            // - a
+            // - a.b
+            // Nor is this:
+            // - a.b
+            // - a.b.c
+            // But this is OK:
+            // - a.b
+            // - a.c
+            // The sorted set makes it relatively easy to detect the situations we want to avoid.
+            String prevTermField = null;
+            for (String termField : termFields) {
+                if (prevTermField != null && termField.startsWith(prevTermField + ".")) {
+                    throw ExceptionsHelper.badRequestException("Fields " + prevTermField + " and " + termField +
+                            " cannot both be used in the same analysis_config");
+                }
+                prevTermField = termField;
+            }
+        }
+
         private void verifyMlCategoryIsUsedWhenCategorizationFieldNameIsSet() {
             Set<String> byOverPartitionFields = new TreeSet<>();
-            detectors.stream().forEach(d -> byOverPartitionFields.addAll(d.getByOverPartitionTerms()));
+            detectors.forEach(d -> byOverPartitionFields.addAll(d.getByOverPartitionTerms()));
             boolean isMlCategoryUsed = byOverPartitionFields.contains(ML_CATEGORY_FIELD);
             if (isMlCategoryUsed && categorizationFieldName == null) {
-                throw new IllegalArgumentException(CATEGORIZATION_FIELD_NAME.getPreferredName()
+                throw ExceptionsHelper.badRequestException(CATEGORIZATION_FIELD_NAME.getPreferredName()
                         + " must be set for " + ML_CATEGORY_FIELD + " to be available");
             }
             if (categorizationFieldName != null && isMlCategoryUsed == false) {
-                throw new IllegalArgumentException(CATEGORIZATION_FIELD_NAME.getPreferredName()
+                throw ExceptionsHelper.badRequestException(CATEGORIZATION_FIELD_NAME.getPreferredName()
                         + " is set but " + ML_CATEGORY_FIELD + " is not used in any detector by/over/partition field");
             }
         }
@@ -561,27 +592,28 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {

         private void verifyCategorizationFieldNameSetIfFiltersAreSet() {
             if (categorizationFieldName == null) {
-                throw new IllegalArgumentException(Messages.getMessage(
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(
                         Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_REQUIRE_CATEGORIZATION_FIELD_NAME));
             }
         }

         private void verifyCategorizationFiltersAreDistinct() {
             if (categorizationFilters.stream().distinct().count() != categorizationFilters.size()) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_DUPLICATES));
+                throw ExceptionsHelper.badRequestException(
+                        Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_DUPLICATES));
             }
         }

         private void verifyCategorizationFiltersContainNoneEmpty() {
             if (categorizationFilters.stream().anyMatch(String::isEmpty)) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_EMPTY));
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_EMPTY));
             }
         }

         private void verifyCategorizationFiltersAreValidRegex() {
             for (String filter : categorizationFilters) {
                 if (!isValidRegex(filter)) {
-                    throw new IllegalArgumentException(
+                    throw ExceptionsHelper.badRequestException(
                             Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_INVALID_REGEX, filter));
                 }
             }
@@ -594,7 +626,7 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {

             for (TimeValue span : multipleBucketSpans) {
                 if ((span.getSeconds() % bucketSpan.getSeconds() != 0L) || (span.compareTo(bucketSpan) <= 0)) {
-                    throw new IllegalArgumentException(
+                    throw ExceptionsHelper.badRequestException(
                             Messages.getMessage(Messages.JOB_CONFIG_MULTIPLE_BUCKETSPANS_MUST_BE_MULTIPLE, span, bucketSpan));
                 }
             }
@@ -606,31 +638,27 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {
                     return true;
                 }
             }
-            throw new IllegalArgumentException(Messages.getMessage(
+            throw ExceptionsHelper.badRequestException(Messages.getMessage(
                     Messages.JOB_CONFIG_PER_PARTITION_NORMALIZATION_REQUIRES_PARTITION_FIELD));
         }

-        private static boolean checkNoInfluencersAreSet(List<String> influencers) {
+        private static void checkNoInfluencersAreSet(List<String> influencers) {
             if (!influencers.isEmpty()) {
-                throw new IllegalArgumentException(Messages.getMessage(
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(
                         Messages.JOB_CONFIG_PER_PARTITION_NORMALIZATION_CANNOT_USE_INFLUENCERS));
             }
-
-            return true;
         }

         /**
          * Check that the characters used in a field name will not cause problems.
          *
          * @param field The field name to be validated
-         * @return true
          */
-        public static boolean verifyFieldName(String field) throws ElasticsearchParseException {
+        public static void verifyFieldName(String field) throws ElasticsearchParseException {
             if (field != null && containsInvalidChar(field)) {
-                throw new IllegalArgumentException(
+                throw ExceptionsHelper.badRequestException(
                         Messages.getMessage(Messages.JOB_CONFIG_INVALID_FIELDNAME_CHARS, field, Detector.PROHIBITED));
             }
-            return true;
         }

         private static boolean containsInvalidChar(String field) {
@@ -664,7 +692,7 @@ public class AnalysisConfig extends ToXContentToBytes implements Writeable {
         }

         if (Boolean.TRUE.equals(overlappingBuckets) && mustNotUse) {
-            throw new IllegalArgumentException(
+            throw ExceptionsHelper.badRequestException(
                     Messages.getMessage(Messages.JOB_CONFIG_OVERLAPPING_BUCKETS_INCOMPATIBLE_FUNCTION, illegalFunctions.toString()));
         }

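The new verifyNoInconsistentNestedFieldNames check relies on the ordering of a sorted set: sorting places a parent field directly before any field nested under it, so one pass over neighbouring entries is enough to spot a clash. A minimal JDK-only sketch of the same idea (the field names are arbitrary examples):

import java.util.SortedSet;
import java.util.TreeSet;

public class NestedFieldClashSketch {
    // Returns the first clashing pair, or null if the field names are consistent.
    static String findClash(SortedSet<String> termFields) {
        String prev = null;
        for (String field : termFields) {
            if (prev != null && field.startsWith(prev + ".")) {
                return prev + " / " + field;
            }
            prev = field;
        }
        return null;
    }

    public static void main(String[] args) {
        SortedSet<String> clashing = new TreeSet<>();
        clashing.add("a");
        clashing.add("a.b");          // clashes with "a"
        SortedSet<String> fine = new TreeSet<>();
        fine.add("a.b");
        fine.add("a.c");              // different leaves under the same parent are allowed
        System.out.println(findClash(clashing)); // a / a.b
        System.out.println(findClash(fine));     // null
    }
}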
@@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.ml.job.messages.Messages;
+import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

 import java.io.IOException;
 import java.util.Objects;
@@ -55,7 +56,7 @@ public class AnalysisLimits extends ToXContentToBytes implements Writeable {
         if (categorizationExamplesLimit != null && categorizationExamplesLimit < 0) {
             String msg = Messages.getMessage(Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW, CATEGORIZATION_EXAMPLES_LIMIT, 0,
                     categorizationExamplesLimit);
-            throw new IllegalArgumentException(msg);
+            throw ExceptionsHelper.badRequestException(msg);
         }
         this.categorizationExamplesLimit = categorizationExamplesLimit;
     }
@@ -15,6 +15,7 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.xpack.ml.job.messages.Messages;
+import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

 import java.io.IOException;
 import java.util.Objects;
@@ -67,7 +68,7 @@ public class Condition extends ToXContentToBytes implements Writeable {

     public Condition(Operator op, String filterValue) {
         if (filterValue == null) {
-            throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_CONDITION_INVALID_VALUE_NULL));
+            throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_CONDITION_INVALID_VALUE_NULL));
         }

         if (op.expectsANumericArgument()) {
@@ -75,14 +76,14 @@ public class Condition extends ToXContentToBytes implements Writeable {
                 Double.parseDouble(filterValue);
             } catch (NumberFormatException nfe) {
                 String msg = Messages.getMessage(Messages.JOB_CONFIG_CONDITION_INVALID_VALUE_NUMBER, filterValue);
-                throw new IllegalArgumentException(msg);
+                throw ExceptionsHelper.badRequestException(msg);
             }
         } else {
             try {
                 Pattern.compile(filterValue);
             } catch (PatternSyntaxException e) {
                 String msg = Messages.getMessage(Messages.JOB_CONFIG_CONDITION_INVALID_VALUE_REGEX, filterValue);
-                throw new IllegalArgumentException(msg);
+                throw ExceptionsHelper.badRequestException(msg);
             }
         }
         this.op = op;
@@ -339,7 +339,7 @@ public class DataDescription extends ToXContentToBytes implements Writeable {
             try {
                 DateTimeFormatterTimestampConverter.ofPattern(format, ZoneOffset.UTC);
             } catch (IllegalArgumentException e) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, format));
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, format));
             }
         }
         timeFormat = format;
@@ -202,17 +202,17 @@ public class DetectionRule extends ToXContentToBytes implements Writeable {
         public DetectionRule build() {
             if (targetFieldValue != null && targetFieldName == null) {
                 String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_MISSING_TARGET_FIELD_NAME, targetFieldValue);
-                throw new IllegalArgumentException(msg);
+                throw ExceptionsHelper.badRequestException(msg);
             }
             if (ruleConditions == null || ruleConditions.isEmpty()) {
                 String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_REQUIRES_AT_LEAST_ONE_CONDITION);
-                throw new IllegalArgumentException(msg);
+                throw ExceptionsHelper.badRequestException(msg);
             }
             for (RuleCondition condition : ruleConditions) {
                 if (condition.getConditionType() == RuleConditionType.CATEGORICAL && targetFieldName != null) {
                     String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_CATEGORICAL_INVALID_OPTION,
                             DetectionRule.TARGET_FIELD_NAME_FIELD.getPreferredName());
-                    throw new IllegalArgumentException(msg);
+                    throw ExceptionsHelper.badRequestException(msg);
                 }
             }
             return new DetectionRule(ruleAction, targetFieldName, targetFieldValue, conditionsConnective, ruleConditions);
@@ -16,6 +16,7 @@ import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.xpack.ml.job.messages.Messages;
+import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -560,9 +561,7 @@ public class Detector extends ToXContentToBytes implements Writeable {
             useNull = detector.useNull;
             excludeFrequent = detector.excludeFrequent;
             detectorRules = new ArrayList<>(detector.detectorRules.size());
-            for (DetectionRule rule : detector.getDetectorRules()) {
-                detectorRules.add(rule);
-            }
+            detectorRules.addAll(detector.getDetectorRules());
         }

         public Builder(String function, String fieldName) {
@@ -620,44 +619,47 @@ public class Detector extends ToXContentToBytes implements Writeable {
             boolean emptyOverField = Strings.isEmpty(overFieldName);
             boolean emptyPartitionField = Strings.isEmpty(partitionFieldName);
             if (Detector.ANALYSIS_FUNCTIONS.contains(function) == false) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_UNKNOWN_FUNCTION, function));
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_UNKNOWN_FUNCTION, function));
             }

             if (emptyField && emptyByField && emptyOverField) {
                 if (!Detector.COUNT_WITHOUT_FIELD_FUNCTIONS.contains(function)) {
-                    throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_NO_ANALYSIS_FIELD_NOT_COUNT));
+                    throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_NO_ANALYSIS_FIELD_NOT_COUNT));
                 }
             }

             if (isSummarised && Detector.METRIC.equals(function)) {
-                throw new IllegalArgumentException(
+                throw ExceptionsHelper.badRequestException(
                         Messages.getMessage(Messages.JOB_CONFIG_FUNCTION_INCOMPATIBLE_PRESUMMARIZED, Detector.METRIC));
             }

             // check functions have required fields

             if (emptyField && Detector.FIELD_NAME_FUNCTIONS.contains(function)) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_FUNCTION_REQUIRES_FIELDNAME, function));
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_FUNCTION_REQUIRES_FIELDNAME, function));
             }

             if (!emptyField && (Detector.FIELD_NAME_FUNCTIONS.contains(function) == false)) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_FIELDNAME_INCOMPATIBLE_FUNCTION, function));
+                throw ExceptionsHelper.badRequestException(
+                        Messages.getMessage(Messages.JOB_CONFIG_FIELDNAME_INCOMPATIBLE_FUNCTION, function));
             }

             if (emptyByField && Detector.BY_FIELD_NAME_FUNCTIONS.contains(function)) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_FUNCTION_REQUIRES_BYFIELD, function));
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_FUNCTION_REQUIRES_BYFIELD, function));
             }

             if (!emptyByField && Detector.NO_BY_FIELD_NAME_FUNCTIONS.contains(function)) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_BYFIELD_INCOMPATIBLE_FUNCTION, function));
+                throw ExceptionsHelper.badRequestException(
+                        Messages.getMessage(Messages.JOB_CONFIG_BYFIELD_INCOMPATIBLE_FUNCTION, function));
             }

             if (emptyOverField && Detector.OVER_FIELD_NAME_FUNCTIONS.contains(function)) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_FUNCTION_REQUIRES_OVERFIELD, function));
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_FUNCTION_REQUIRES_OVERFIELD, function));
             }

             if (!emptyOverField && Detector.NO_OVER_FIELD_NAME_FUNCTIONS.contains(function)) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_OVERFIELD_INCOMPATIBLE_FUNCTION, function));
+                throw ExceptionsHelper.badRequestException(
+                        Messages.getMessage(Messages.JOB_CONFIG_OVERFIELD_INCOMPATIBLE_FUNCTION, function));
             }

             // field names cannot contain certain characters

@@ -670,7 +672,7 @@ public class Detector extends ToXContentToBytes implements Writeable {
             if (detectorRules.isEmpty() == false) {
                 if (FUNCTIONS_WITHOUT_RULE_SUPPORT.contains(function)) {
                     String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_NOT_SUPPORTED_BY_FUNCTION, function);
-                    throw new IllegalArgumentException(msg);
+                    throw ExceptionsHelper.badRequestException(msg);
                 }
                 for (DetectionRule rule : detectorRules) {
                     checkScoping(rule);
@@ -680,18 +682,18 @@ public class Detector extends ToXContentToBytes implements Writeable {
             // partition, by and over field names cannot be duplicates
             if (!emptyPartitionField) {
                 if (partitionFieldName.equals(byFieldName)) {
-                    throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_DUPLICATE_FIELD_NAME,
+                    throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_DUPLICATE_FIELD_NAME,
                             PARTITION_FIELD_NAME_FIELD.getPreferredName(), BY_FIELD_NAME_FIELD.getPreferredName(),
                             partitionFieldName));
                 }
                 if (partitionFieldName.equals(overFieldName)) {
-                    throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_DUPLICATE_FIELD_NAME,
+                    throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_DUPLICATE_FIELD_NAME,
                            PARTITION_FIELD_NAME_FIELD.getPreferredName(), OVER_FIELD_NAME_FIELD.getPreferredName(),
                            partitionFieldName));
                 }
             }
             if (!emptyByField && byFieldName.equals(overFieldName)) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_DUPLICATE_FIELD_NAME,
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_DUPLICATE_FIELD_NAME,
                         BY_FIELD_NAME_FIELD.getPreferredName(), OVER_FIELD_NAME_FIELD.getPreferredName(),
                         byFieldName));
             }
@@ -700,29 +702,29 @@ public class Detector extends ToXContentToBytes implements Writeable {
             // days of the Splunk app and could be removed now BUT ONLY IF THE C++ CODE IS CHANGED
             // FIRST - see https://github.com/elastic/x-pack-elasticsearch/issues/858
             if (COUNT.equals(byFieldName)) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_COUNT_DISALLOWED,
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_COUNT_DISALLOWED,
                         BY_FIELD_NAME_FIELD.getPreferredName()));
             }
             if (COUNT.equals(overFieldName)) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_COUNT_DISALLOWED,
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_COUNT_DISALLOWED,
                         OVER_FIELD_NAME_FIELD.getPreferredName()));
             }

             if (BY.equals(byFieldName)) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_BY_DISALLOWED,
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_BY_DISALLOWED,
                         BY_FIELD_NAME_FIELD.getPreferredName()));
             }
             if (BY.equals(overFieldName)) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_BY_DISALLOWED,
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_BY_DISALLOWED,
                         OVER_FIELD_NAME_FIELD.getPreferredName()));
             }

             if (OVER.equals(byFieldName)) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_OVER_DISALLOWED,
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_OVER_DISALLOWED,
                         BY_FIELD_NAME_FIELD.getPreferredName()));
             }
             if (OVER.equals(overFieldName)) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_OVER_DISALLOWED,
+                throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_DETECTOR_OVER_DISALLOWED,
                         OVER_FIELD_NAME_FIELD.getPreferredName()));
             }

@@ -739,17 +741,14 @@ public class Detector extends ToXContentToBytes implements Writeable {
         /**
          * Check that the characters used in a field name will not cause problems.
          *
-         * @param field
-         *            The field name to be validated
-         * @return true
+         * @param field The field name to be validated
          */
-        public static boolean verifyFieldName(String field) throws ElasticsearchParseException {
+        public static void verifyFieldName(String field) throws ElasticsearchParseException {
             if (field != null && containsInvalidChar(field)) {
-                throw new IllegalArgumentException(
+                throw ExceptionsHelper.badRequestException(
                         Messages.getMessage(Messages.JOB_CONFIG_INVALID_FIELDNAME_CHARS, field, Detector.PROHIBITED));

             }
-            return true;
         }

         private static boolean containsInvalidChar(String field) {
@@ -758,7 +757,7 @@ public class Detector extends ToXContentToBytes implements Writeable {
                 return true;
             }
         }
-        return field.chars().anyMatch(ch -> Character.isISOControl(ch));
+        return field.chars().anyMatch(Character::isISOControl);
     }

     private void checkScoping(DetectionRule rule) throws ElasticsearchParseException {
@@ -769,7 +768,7 @@ public class Detector extends ToXContentToBytes implements Writeable {
             if (!validOptions.contains(condition.getFieldName())) {
                 String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_INVALID_FIELD_NAME, validOptions,
                         condition.getFieldName());
-                throw new IllegalArgumentException(msg);
+                throw ExceptionsHelper.badRequestException(msg);
             }
         }
     }
@@ -779,7 +778,7 @@ public class Detector extends ToXContentToBytes implements Writeable {
         if (targetFieldName != null && !analysisFields.contains(targetFieldName)) {
             String msg =
                     Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_INVALID_TARGET_FIELD_NAME, analysisFields, targetFieldName);
-            throw new IllegalArgumentException(msg);
+            throw ExceptionsHelper.badRequestException(msg);
         }
     }

@@ -833,7 +832,7 @@ public class Detector extends ToXContentToBytes implements Writeable {
             if (fieldName.equals(detector.byFieldName)) {
                 return ScopingLevel.BY;
             }
-            throw new IllegalArgumentException(
+            throw ExceptionsHelper.badRequestException(
                     "fieldName '" + fieldName + "' does not match an analysis field");
         }
     }
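The Character::isISOControl change above is purely cosmetic. Only the control-character part of containsInvalidChar is reproduced in this stand-alone sketch (the full method also performs an earlier check against Detector.PROHIBITED that is not shown here):

public class InvalidCharSketch {
    static boolean containsControlChar(String field) {
        return field.chars().anyMatch(Character::isISOControl);
    }

    public static void main(String[] args) {
        System.out.println(containsControlChar("airline"));     // false
        System.out.println(containsControlChar("bad\tfield"));  // true, tab is an ISO control character
    }
}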
@@ -27,7 +27,6 @@ import org.elasticsearch.xpack.ml.utils.time.TimeUtils;

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.HashSet;
 import java.util.List;
@@ -16,6 +16,7 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.xpack.ml.job.messages.Messages;
+import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

 import java.io.IOException;
 import java.util.EnumSet;
@@ -190,14 +191,14 @@ public class RuleCondition extends ToXContentToBytes implements Writeable {
     private static void checkCategoricalHasNoField(String fieldName, Object fieldValue) throws ElasticsearchParseException {
         if (fieldValue != null) {
             String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_CATEGORICAL_INVALID_OPTION, fieldName);
-            throw new IllegalArgumentException(msg);
+            throw ExceptionsHelper.badRequestException(msg);
         }
     }

     private static void checkCategoricalHasField(String fieldName, Object fieldValue) throws ElasticsearchParseException {
         if (fieldValue == null) {
             String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_CATEGORICAL_MISSING_OPTION, fieldName);
-            throw new IllegalArgumentException(msg);
+            throw ExceptionsHelper.badRequestException(msg);
         }
     }

@@ -206,7 +207,7 @@ public class RuleCondition extends ToXContentToBytes implements Writeable {
         checkNumericalHasField(Condition.CONDITION_FIELD.getPreferredName(), ruleCondition.getCondition());
         if (ruleCondition.getFieldName() != null && ruleCondition.getFieldValue() == null) {
             String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_NUMERICAL_WITH_FIELD_NAME_REQUIRES_FIELD_VALUE);
-            throw new IllegalArgumentException(msg);
+            throw ExceptionsHelper.badRequestException(msg);
         }
         checkNumericalConditionOparatorsAreValid(ruleCondition);
     }
@@ -214,14 +215,14 @@ public class RuleCondition extends ToXContentToBytes implements Writeable {
     private static void checkNumericalHasNoField(String fieldName, Object fieldValue) throws ElasticsearchParseException {
         if (fieldValue != null) {
             String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_NUMERICAL_INVALID_OPTION, fieldName);
-            throw new IllegalArgumentException(msg);
+            throw ExceptionsHelper.badRequestException(msg);
         }
     }

     private static void checkNumericalHasField(String fieldName, Object fieldValue) throws ElasticsearchParseException {
         if (fieldValue == null) {
             String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_NUMERICAL_MISSING_OPTION, fieldName);
-            throw new IllegalArgumentException(msg);
+            throw ExceptionsHelper.badRequestException(msg);
         }
     }

@@ -229,7 +230,7 @@ public class RuleCondition extends ToXContentToBytes implements Writeable {
         if (ruleCondition.getFieldValue() != null && ruleCondition.getFieldName() == null) {
             String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_MISSING_FIELD_NAME,
                     ruleCondition.getFieldValue());
-            throw new IllegalArgumentException(msg);
+            throw ExceptionsHelper.badRequestException(msg);
         }
     }

@@ -239,7 +240,7 @@ public class RuleCondition extends ToXContentToBytes implements Writeable {
         Operator operator = ruleCondition.getCondition().getOperator();
         if (!VALID_CONDITION_OPERATORS.contains(operator)) {
             String msg = Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_CONDITION_NUMERICAL_INVALID_OPERATOR, operator);
-            throw new IllegalArgumentException(msg);
+            throw ExceptionsHelper.badRequestException(msg);
         }
     }
 }
@@ -42,6 +42,10 @@ public class ExceptionsHelper {
         return new ElasticsearchStatusException(msg, RestStatus.CONFLICT, args);
     }

+    public static ElasticsearchStatusException badRequestException(String msg, Throwable cause, Object... args) {
+        return new ElasticsearchStatusException(msg, RestStatus.BAD_REQUEST, cause, args);
+    }
+
     public static ElasticsearchStatusException badRequestException(String msg, Object... args) {
         return new ElasticsearchStatusException(msg, RestStatus.BAD_REQUEST, args);
     }
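A short usage sketch of the two helpers (the messages are taken from elsewhere in this commit; the surrounding class is only illustrative): both build an ElasticsearchStatusException carrying RestStatus.BAD_REQUEST, and the new overload keeps the original failure attached as the cause so it still appears in the nested error details:

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

public class BadRequestSketch {
    static void example(Exception mappingFailure) {
        // Plain message: used by most of the validation code touched in this commit.
        ElasticsearchStatusException e1 =
                ExceptionsHelper.badRequestException("Aggregation interval must be greater than 0");

        // Message plus cause: used by JobManager for the mapping type clash.
        ElasticsearchStatusException e2 =
                ExceptionsHelper.badRequestException(Messages.JOB_CONFIG_MAPPING_TYPE_CLASH, mappingFailure);

        assert e1.status() == RestStatus.BAD_REQUEST;
        assert e2.getCause() == mappingFailure;
    }
}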
@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.datafeed;

 import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;

+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -23,7 +24,6 @@ import org.joda.time.DateTimeZone;

 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.TimeZone;
@@ -118,13 +118,13 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon

     public void testFillDefaults() {
         DatafeedConfig.Builder expectedDatafeedConfig = new DatafeedConfig.Builder("datafeed1", "job1");
-        expectedDatafeedConfig.setIndices(Arrays.asList("index"));
-        expectedDatafeedConfig.setTypes(Arrays.asList("type"));
+        expectedDatafeedConfig.setIndices(Collections.singletonList("index"));
+        expectedDatafeedConfig.setTypes(Collections.singletonList("type"));
         expectedDatafeedConfig.setQueryDelay(TimeValue.timeValueMinutes(1));
         expectedDatafeedConfig.setScrollSize(1000);
         DatafeedConfig.Builder defaultedDatafeedConfig = new DatafeedConfig.Builder("datafeed1", "job1");
-        defaultedDatafeedConfig.setIndices(Arrays.asList("index"));
-        defaultedDatafeedConfig.setTypes(Arrays.asList("type"));
+        defaultedDatafeedConfig.setIndices(Collections.singletonList("index"));
+        defaultedDatafeedConfig.setTypes(Collections.singletonList("type"));

         assertEquals(expectedDatafeedConfig.build(), defaultedDatafeedConfig.build());
     }
@@ -137,7 +137,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
     public void testCheckValid_GivenEmptyIndices() throws IOException {
         DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
         conf.setIndices(Collections.emptyList());
-        IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
+        ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, conf::build);
         assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "indices", "[]"), e.getMessage());
     }

@@ -147,7 +147,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
         indices.add(null);
         DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
         conf.setIndices(indices);
-        IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
+        ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, conf::build);
         assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "indices", "[null, null]"), e.getMessage());
     }

@@ -157,7 +157,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
         indices.add("");
         DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
         conf.setIndices(indices);
-        IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
+        ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, conf::build);
         assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "indices", "[, ]"), e.getMessage());
     }

@@ -183,27 +183,27 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon

     public void testCheckValid_GivenNegativeScrollSize() throws IOException {
         DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
-        IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> conf.setScrollSize(-1000));
+        ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, () -> conf.setScrollSize(-1000));
         assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "scroll_size", -1000L), e.getMessage());
     }

     public void testBuild_GivenScriptFieldsAndAggregations() {
         DatafeedConfig.Builder datafeed = new DatafeedConfig.Builder("datafeed1", "job1");
-        datafeed.setIndices(Arrays.asList("my_index"));
-        datafeed.setTypes(Arrays.asList("my_type"));
-        datafeed.setScriptFields(Arrays.asList(new SearchSourceBuilder.ScriptField(randomAlphaOfLength(10),
+        datafeed.setIndices(Collections.singletonList("my_index"));
+        datafeed.setTypes(Collections.singletonList("my_type"));
+        datafeed.setScriptFields(Collections.singletonList(new SearchSourceBuilder.ScriptField(randomAlphaOfLength(10),
                 mockScript(randomAlphaOfLength(10)), randomBoolean())));
         datafeed.setAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.avg("foo")));

-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> datafeed.build());
+        ElasticsearchException e = expectThrows(ElasticsearchException.class, datafeed::build);

         assertThat(e.getMessage(), equalTo("script_fields cannot be used in combination with aggregations"));
     }

     public void testHasAggregations_GivenNull() {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
-        builder.setIndices(Arrays.asList("myIndex"));
-        builder.setTypes(Arrays.asList("myType"));
+        builder.setIndices(Collections.singletonList("myIndex"));
+        builder.setTypes(Collections.singletonList("myType"));
         DatafeedConfig datafeedConfig = builder.build();

         assertThat(datafeedConfig.hasAggregations(), is(false));
@@ -211,8 +211,8 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon

     public void testHasAggregations_NonEmpty() {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
-        builder.setIndices(Arrays.asList("myIndex"));
-        builder.setTypes(Arrays.asList("myType"));
+        builder.setIndices(Collections.singletonList("myIndex"));
+        builder.setTypes(Collections.singletonList("myType"));
         builder.setAggregations(new AggregatorFactories.Builder().addAggregator(
                 AggregationBuilders.dateHistogram("time").interval(300000)));
         DatafeedConfig datafeedConfig = builder.build();
@@ -222,33 +222,33 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon

     public void testBuild_GivenEmptyAggregations() {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
-        builder.setIndices(Arrays.asList("myIndex"));
-        builder.setTypes(Arrays.asList("myType"));
+        builder.setIndices(Collections.singletonList("myIndex"));
+        builder.setTypes(Collections.singletonList("myType"));
         builder.setAggregations(new AggregatorFactories.Builder());

-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.build());
+        ElasticsearchException e = expectThrows(ElasticsearchException.class, builder::build);

         assertThat(e.getMessage(), equalTo("A top level date_histogram (or histogram) aggregation is required"));
     }

     public void testBuild_GivenTopLevelAggIsTerms() {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
-        builder.setIndices(Arrays.asList("myIndex"));
-        builder.setTypes(Arrays.asList("myType"));
+        builder.setIndices(Collections.singletonList("myIndex"));
+        builder.setTypes(Collections.singletonList("myType"));
         builder.setAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.terms("foo")));

-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.build());
+        ElasticsearchException e = expectThrows(ElasticsearchException.class, builder::build);

         assertThat(e.getMessage(), equalTo("A top level date_histogram (or histogram) aggregation is required"));
     }

     public void testBuild_GivenHistogramWithDefaultInterval() {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
-        builder.setIndices(Arrays.asList("myIndex"));
-        builder.setTypes(Arrays.asList("myType"));
+        builder.setIndices(Collections.singletonList("myIndex"));
+        builder.setTypes(Collections.singletonList("myType"));
         builder.setAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.histogram("time")));

-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.build());
+        ElasticsearchException e = expectThrows(ElasticsearchException.class, builder::build);

         assertThat(e.getMessage(), equalTo("Aggregation interval must be greater than 0"));
     }
@@ -256,14 +256,14 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
     public void testBuild_GivenDateHistogramWithInvalidTimeZone() {
         DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("time")
                 .interval(300000L).timeZone(DateTimeZone.forTimeZone(TimeZone.getTimeZone("EST")));
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+        ElasticsearchException e = expectThrows(ElasticsearchException.class,
                 () -> createDatafeedWithDateHistogram(dateHistogram));

         assertThat(e.getMessage(), equalTo("ML requires date_histogram.time_zone to be UTC"));
     }

     public void testBuild_GivenDateHistogramWithDefaultInterval() {
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+        ElasticsearchException e = expectThrows(ElasticsearchException.class,
                 () -> createDatafeedWithDateHistogram((String) null));

         assertThat(e.getMessage(), equalTo("Aggregation interval must be greater than 0"));
@@ -286,7 +286,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
     }

     public void testBuild_GivenDateHistogramWithMoreThanCalendarWeek() {
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+        ElasticsearchException e = expectThrows(ElasticsearchException.class,
                 () -> createDatafeedWithDateHistogram("8d"));

         assertThat(e.getMessage(), containsString("When specifying a date_histogram calendar interval [8d]"));
@@ -329,8 +329,8 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon

     private static DatafeedConfig createDatafeedWithDateHistogram(DateHistogramAggregationBuilder dateHistogram) {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
-        builder.setIndices(Arrays.asList("myIndex"));
-        builder.setTypes(Arrays.asList("myType"));
+        builder.setIndices(Collections.singletonList("myIndex"));
+        builder.setTypes(Collections.singletonList("myType"));
         builder.setAggregations(new AggregatorFactories.Builder().addAggregator(dateHistogram));
         return builder.build();
     }
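Because the builders now throw ElasticsearchException subclasses rather than IllegalArgumentException, the tests above only need to swap the expected type. A test could additionally assert the REST status; a hypothetical sketch, not part of this commit (the test method name is invented):

    public void testCheckValid_GivenNegativeScrollSize_ReturnsBadRequest() {
        DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
        ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> conf.setScrollSize(-1000));
        assertEquals(RestStatus.BAD_REQUEST, e.status());
    }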
@ -5,6 +5,7 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.ml.job.config;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -95,10 +96,8 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
|
|||
det.setPartitionFieldName("sourcetype");
|
||||
AnalysisConfig ac = createConfigWithDetectors(Collections.singletonList(det.build()));
|
||||
|
||||
Set<String> termFields = new TreeSet<>(Arrays.asList(new String[]{
|
||||
"airline", "sourcetype"}));
|
||||
Set<String> analysisFields = new TreeSet<>(Arrays.asList(new String[]{
|
||||
"responsetime", "airline", "sourcetype"}));
|
||||
Set<String> termFields = new TreeSet<>(Arrays.asList("airline", "sourcetype"));
|
||||
Set<String> analysisFields = new TreeSet<>(Arrays.asList("responsetime", "airline", "sourcetype"));
|
||||
|
||||
assertEquals(termFields.size(), ac.termFields().size());
|
||||
assertEquals(analysisFields.size(), ac.analysisFields().size());
|
||||
|
@ -171,12 +170,12 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
|
|||
AnalysisConfig ac = builder.build();
|
||||
|
||||
|
||||
Set<String> termFields = new TreeSet<>(Arrays.asList(new String[]{
|
||||
Set<String> termFields = new TreeSet<>(Arrays.asList(
|
||||
"by_one", "by_two", "over_field",
|
||||
"partition_one", "partition_two", "Influencer_Field"}));
|
||||
Set<String> analysisFields = new TreeSet<>(Arrays.asList(new String[]{
|
||||
"partition_one", "partition_two", "Influencer_Field"));
|
||||
Set<String> analysisFields = new TreeSet<>(Arrays.asList(
|
||||
"metric1", "metric2", "by_one", "by_two", "over_field",
|
||||
"partition_one", "partition_two", "Influencer_Field"}));
|
||||
"partition_one", "partition_two", "Influencer_Field"));
|
||||
|
||||
assertEquals(termFields.size(), ac.termFields().size());
|
||||
assertEquals(analysisFields.size(), ac.analysisFields().size());
|
||||
|
@@ -238,10 +237,10 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
Detector.Builder detector = new Detector.Builder();
detector.setFunction("count");
detector.setByFieldName("mlcategory");
AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Arrays.asList(detector.build()));
AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
ac.setCategorizationFieldName(null);

IllegalArgumentException e = expectThrows(IllegalArgumentException.class, ac::build);
ElasticsearchException e = expectThrows(ElasticsearchException.class, ac::build);
assertThat(e.getMessage(), equalTo("categorization_field_name must be set for mlcategory to be available"));
}

@@ -249,10 +248,10 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
Detector.Builder detector = new Detector.Builder();
detector.setFunction("count");
detector.setOverFieldName("mlcategory");
AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Arrays.asList(detector.build()));
AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
ac.setCategorizationFieldName(null);

IllegalArgumentException e = expectThrows(IllegalArgumentException.class, ac::build);
ElasticsearchException e = expectThrows(ElasticsearchException.class, ac::build);
assertThat(e.getMessage(), equalTo("categorization_field_name must be set for mlcategory to be available"));
}

@@ -260,10 +259,10 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
Detector.Builder detector = new Detector.Builder();
detector.setFunction("count");
detector.setPartitionFieldName("mlcategory");
AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Arrays.asList(detector.build()));
AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
ac.setCategorizationFieldName(null);

IllegalArgumentException e = expectThrows(IllegalArgumentException.class, ac::build);
ElasticsearchException e = expectThrows(ElasticsearchException.class, ac::build);
assertThat(e.getMessage(), equalTo("categorization_field_name must be set for mlcategory to be available"));
}

@@ -271,10 +270,10 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
Detector.Builder detector = new Detector.Builder();
detector.setFunction("count");
detector.setOverFieldName("foo");
AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Arrays.asList(detector.build()));
AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
ac.setCategorizationFieldName("msg");

IllegalArgumentException e = expectThrows(IllegalArgumentException.class, ac::build);
ElasticsearchException e = expectThrows(ElasticsearchException.class, ac::build);
assertThat(e.getMessage(), equalTo("categorization_field_name is set but mlcategory is " +
"not used in any detector by/over/partition field"));
}
@@ -283,11 +282,45 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
Detector.Builder detector = new Detector.Builder();
detector.setFunction("count");
detector.setOverFieldName("mlcategory");
AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Arrays.asList(detector.build()));
AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
ac.setCategorizationFieldName("msg");
ac.build();
}

public void testBuild_GivenNestedFieldOverlapsNonNested() {
Detector.Builder detector1 = new Detector.Builder();
detector1.setFunction("count");
detector1.setByFieldName("a");
Detector.Builder detector2 = new Detector.Builder();
detector2.setFunction("count");
detector2.setPartitionFieldName("a.b");
AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Arrays.asList(detector1.build(), detector2.build()));

ElasticsearchException e = expectThrows(ElasticsearchException.class, ac::build);
assertThat(e.getMessage(), equalTo("Fields a and a.b cannot both be used in the same analysis_config"));
}

public void testBuild_GivenOverlappingNestedFields() {
Detector.Builder detector = new Detector.Builder();
detector.setFunction("count");
detector.setByFieldName("a.b.c");
AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
ac.setInfluencers(Arrays.asList("a.b", "d"));

ElasticsearchException e = expectThrows(ElasticsearchException.class, ac::build);
assertThat(e.getMessage(), equalTo("Fields a.b and a.b.c cannot both be used in the same analysis_config"));
}

public void testBuild_GivenNonOverlappingNestedFields() {
Detector.Builder detector = new Detector.Builder();
detector.setFunction("count");
detector.setByFieldName("a.b.c");
AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
ac.setInfluencers(Arrays.asList("a.b.c", "a.b.d"));

ac.build();
}

public void testEquals_GivenSameReference() {
AnalysisConfig config = createFullyPopulatedConfig();
assertTrue(config.equals(config));
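The three new tests above pin down the nested-field check: a single analysis_config may not use both a field "a" and a nested field "a.b", while sibling nested fields such as "a.b.c" and "a.b.d" remain legal. The snippet below is only a minimal sketch of such a check, written for illustration; the free-standing class and the method name overlappingFieldName are assumptions, not the actual AnalysisConfig.Builder code.

import java.util.List;

final class NestedFieldOverlapSketch {

    /**
     * Returns an error message if one analysis field is a dot-delimited prefix of another
     * (for example "a" and "a.b"), or null when no pair of fields overlaps.
     */
    static String overlappingFieldName(List<String> analysisFields) {
        for (String shorter : analysisFields) {
            for (String longer : analysisFields) {
                // "a.b" overlaps "a", but "a.b.c" does not overlap "a.b.d"
                if (longer.startsWith(shorter + ".")) {
                    return "Fields " + shorter + " and " + longer
                            + " cannot both be used in the same analysis_config";
                }
            }
        }
        return null;
    }
}

A pairwise comparison like this is all the expected messages require ("Fields a and a.b cannot both be used in the same analysis_config"); whatever the real implementation does internally, it only has to report one overlapping pair.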
@@ -347,11 +380,11 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon

public void testEquals_GivenDifferentInfluencers() {
AnalysisConfig.Builder builder = createConfigBuilder();
builder.setInfluencers(Arrays.asList("foo"));
builder.setInfluencers(Collections.singletonList("foo"));
AnalysisConfig config1 = builder.build();

builder = createConfigBuilder();
builder.setInfluencers(Arrays.asList("bar"));
builder.setInfluencers(Collections.singletonList("bar"));
AnalysisConfig config2 = builder.build();

assertFalse(config1.equals(config2));

@@ -410,13 +443,15 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
}

public void testExtractReferencedLists() {
DetectionRule rule1 = new DetectionRule.Builder(Arrays.asList(RuleCondition.createCategorical("foo", "filter1"))).build();
DetectionRule rule2 = new DetectionRule.Builder(Arrays.asList(RuleCondition.createCategorical("foo", "filter2"))).build();
DetectionRule rule1 = new DetectionRule.Builder(Collections.singletonList(RuleCondition.createCategorical("foo",
"filter1"))).build();
DetectionRule rule2 = new DetectionRule.Builder(Collections.singletonList(RuleCondition.createCategorical("foo",
"filter2"))).build();
Detector.Builder detector1 = new Detector.Builder("count", null);
detector1.setByFieldName("foo");
detector1.setDetectorRules(Arrays.asList(rule1));
detector1.setDetectorRules(Collections.singletonList(rule1));
Detector.Builder detector2 = new Detector.Builder("count", null);
detector2.setDetectorRules(Arrays.asList(rule2));
detector2.setDetectorRules(Collections.singletonList(rule2));
detector2.setByFieldName("foo");
AnalysisConfig config = new AnalysisConfig.Builder(
Arrays.asList(detector1.build(), detector2.build(), new Detector.Builder("count", null).build())).build();
@@ -431,8 +466,8 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
Collections.singletonList(detector.build()));
builder.setBucketSpan(TimeValue.timeValueHours(1));
builder.setCategorizationFieldName("cat");
builder.setCategorizationFilters(Arrays.asList("foo"));
builder.setInfluencers(Arrays.asList("myInfluencer"));
builder.setCategorizationFilters(Collections.singletonList("foo"));
builder.setInfluencers(Collections.singletonList("myInfluencer"));
builder.setLatency(TimeValue.timeValueSeconds(3600));
builder.setSummaryCountFieldName("sumCount");
return builder.build();

@@ -455,7 +490,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
d = new Detector.Builder("distinct_count", null).build();
new AnalysisConfig.Builder(Collections.singletonList(d)).build();
assertTrue(false); // shouldn't get here
} catch (IllegalArgumentException e) {
} catch (ElasticsearchException e) {
assertEquals("Unless the function is 'count' one of field_name, by_field_name or over_field_name must be set", e.getMessage());
}

@@ -466,7 +501,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon

builder = new Detector.Builder("info_content", "somefield");
builder.setOverFieldName("over_field");
d = builder.build();
builder.build();
new AnalysisConfig.Builder(Collections.singletonList(builder.build())).build();

builder.setByFieldName("by_field");

@@ -477,7 +512,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
builder.setOverFieldName("over_field");
new AnalysisConfig.Builder(Collections.singletonList(builder.build())).build();
assertTrue(false); // shouldn't get here
} catch (IllegalArgumentException e) {
} catch (ElasticsearchException e) {
assertEquals("Unknown function 'made_up_function'", e.getMessage());
}
}
@@ -486,7 +521,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
AnalysisConfig.Builder config = createValidConfig();
config.setBucketSpan(TimeValue.timeValueSeconds(-1));

IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> config.build());
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, config::build);

assertEquals("bucket_span cannot be less or equal than 0. Value = -1", e.getMessage());
}

@@ -495,7 +530,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
AnalysisConfig.Builder analysisConfig = createValidConfig();
analysisConfig.setLatency(TimeValue.timeValueSeconds(-1));

IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> analysisConfig.build());
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, analysisConfig::build);

assertEquals("latency cannot be less than 0. Value = -1", e.getMessage());
}

@@ -504,7 +539,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
AnalysisConfig.Builder analysisConfig = createValidConfig();
analysisConfig.setDetectors(null);

IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> analysisConfig.build());
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, analysisConfig::build);

assertEquals(Messages.getMessage(Messages.JOB_CONFIG_NO_DETECTORS), e.getMessage());
}

@@ -593,7 +628,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
builder.setByFieldName("value");
detectors.add(builder.build());
analysisConfig.setDetectors(detectors);
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, analysisConfig::build);
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, analysisConfig::build);
assertEquals("Overlapping buckets cannot be used with function '[rare]'", e.getMessage());

// Test overlappingBuckets set

@@ -625,7 +660,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
ac.setDetectors(detectors);

ac.setBucketSpan(TimeValue.timeValueSeconds(4L));
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, ac::build);
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, ac::build);
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_MULTIPLE_BUCKETSPANS_MUST_BE_MULTIPLE, "10s", "4s"), e.getMessage());

ac.setBucketSpan(TimeValue.timeValueSeconds(5L));
@@ -650,23 +685,23 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
TimeValue.timeValueSeconds(35L)));

ac.setBucketSpan(TimeValue.timeValueSeconds(222L));
ac.setMultipleBucketSpans(Arrays.asList());
ac.setMultipleBucketSpans(Collections.emptyList());
ac.build();

ac.setMultipleBucketSpans(Arrays.asList(TimeValue.timeValueSeconds(222L)));
e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> ac.build());
ac.setMultipleBucketSpans(Collections.singletonList(TimeValue.timeValueSeconds(222L)));
e = ESTestCase.expectThrows(ElasticsearchException.class, ac::build);
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_MULTIPLE_BUCKETSPANS_MUST_BE_MULTIPLE, "3.7m", "3.7m"), e.getMessage());

ac.setMultipleBucketSpans(Arrays.asList(TimeValue.timeValueSeconds(-444L), TimeValue.timeValueSeconds(-888L)));
e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> ac.build());
e = ESTestCase.expectThrows(ElasticsearchException.class, ac::build);
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_MULTIPLE_BUCKETSPANS_MUST_BE_MULTIPLE, -444, "3.7m"), e.getMessage());
}

public void testVerify_GivenCategorizationFiltersButNoCategorizationFieldName() {
AnalysisConfig.Builder config = createValidConfig();
config.setCategorizationFilters(Arrays.asList("foo"));
config.setCategorizationFilters(Collections.singletonList("foo"));

IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> config.build());
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, config::build);

assertEquals(Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_REQUIRE_CATEGORIZATION_FIELD_NAME), e.getMessage());
}

@@ -675,7 +710,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
AnalysisConfig.Builder config = createValidCategorizationConfig();
config.setCategorizationFilters(Arrays.asList("foo", "bar", "foo"));

IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> config.build());
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, config::build);

assertEquals(Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_DUPLICATES), e.getMessage());
}

@@ -684,7 +719,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
AnalysisConfig.Builder config = createValidCategorizationConfig();
config.setCategorizationFilters(Arrays.asList("foo", ""));

IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> config.build());
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, config::build);

assertEquals(Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_EMPTY), e.getMessage());
}

@@ -694,7 +729,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
AnalysisConfig.Builder config = createValidConfig();
config.setUsePerPartitionNormalization(true);

IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> config.build());
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, config::build);

assertEquals(Messages.getMessage(Messages.JOB_CONFIG_PER_PARTITION_NORMALIZATION_REQUIRES_PARTITION_FIELD), e.getMessage());
}

@@ -718,7 +753,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
config.setInfluencers(Arrays.asList("inf1", "inf2"));
config.setUsePerPartitionNormalization(true);

IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> config.build());
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, config::build);

assertEquals(Messages.getMessage(Messages.JOB_CONFIG_PER_PARTITION_NORMALIZATION_CANNOT_USE_INFLUENCERS), e.getMessage());
}

@@ -727,7 +762,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
AnalysisConfig.Builder config = createValidCategorizationConfig();
config.setCategorizationFilters(Arrays.asList("foo", "("));

IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> config.build());
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, config::build);

assertEquals(Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_INVALID_REGEX, "("), e.getMessage());
}
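Every assertion in the tests above that used to expect an IllegalArgumentException now expects an ElasticsearchException, and lambdas such as () -> config.build() are tightened to method references like config::build. For the tests to keep passing, the config builders must throw something that is, or extends, ElasticsearchException. A helper of roughly the following shape would satisfy them; this is a sketch under assumptions, not the project's actual helper, and the class and method names here are illustrative only.

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.rest.RestStatus;

final class ValidationExceptions {

    private ValidationExceptions() {}

    // ElasticsearchStatusException extends ElasticsearchException, so
    // expectThrows(ElasticsearchException.class, ...) in the tests above matches it,
    // and the REST layer can report the failure as a 400 Bad Request.
    static ElasticsearchStatusException badRequestException(String message, Object... args) {
        return new ElasticsearchStatusException(message, RestStatus.BAD_REQUEST, args);
    }
}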
@@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.job.messages.Messages;

@@ -63,7 +64,7 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase<AnalysisLim
}

public void testVerify_GivenNegativeCategorizationExamplesLimit() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new AnalysisLimits(1L, -1L));
ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> new AnalysisLimits(1L, -1L));
String errorMessage = Messages.getMessage(Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW,
AnalysisLimits.CATEGORIZATION_EXAMPLES_LIMIT, 0, -1L);
assertEquals(errorMessage, e.getMessage());
@@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.ESTestCase;

@@ -68,17 +69,17 @@ public class ConditionTests extends AbstractSerializingTestCase<Condition> {
}

public void testVerify_GivenEmptyValue() {
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> new Condition(Operator.LT, ""));
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, () -> new Condition(Operator.LT, ""));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_CONDITION_INVALID_VALUE_NUMBER, ""), e.getMessage());
}

public void testVerify_GivenInvalidRegex() {
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, () -> new Condition(Operator.MATCH, "[*"));
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, () -> new Condition(Operator.MATCH, "[*"));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_CONDITION_INVALID_VALUE_REGEX, "[*"), e.getMessage());
}

public void testVerify_GivenNullRegex() {
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class,
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class,
() -> new Condition(Operator.MATCH, null));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_CONDITION_INVALID_VALUE_NULL, "[*"), e.getMessage());
}
@@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.Writeable.Reader;

@@ -57,15 +58,15 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
DataDescription.Builder description = new DataDescription.Builder();
expectThrows(IllegalArgumentException.class, () -> description.setTimeFormat(null));

IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> description.setTimeFormat("invalid"));
ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> description.setTimeFormat("invalid"));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, "invalid"), e.getMessage());

e = expectThrows(IllegalArgumentException.class, () -> description.setTimeFormat(""));
e = expectThrows(ElasticsearchException.class, () -> description.setTimeFormat(""));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, ""), e.getMessage());

e = expectThrows(IllegalArgumentException.class, () -> description.setTimeFormat("y-M-dd"));
e = expectThrows(ElasticsearchException.class, () -> description.setTimeFormat("y-M-dd"));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, "y-M-dd"), e.getMessage());
expectThrows(IllegalArgumentException.class, () -> description.setTimeFormat("YYY-mm-UU hh:mm:ssY"));
expectThrows(ElasticsearchException.class, () -> description.setTimeFormat("YYY-mm-UU hh:mm:ssY"));
}

public void testTransform_GivenDelimitedAndEpoch() {
@@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.ESTestCase;

@@ -90,7 +91,7 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
builder.setDetectorRules(Collections.singletonList(rule));
builder.setByFieldName(null);
detector = builder.build();
assertEquals(Arrays.asList("over_field"), detector.extractAnalysisFields());
assertEquals(Collections.singletonList("over_field"), detector.extractAnalysisFields());
builder = new Detector.Builder(detector);
rule = new DetectionRule.Builder(
Collections.singletonList(new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, null, null, condition, null)))

@@ -106,8 +107,8 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
public void testExtractReferencedLists() {
Detector.Builder builder = createDetector();
builder.setDetectorRules(Arrays.asList(
new DetectionRule.Builder(Arrays.asList(RuleCondition.createCategorical("by_field", "list1"))).build(),
new DetectionRule.Builder(Arrays.asList(RuleCondition.createCategorical("by_field", "list2"))).build()));
new DetectionRule.Builder(Collections.singletonList(RuleCondition.createCategorical("by_field", "list1"))).build(),
new DetectionRule.Builder(Collections.singletonList(RuleCondition.createCategorical("by_field", "list2"))).build()));

Detector detector = builder.build();
assertEquals(new HashSet<>(Arrays.asList("list1", "list2")), detector.extractReferencedFilters());

@@ -127,7 +128,7 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
.setTargetFieldValue("targetValue")
.setConditionsConnective(Connective.AND)
.build();
detector.setDetectorRules(Arrays.asList(rule));
detector.setDetectorRules(Collections.singletonList(rule));
return detector;
}
@@ -218,49 +219,49 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
private static void verifyFieldName(Detector.Builder detector, String character, boolean valid) {
Detector.Builder updated = createDetectorWithSpecificFieldName(detector.build().getFieldName() + character);
if (valid == false) {
expectThrows(IllegalArgumentException.class , () -> updated.build());
expectThrows(ElasticsearchException.class , updated::build);
}
}

private static void verifyByFieldName(Detector.Builder detector, String character, boolean valid) {
detector.setByFieldName(detector.build().getByFieldName() + character);
if (valid == false) {
expectThrows(IllegalArgumentException.class , () -> detector.build());
expectThrows(ElasticsearchException.class , detector::build);
}
}

private static void verifyOverFieldName(Detector.Builder detector, String character, boolean valid) {
detector.setOverFieldName(detector.build().getOverFieldName() + character);
if (valid == false) {
expectThrows(IllegalArgumentException.class , () -> detector.build());
expectThrows(ElasticsearchException.class , detector::build);
}
}

private static void verifyPartitionFieldName(Detector.Builder detector, String character, boolean valid) {
detector.setPartitionFieldName(detector.build().getPartitionFieldName() + character);
if (valid == false) {
expectThrows(IllegalArgumentException.class , () -> detector.build());
expectThrows(ElasticsearchException.class , detector::build);
}
}

private static void verifyFieldNameGivenPresummarised(Detector.Builder detector, String character, boolean valid) {
Detector.Builder updated = createDetectorWithSpecificFieldName(detector.build().getFieldName() + character);
expectThrows(IllegalArgumentException.class , () -> updated.build(true));
expectThrows(ElasticsearchException.class , () -> updated.build(true));
}

private static void verifyByFieldNameGivenPresummarised(Detector.Builder detector, String character, boolean valid) {
detector.setByFieldName(detector.build().getByFieldName() + character);
expectThrows(IllegalArgumentException.class , () -> detector.build(true));
expectThrows(ElasticsearchException.class , () -> detector.build(true));
}

private static void verifyOverFieldNameGivenPresummarised(Detector.Builder detector, String character, boolean valid) {
detector.setOverFieldName(detector.build().getOverFieldName() + character);
expectThrows(IllegalArgumentException.class , () -> detector.build(true));
expectThrows(ElasticsearchException.class , () -> detector.build(true));
}

private static void verifyPartitionFieldNameGivenPresummarised(Detector.Builder detector, String character, boolean valid) {
detector.setPartitionFieldName(detector.build().getPartitionFieldName() + character);
expectThrows(IllegalArgumentException.class , () -> detector.build(true));
expectThrows(ElasticsearchException.class , () -> detector.build(true));
}

private static Detector.Builder createDetectorWithValidFieldNames() {
@@ -318,13 +319,13 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
for (String f : difference) {
try {
new Detector.Builder(f, null).build();
Assert.fail("IllegalArgumentException not thrown when expected");
} catch (IllegalArgumentException e) {
Assert.fail("ElasticsearchException not thrown when expected");
} catch (ElasticsearchException e) {
}
try {
new Detector.Builder(f, null).build(true);
Assert.fail("IllegalArgumentException not thrown when expected");
} catch (IllegalArgumentException e) {
Assert.fail("ElasticsearchException not thrown when expected");
} catch (ElasticsearchException e) {
}
}

@@ -337,13 +338,13 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
builder.setOverFieldName("over_field");
try {
builder.build();
Assert.fail("IllegalArgumentException not thrown when expected");
} catch (IllegalArgumentException e) {
Assert.fail("ElasticsearchException not thrown when expected");
} catch (ElasticsearchException e) {
}
try {
builder.build(true);
Assert.fail("IllegalArgumentException not thrown when expected");
} catch (IllegalArgumentException e) {
Assert.fail("ElasticsearchException not thrown when expected");
} catch (ElasticsearchException e) {
}
}

@@ -359,13 +360,13 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
builder.setOverFieldName("over_field");
try {
builder.build();
Assert.fail("IllegalArgumentException not thrown when expected");
} catch (IllegalArgumentException e) {
Assert.fail("ElasticsearchException not thrown when expected");
} catch (ElasticsearchException e) {
}
try {
builder.build(true);
Assert.fail("IllegalArgumentException not thrown when expected");
} catch (IllegalArgumentException e) {
Assert.fail("ElasticsearchException not thrown when expected");
} catch (ElasticsearchException e) {
}
}
@@ -402,7 +403,7 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
try {
builder.build(true);
Assert.assertFalse(Detector.METRIC.equals(f));
} catch (IllegalArgumentException e) {
} catch (ElasticsearchException e) {
// "metric" is not allowed as the function for pre-summarised input
Assert.assertEquals(Detector.METRIC, f);
}

@@ -446,13 +447,13 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
builder.setOverFieldName("over_field");
try {
builder.build();
Assert.fail("IllegalArgumentException not thrown when expected");
} catch (IllegalArgumentException e) {
Assert.fail("ElasticsearchException not thrown when expected");
} catch (ElasticsearchException e) {
}
try {
builder.build(true);
Assert.fail("IllegalArgumentException not thrown when expected");
} catch (IllegalArgumentException e) {
Assert.fail("ElasticsearchException not thrown when expected");
} catch (ElasticsearchException e) {
}
}

@@ -486,7 +487,7 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
try {
builder.build(true);
Assert.assertFalse(Detector.METRIC.equals(f));
} catch (IllegalArgumentException e) {
} catch (ElasticsearchException e) {
// "metric" is not allowed as the function for pre-summarised input
Assert.assertEquals(Detector.METRIC, f);
}
@@ -501,15 +502,15 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
builder = new Detector.Builder(f, "field");
builder.setByFieldName("b");
builder.build();
Assert.fail("IllegalArgumentException not thrown when expected");
} catch (IllegalArgumentException e) {
Assert.fail("ElasticsearchException not thrown when expected");
} catch (ElasticsearchException e) {
}
try {
builder = new Detector.Builder(f, "field");
builder.setByFieldName("b");
builder.build(true);
Assert.fail("IllegalArgumentException not thrown when expected");
} catch (IllegalArgumentException e) {
Assert.fail("ElasticsearchException not thrown when expected");
} catch (ElasticsearchException e) {
}
}
Assert.assertEquals(Detector.COUNT_WITHOUT_FIELD_FUNCTIONS.size(), testedFunctionsCount);

@@ -519,13 +520,13 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
builder.setOverFieldName("over_field");
try {
builder.build();
Assert.fail("IllegalArgumentException not thrown when expected");
} catch (IllegalArgumentException e) {
Assert.fail("ElasticsearchException not thrown when expected");
} catch (ElasticsearchException e) {
}
try {
builder.build(true);
Assert.fail("IllegalArgumentException not thrown when expected");
} catch (IllegalArgumentException e) {
Assert.fail("ElasticsearchException not thrown when expected");
} catch (ElasticsearchException e) {
}
@@ -560,16 +561,16 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
builder.setByFieldName("by_field");
builder.setOverFieldName("over_field");
builder.build();
Assert.fail("IllegalArgumentException not thrown when expected");
} catch (IllegalArgumentException e) {
Assert.fail("ElasticsearchException not thrown when expected");
} catch (ElasticsearchException e) {
}
try {
builder = new Detector.Builder(f, "field");
builder.setByFieldName("by_field");
builder.setOverFieldName("over_field");
builder.build(true);
Assert.fail("IllegalArgumentException not thrown when expected");
} catch (IllegalArgumentException e) {
Assert.fail("ElasticsearchException not thrown when expected");
} catch (ElasticsearchException e) {
}
}
}

@@ -580,10 +581,10 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
detector.setPartitionFieldName("instance");
RuleCondition ruleCondition =
new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, "metricName", "metricVale", new Condition(Operator.LT, "5"), null);
DetectionRule rule = new DetectionRule.Builder(Arrays.asList(ruleCondition)).setTargetFieldName("instancE").build();
detector.setDetectorRules(Arrays.asList(rule));
DetectionRule rule = new DetectionRule.Builder(Collections.singletonList(ruleCondition)).setTargetFieldName("instancE").build();
detector.setDetectorRules(Collections.singletonList(rule));

IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, detector::build);
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, detector::build);

assertEquals(Messages.getMessage(Messages.JOB_CONFIG_DETECTION_RULE_INVALID_TARGET_FIELD_NAME,
"[metricName, instance]", "instancE"),
@@ -596,8 +597,8 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
detector.setPartitionFieldName("instance");
RuleCondition ruleCondition =
new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, "metricName", "CPU", new Condition(Operator.LT, "5"), null);
DetectionRule rule = new DetectionRule.Builder(Arrays.asList(ruleCondition)).setTargetFieldName("instance").build();
detector.setDetectorRules(Arrays.asList(rule));
DetectionRule rule = new DetectionRule.Builder(Collections.singletonList(ruleCondition)).setTargetFieldName("instance").build();
detector.setDetectorRules(Collections.singletonList(rule));
detector.build();
}

@@ -605,7 +606,7 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
Detector.Builder detector = new Detector.Builder("count", "");
detector.setByFieldName("x");
detector.setPartitionFieldName("x");
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, detector::build);
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, detector::build);

assertEquals("partition_field_name and by_field_name cannot be the same: 'x'", e.getMessage());
}

@@ -614,7 +615,7 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
Detector.Builder detector = new Detector.Builder("count", "");
detector.setByFieldName("x");
detector.setOverFieldName("x");
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, detector::build);
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, detector::build);

assertEquals("by_field_name and over_field_name cannot be the same: 'x'", e.getMessage());
}

@@ -623,7 +624,7 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
Detector.Builder detector = new Detector.Builder("count", "");
detector.setOverFieldName("x");
detector.setPartitionFieldName("x");
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, detector::build);
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, detector::build);

assertEquals("partition_field_name and over_field_name cannot be the same: 'x'", e.getMessage());
}

@@ -631,7 +632,7 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
public void testVerify_GivenByIsCount() {
Detector.Builder detector = new Detector.Builder("count", "");
detector.setByFieldName("count");
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, detector::build);
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, detector::build);

assertEquals("'count' is not a permitted value for by_field_name", e.getMessage());
}

@@ -639,7 +640,7 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
public void testVerify_GivenOverIsCount() {
Detector.Builder detector = new Detector.Builder("count", "");
detector.setOverFieldName("count");
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, detector::build);
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, detector::build);

assertEquals("'count' is not a permitted value for over_field_name", e.getMessage());
}

@@ -647,7 +648,7 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
public void testVerify_GivenByIsBy() {
Detector.Builder detector = new Detector.Builder("count", "");
detector.setByFieldName("by");
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, detector::build);
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, detector::build);

assertEquals("'by' is not a permitted value for by_field_name", e.getMessage());
}

@@ -655,7 +656,7 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
public void testVerify_GivenOverIsBy() {
Detector.Builder detector = new Detector.Builder("count", "");
detector.setOverFieldName("by");
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, detector::build);
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, detector::build);

assertEquals("'by' is not a permitted value for over_field_name", e.getMessage());
}

@@ -663,7 +664,7 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
public void testVerify_GivenByIsOver() {
Detector.Builder detector = new Detector.Builder("count", "");
detector.setByFieldName("over");
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, detector::build);
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, detector::build);

assertEquals("'over' is not a permitted value for by_field_name", e.getMessage());
}

@@ -671,7 +672,7 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
public void testVerify_GivenOverIsOver() {
Detector.Builder detector = new Detector.Builder("count", "");
detector.setOverFieldName("over");
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, detector::build);
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, detector::build);

assertEquals("'over' is not a permitted value for over_field_name", e.getMessage());
}
@@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.job.messages.Messages;

@@ -110,88 +111,88 @@ public class RuleConditionTests extends AbstractSerializingTestCase<RuleConditio

public void testVerify_GivenCategoricalWithCondition() {
Condition condition = new Condition(Operator.MATCH, "text");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> new RuleCondition(RuleConditionType.CATEGORICAL, null, null, condition, null));
assertEquals("Invalid detector rule: a categorical rule_condition does not support condition", e.getMessage());
}

public void testVerify_GivenCategoricalWithFieldValue() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> new RuleCondition(RuleConditionType.CATEGORICAL, "metric", "CPU", null, null));
assertEquals("Invalid detector rule: a categorical rule_condition does not support field_value", e.getMessage());
}

public void testVerify_GivenCategoricalWithoutValueFilter() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> new RuleCondition(RuleConditionType.CATEGORICAL, null, null, null, null));
assertEquals("Invalid detector rule: a categorical rule_condition requires value_filter to be set", e.getMessage());
}

public void testVerify_GivenNumericalActualWithValueFilter() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, null, null, null, "myFilter"));
assertEquals("Invalid detector rule: a numerical rule_condition does not support value_filter", e.getMessage());
}

public void testVerify_GivenNumericalActualWithoutCondition() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, null, null, null, null));
assertEquals("Invalid detector rule: a numerical rule_condition requires condition to be set", e.getMessage());
}
public void testVerify_GivenNumericalActualWithFieldNameButNoFieldValue() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, "metric", null, new Condition(Operator.LT, "5"), null));
assertEquals("Invalid detector rule: a numerical rule_condition with field_name requires that field_value is set", e.getMessage());
}

public void testVerify_GivenNumericalTypicalWithValueFilter() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, null, null, null, "myFilter"));
assertEquals("Invalid detector rule: a numerical rule_condition does not support value_filter", e.getMessage());
}

public void testVerify_GivenNumericalTypicalWithoutCondition() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, null, null, null, null));
assertEquals("Invalid detector rule: a numerical rule_condition requires condition to be set", e.getMessage());
}

public void testVerify_GivenNumericalDiffAbsWithValueFilter() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_DIFF_ABS, null, null, null, "myFilter"));
assertEquals("Invalid detector rule: a numerical rule_condition does not support value_filter", e.getMessage());
}

public void testVerify_GivenNumericalDiffAbsWithoutCondition() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_DIFF_ABS, null, null, null, null));
assertEquals("Invalid detector rule: a numerical rule_condition requires condition to be set", e.getMessage());
}

public void testVerify_GivenFieldValueWithoutFieldName() {
Condition condition = new Condition(Operator.LTE, "5");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_DIFF_ABS, null, "foo", condition, null));
assertEquals("Invalid detector rule: missing field_name in rule_condition where field_value 'foo' is set", e.getMessage());
}

public void testVerify_GivenNumericalAndOperatorEquals() {
Condition condition = new Condition(Operator.EQ, "5");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, null, null, condition, null));
assertEquals("Invalid detector rule: operator 'eq' is not allowed", e.getMessage());
}

public void testVerify_GivenNumericalAndOperatorMatch() {
Condition condition = new Condition(Operator.MATCH, "aaa");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, null, null, condition, null));
assertEquals("Invalid detector rule: operator 'match' is not allowed", e.getMessage());
}

public void testVerify_GivenDetectionRuleWithInvalidCondition() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> new RuleCondition(RuleConditionType.NUMERICAL_ACTUAL, "metricName", "CPU", new Condition(Operator.LT, "invalid"),
null));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_CONDITION_INVALID_VALUE_NUMBER, "invalid"), e.getMessage());
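Most of the rewritten assertions check only getMessage(). If the new exceptions also carry an HTTP status, a test can pin that down as well; the example below is an optional, illustrative extension in the style of the RuleConditionTests above, and the choice of 400 Bad Request is an assumption, not something shown in this commit.

package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESTestCase;

public class RuleConditionStatusAssertionExample extends ESTestCase {

    public void testCategoricalWithConditionReportsBadRequest() {
        Condition condition = new Condition(Operator.MATCH, "text");
        ElasticsearchException e = expectThrows(ElasticsearchException.class,
                () -> new RuleCondition(RuleConditionType.CATEGORICAL, null, null, condition, null));
        assertEquals("Invalid detector rule: a categorical rule_condition does not support condition", e.getMessage());
        // Assumed: the validation exception maps to a 400 status; adjust if the real status differs.
        assertEquals(RestStatus.BAD_REQUEST, e.status());
    }
}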