[TEST] Adds mutate function to some tests (elastic/x-pack-elasticsearch#2263)

Original commit: elastic/x-pack-elasticsearch@560d7e9a80
Colin Goodheart-Smithe 2017-08-21 11:20:14 +01:00 committed by GitHub
parent 1b6d9d430c
commit 0a1225f934
14 changed files with 617 additions and 8 deletions
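
Every file below adds the same kind of method: a mutateInstance override for the Elasticsearch test framework (AbstractSerializingTestCase / AbstractWireSerializingTestCase). The contract is that the method returns a copy of its input that is not equal to it — these overrides do so by changing exactly one randomly chosen property — so the framework's equals/hashCode and serialization round-trip checks can assert that a changed object never compares equal to the original. A minimal, self-contained sketch of that contract, using a hypothetical Point class rather than any of the real config objects:

    import java.util.Objects;
    import java.util.Random;

    // Hypothetical value class standing in for PageParams, ChunkingConfig, etc.
    final class Point {
        final int x;
        final int y;

        Point(int x, int y) {
            this.x = x;
            this.y = y;
        }

        @Override
        public boolean equals(Object other) {
            return other instanceof Point && ((Point) other).x == x && ((Point) other).y == y;
        }

        @Override
        public int hashCode() {
            return Objects.hash(x, y);
        }
    }

    public class MutateContractSketch {
        private static final Random RANDOM = new Random();

        // Same shape as the overrides below: pick one property at random,
        // change it, and leave every other property intact.
        static Point mutateInstance(Point instance) {
            int x = instance.x;
            int y = instance.y;
            switch (RANDOM.nextInt(2)) {
            case 0:
                x += 1 + RANDOM.nextInt(20);
                break;
            case 1:
                y += 1 + RANDOM.nextInt(20);
                break;
            default:
                throw new AssertionError("Illegal randomisation branch");
            }
            return new Point(x, y);
        }

        public static void main(String[] args) {
            Point original = new Point(1, 2);
            Point mutated = mutateInstance(original);
            // The framework's equals/hashCode check relies on exactly this property.
            System.out.println("mutated differs from original: " + (original.equals(mutated) == false));
        }
    }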

View File

@@ -102,6 +102,10 @@ public class ChunkingConfig implements ToXContentObject, Writeable {
return mode != Mode.OFF;
}
Mode getMode() {
return mode;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

View File

@@ -12,7 +12,6 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.rounding.DateTimeUnit;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
@@ -23,8 +22,6 @@ import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
@@ -35,7 +32,6 @@ import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.utils.MlStrings;
import org.elasticsearch.xpack.ml.utils.time.TimeUtils;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.util.ArrayList;
@@ -454,7 +450,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
ExceptionsHelper.requireNonNull(id, ID.getPreferredName());
ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
if (!MlStrings.isValidId(id)) {
- throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.INVALID_ID, ID.getPreferredName()));
+ throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.INVALID_ID, ID.getPreferredName(), id));
}
if (indices == null || indices.isEmpty() || indices.contains(null) || indices.contains("")) {
throw invalidOptionValue(INDICES.getPreferredName(), indices);

View File

@@ -20,11 +20,13 @@ import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.xpack.ml.datafeed.extractor.ExtractorUtils;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
@@ -199,6 +201,61 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
}
}
String getJobId() {
return jobId;
}
TimeValue getQueryDelay() {
return queryDelay;
}
TimeValue getFrequency() {
return frequency;
}
List<String> getIndices() {
return indices;
}
List<String> getTypes() {
return types;
}
Integer getScrollSize() {
return scrollSize;
}
QueryBuilder getQuery() {
return query;
}
AggregatorFactories.Builder getAggregations() {
return aggregations;
}
/**
* Returns the histogram's interval as epoch millis.
*/
long getHistogramIntervalMillis() {
return ExtractorUtils.getHistogramIntervalMillis(aggregations);
}
/**
* @return {@code true} when there are non-empty aggregations, {@code false}
* otherwise
*/
boolean hasAggregations() {
return aggregations != null && aggregations.count() > 0;
}
List<SearchSourceBuilder.ScriptField> getScriptFields() {
return scriptFields == null ? Collections.emptyList() : scriptFields;
}
ChunkingConfig getChunkingConfig() {
return chunkingConfig;
}
/**
* Applies the update to the given {@link DatafeedConfig}
* @return a new {@link DatafeedConfig} that contains the update
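
The getHistogramIntervalMillis method above delegates to ExtractorUtils, which this diff does not show. As a rough sketch of what such a helper has to do — locate the histogram aggregation among the datafeed's top-level aggregations and report its interval as epoch millis — with the caveat that the traversal and error handling here are assumptions, not the real ExtractorUtils code:

    import org.elasticsearch.search.aggregations.AggregationBuilder;
    import org.elasticsearch.search.aggregations.AggregatorFactories;
    import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;

    // Hypothetical stand-in for ExtractorUtils.getHistogramIntervalMillis; the
    // real helper also understands date_histogram intervals and time zones.
    final class HistogramIntervalSketch {
        static long getHistogramIntervalMillis(AggregatorFactories.Builder aggregations) {
            for (AggregationBuilder agg : aggregations.getAggregatorFactories()) {
                if (agg instanceof HistogramAggregationBuilder) {
                    // A plain histogram over a millisecond-valued time field
                    // stores its interval as a double; treat it as epoch millis.
                    return (long) ((HistogramAggregationBuilder) agg).interval();
                }
            }
            throw new IllegalArgumentException("datafeed aggregations require a histogram aggregation");
        }
    }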

View File

@@ -29,8 +29,10 @@ import org.elasticsearch.xpack.ml.job.config.JobTaskStatus;
import org.elasticsearch.xpack.ml.job.config.JobTests;
import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData;
import java.io.IOException;
import java.util.Collections;
import java.util.Date;
import java.util.Map;
import static org.elasticsearch.xpack.ml.action.OpenJobActionTests.addJobTask;
import static org.elasticsearch.xpack.ml.datafeed.DatafeedManagerTests.createDatafeedConfig;
@@ -395,4 +397,44 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
}
return builder;
}
@Override
protected MlMetadata mutateInstance(MlMetadata instance) throws IOException {
Map<String, Job> jobs = instance.getJobs();
Map<String, DatafeedConfig> datafeeds = instance.getDatafeeds();
MlMetadata.Builder metadataBuilder = new MlMetadata.Builder();
for (Map.Entry<String, Job> entry : jobs.entrySet()) {
metadataBuilder.putJob(entry.getValue(), true);
}
for (Map.Entry<String, DatafeedConfig> entry : datafeeds.entrySet()) {
metadataBuilder.putDatafeed(entry.getValue());
}
switch (between(0, 1)) {
case 0:
metadataBuilder.putJob(JobTests.createRandomizedJob(), true);
break;
case 1:
// Because we check if the job for the datafeed exists and we don't
// allow two datafeeds to exist for a single job we have to add both
// a job and a datafeed here
Job randomJob = JobTests.createRandomizedJob();
AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(randomJob.getAnalysisConfig());
analysisConfig.setLatency(null);
DatafeedConfig datafeedConfig = DatafeedConfigTests.createRandomizedDatafeedConfig(randomJob.getId(),
randomJob.getAnalysisConfig().getBucketSpan().millis());
if (datafeedConfig.hasAggregations()) {
analysisConfig.setSummaryCountFieldName("doc_count");
}
randomJob = new Job.Builder(randomJob).setAnalysisConfig(analysisConfig).build();
metadataBuilder.putJob(randomJob, false);
metadataBuilder.putDatafeed(datafeedConfig);
break;
default:
throw new AssertionError("Illegal randomisation branch");
}
return metadataBuilder.build();
}
}
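
The comment in case 1 encodes two invariants of MlMetadata.Builder.putDatafeed: the job a datafeed points at must already exist, and a job can have at most one datafeed. A hypothetical sketch of those two checks (names and messages are illustrative, not the real builder code):

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical registry illustrating the two checks the test comment describes.
    final class DatafeedRegistrySketch {
        private final Map<String, String> jobsById = new HashMap<>();        // jobId -> job
        private final Map<String, String> datafeedJobIds = new HashMap<>();  // datafeedId -> jobId

        void putJob(String jobId) {
            jobsById.put(jobId, jobId);
        }

        void putDatafeed(String datafeedId, String jobId) {
            if (jobsById.containsKey(jobId) == false) {
                throw new IllegalArgumentException("job [" + jobId + "] does not exist");
            }
            if (datafeedJobIds.containsValue(jobId)) {
                throw new IllegalArgumentException("a datafeed already exists for job [" + jobId + "]");
            }
            datafeedJobIds.put(datafeedId, jobId);
        }
    }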

View File

@@ -9,6 +9,8 @@ import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
public class PageParamsTests extends AbstractSerializingTestCase<PageParams> {
@Override
@@ -53,4 +55,31 @@ public class PageParamsTests extends AbstractSerializingTestCase<PageParams> {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new PageParams(0, 10001));
assertEquals("The sum of parameters [from] and [size] cannot be higher than 10000.", e.getMessage());
}
@Override
protected PageParams mutateInstance(PageParams instance) throws IOException {
int from = instance.getFrom();
int size = instance.getSize();
switch (between(0, 1)) {
case 0:
from += between(1, 20);
// If we have gone above the limit for from + size then we need to
// change size too
if ((from + size) > PageParams.MAX_FROM_SIZE_SUM) {
size = PageParams.MAX_FROM_SIZE_SUM - from;
}
break;
case 1:
size += between(1, 20);
// If we have gone above the limit for from + size then we need to
// change from too
if ((from + size) > PageParams.MAX_FROM_SIZE_SUM) {
from = PageParams.MAX_FROM_SIZE_SUM - size;
}
break;
default:
throw new AssertionError("Illegal randomisation branch");
}
return new PageParams(from, size);
}
}
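
Both branches clamp the mutated pair so that from + size still satisfies the PageParams.MAX_FROM_SIZE_SUM limit, which the error message above pins at 10000. A quick arithmetic check of the case 1 correction, assuming that constant:

    // Quick check of the clamp in case 1 above; MAX_FROM_SIZE_SUM mirrors the
    // 10000 limit named in the validation message in this test class.
    public class PageParamsClampSketch {
        static final int MAX_FROM_SIZE_SUM = 10000;

        public static void main(String[] args) {
            int from = 9995;
            int size = 10; // size grew by between(1, 20): 9995 + 10 > 10000
            if ((from + size) > MAX_FROM_SIZE_SUM) {
                from = MAX_FROM_SIZE_SUM - size; // same correction as case 1
            }
            System.out.println((from + size) == MAX_FROM_SIZE_SUM); // true
        }
    }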

View File

@@ -10,8 +10,10 @@ import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.ml.job.results.Influencer;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class QueryPageTests extends AbstractWireSerializingTestCase<QueryPage<Influencer>> {
@@ -30,4 +32,24 @@ public class QueryPageTests extends AbstractWireSerializingTestCase<QueryPage<Influencer>> {
protected Reader<QueryPage<Influencer>> instanceReader() {
return (in) -> new QueryPage<>(in, Influencer::new);
}
@Override
protected QueryPage<Influencer> mutateInstance(QueryPage<Influencer> instance) throws IOException {
ParseField resultsField = instance.getResultsField();
List<Influencer> page = instance.results();
long count = instance.count();
switch (between(0, 1)) {
case 0:
page = new ArrayList<>(page);
page.add(new Influencer(randomAlphaOfLengthBetween(10, 20), randomAlphaOfLengthBetween(10, 20),
randomAlphaOfLengthBetween(10, 20), new Date(randomNonNegativeLong()), randomNonNegativeLong()));
break;
case 1:
count += between(1, 20);
break;
default:
throw new AssertionError("Illegal randomisation branch");
}
return new QueryPage<>(page, count, resultsField);
}
}

View File

@@ -10,7 +10,10 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.is;
@@ -63,4 +66,34 @@ public class ChunkingConfigTests extends AbstractSerializingTestCase<ChunkingConfig> {
private static TimeValue randomPositiveSecondsMinutesHours() {
return new TimeValue(randomIntBetween(1, 1000), randomFrom(Arrays.asList(TimeUnit.SECONDS, TimeUnit.MINUTES, TimeUnit.HOURS)));
}
@Override
protected ChunkingConfig mutateInstance(ChunkingConfig instance) throws IOException {
ChunkingConfig.Mode mode = instance.getMode();
TimeValue timeSpan = instance.getTimeSpan();
switch (between(0, 1)) {
case 0:
List<ChunkingConfig.Mode> modes = new ArrayList<>(Arrays.asList(ChunkingConfig.Mode.values()));
modes.remove(mode);
mode = randomFrom(modes);
if (mode == ChunkingConfig.Mode.MANUAL) {
timeSpan = randomPositiveSecondsMinutesHours();
} else {
timeSpan = null;
}
break;
case 1:
if (timeSpan == null) {
timeSpan = randomPositiveSecondsMinutesHours();
} else {
timeSpan = new TimeValue(timeSpan.getMillis() + between(10, 10000));
}
// only manual mode allows a timespan
mode = ChunkingConfig.Mode.MANUAL;
break;
default:
throw new AssertionError("Illegal randomisation branch");
}
return new ChunkingConfig(mode, timeSpan);
}
}
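
Both branches keep the mutated pair consistent with the rule noted in the comment: only MANUAL mode carries a time span. A hypothetical sketch of the constructor check that rule implies (the real ChunkingConfig validation may be worded differently):

    // Hypothetical constructor-level check for the mode/timeSpan coupling the
    // mutation above is careful to respect; not the real ChunkingConfig code.
    final class ChunkingConfigSketch {
        enum Mode { AUTO, MANUAL, OFF }

        final Mode mode;
        final Long timeSpanMillis;

        ChunkingConfigSketch(Mode mode, Long timeSpanMillis) {
            if (timeSpanMillis != null && mode != Mode.MANUAL) {
                throw new IllegalArgumentException("time_span is only allowed in manual mode");
            }
            this.mode = mode;
            this.timeSpanMillis = timeSpanMillis;
        }
    }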

View File

@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.ml.datafeed;
import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.Writeable;
@@ -15,9 +16,11 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
@@ -26,8 +29,10 @@ import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ml.datafeed.ChunkingConfig.Mode;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.joda.time.DateTimeZone;
@@ -397,4 +402,80 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedConfig> {
builder.setAggregations(new AggregatorFactories.Builder().addAggregator(dateHistogram));
return builder.build();
}
@Override
protected DatafeedConfig mutateInstance(DatafeedConfig instance) throws IOException {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder(instance);
switch (between(0, 10)) {
case 0:
builder.setId(instance.getId() + randomValidDatafeedId());
break;
case 1:
builder.setJobId(instance.getJobId() + randomAlphaOfLength(5));
break;
case 2:
builder.setQueryDelay(new TimeValue(instance.getQueryDelay().millis() + between(100, 100000)));
break;
case 3:
if (instance.getFrequency() == null) {
builder.setFrequency(new TimeValue(between(1, 10) * 1000));
} else {
builder.setFrequency(new TimeValue(instance.getFrequency().millis() + between(1, 10) * 1000));
}
break;
case 4:
List<String> indices = new ArrayList<>(instance.getIndices());
indices.add(randomAlphaOfLengthBetween(1, 20));
builder.setIndices(indices);
break;
case 5:
List<String> types = new ArrayList<>(instance.getTypes());
types.add(randomAlphaOfLengthBetween(1, 20));
builder.setTypes(types);
break;
case 6:
BoolQueryBuilder query = new BoolQueryBuilder();
if (instance.getQuery() != null) {
query.must(instance.getQuery());
}
query.filter(new TermQueryBuilder(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
builder.setQuery(query);
break;
case 7:
if (instance.hasAggregations()) {
builder.setAggregations(null);
} else {
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
String timeField = randomAlphaOfLength(10);
aggBuilder
.addAggregator(new DateHistogramAggregationBuilder(timeField).field(timeField).interval(between(10000, 3600000))
.subAggregation(new MaxAggregationBuilder(timeField).field(timeField)));
builder.setAggregations(aggBuilder);
if (instance.getScriptFields().isEmpty() == false) {
builder.setScriptFields(Collections.emptyList());
}
}
break;
case 8:
ArrayList<ScriptField> scriptFields = new ArrayList<>(instance.getScriptFields());
scriptFields.add(new ScriptField(randomAlphaOfLengthBetween(1, 10), new Script("foo"), true));
builder.setScriptFields(scriptFields);
builder.setAggregations(null);
break;
case 9:
builder.setScrollSize(instance.getScrollSize() + between(1, 100));
break;
case 10:
if (instance.getChunkingConfig() == null || instance.getChunkingConfig().getMode() == Mode.AUTO) {
ChunkingConfig newChunkingConfig = ChunkingConfig.newManual(new TimeValue(randomNonNegativeLong()));
builder.setChunkingConfig(newChunkingConfig);
} else {
builder.setChunkingConfig(null);
}
break;
default:
throw new AssertionError("Illegal randomisation branch");
}
return builder.build();
}
}
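
Cases 7 and 8 are careful never to leave the builder holding both aggregations and script fields, because DatafeedConfig rejects that combination at build time. A hypothetical sketch of the exclusivity check (the exact message text is assumed):

    import java.util.List;

    // Hypothetical sketch of the aggregations/script-fields exclusivity that
    // cases 7 and 8 above work around; names and message are illustrative.
    final class DatafeedExclusivitySketch {
        static void validate(Object aggregations, List<?> scriptFields) {
            boolean hasAggs = aggregations != null;
            boolean hasScriptFields = scriptFields != null && scriptFields.isEmpty() == false;
            if (hasAggs && hasScriptFields) {
                throw new IllegalArgumentException("script_fields cannot be used in combination with aggregations");
            }
        }
    }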

View File

@@ -11,13 +11,19 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.ml.datafeed.ChunkingConfig.Mode;
import java.io.IOException;
import java.util.ArrayList;
@@ -179,4 +185,97 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpdate> {
equalTo(new AggregatorFactories.Builder().addAggregator(
AggregationBuilders.histogram("a").interval(300000).field("time").subAggregation(maxTime))));
}
@Override
protected DatafeedUpdate mutateInstance(DatafeedUpdate instance) throws IOException {
DatafeedUpdate.Builder builder = new DatafeedUpdate.Builder(instance);
switch (between(0, 10)) {
case 0:
builder.setId(instance.getId() + DatafeedConfigTests.randomValidDatafeedId());
break;
case 1:
builder.setJobId(instance.getJobId() + randomAlphaOfLength(5));
break;
case 2:
if (instance.getQueryDelay() == null) {
builder.setQueryDelay(new TimeValue(between(100, 100000)));
} else {
builder.setQueryDelay(new TimeValue(instance.getQueryDelay().millis() + between(100, 100000)));
}
break;
case 3:
if (instance.getFrequency() == null) {
builder.setFrequency(new TimeValue(between(1, 10) * 1000));
} else {
builder.setFrequency(new TimeValue(instance.getFrequency().millis() + between(1, 10) * 1000));
}
break;
case 4:
List<String> indices;
if (instance.getIndices() == null) {
indices = new ArrayList<>();
} else {
indices = new ArrayList<>(instance.getIndices());
}
indices.add(randomAlphaOfLengthBetween(1, 20));
builder.setIndices(indices);
break;
case 5:
List<String> types;
if (instance.getTypes() == null) {
types = new ArrayList<>();
} else {
types = new ArrayList<>(instance.getTypes());
}
types.add(randomAlphaOfLengthBetween(1, 20));
builder.setTypes(types);
break;
case 6:
BoolQueryBuilder query = new BoolQueryBuilder();
if (instance.getQuery() != null) {
query.must(instance.getQuery());
}
query.filter(new TermQueryBuilder(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
builder.setQuery(query);
break;
case 7:
if (instance.hasAggregations()) {
builder.setAggregations(null);
} else {
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
String timeField = randomAlphaOfLength(10);
aggBuilder.addAggregator(new DateHistogramAggregationBuilder(timeField).field(timeField).interval(between(10000, 3600000))
.subAggregation(new MaxAggregationBuilder(timeField).field(timeField)));
builder.setAggregations(aggBuilder);
if (instance.getScriptFields().isEmpty() == false) {
builder.setScriptFields(Collections.emptyList());
}
}
break;
case 8:
ArrayList<ScriptField> scriptFields = new ArrayList<>(instance.getScriptFields());
scriptFields.add(new ScriptField(randomAlphaOfLengthBetween(1, 10), new Script("foo"), true));
builder.setScriptFields(scriptFields);
builder.setAggregations(null);
break;
case 9:
if (instance.getScrollSize() == null) {
builder.setScrollSize(between(1, 100));
} else {
builder.setScrollSize(instance.getScrollSize() + between(1, 100));
}
break;
case 10:
if (instance.getChunkingConfig() == null || instance.getChunkingConfig().getMode() == Mode.AUTO) {
ChunkingConfig newChunkingConfig = ChunkingConfig.newManual(new TimeValue(randomNonNegativeLong()));
builder.setChunkingConfig(newChunkingConfig);
} else {
builder.setChunkingConfig(null);
}
break;
default:
throw new AssertionError("Illegal randomisation branch");
}
return builder.build();
}
}

View File

@@ -15,6 +15,7 @@ import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.job.process.autodetect.writer.RecordWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -848,4 +849,115 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisConfig> {
analysisConfig.setCategorizationFieldName("msg");
return analysisConfig;
}
@Override
protected AnalysisConfig mutateInstance(AnalysisConfig instance) throws IOException {
AnalysisConfig.Builder builder = new AnalysisConfig.Builder(instance);
switch (between(0, 11)) {
case 0:
List<Detector> detectors = new ArrayList<>(instance.getDetectors());
Detector.Builder detector = new Detector.Builder();
detector.setFunction("mean");
detector.setFieldName(randomAlphaOfLengthBetween(10, 20));
detectors.add(detector.build());
builder.setDetectors(detectors);
break;
case 1:
builder.setBucketSpan(new TimeValue(instance.getBucketSpan().millis() + (between(1, 1000) * 1000)));
builder.setMultipleBucketSpans(Collections.emptyList());
break;
case 2:
if (instance.getLatency() == null) {
builder.setLatency(new TimeValue(between(1, 1000) * 1000));
} else {
builder.setLatency(new TimeValue(instance.getLatency().millis() + (between(1, 1000) * 1000)));
}
break;
case 3:
if (instance.getCategorizationFieldName() == null) {
String categorizationFieldName = instance.getCategorizationFieldName() + randomAlphaOfLengthBetween(1, 10);
builder.setCategorizationFieldName(categorizationFieldName);
List<Detector> newDetectors = new ArrayList<>(instance.getDetectors());
Detector.Builder catDetector = new Detector.Builder();
catDetector.setFunction("mean");
catDetector.setFieldName(randomAlphaOfLengthBetween(10, 20));
catDetector.setPartitionFieldName("mlcategory");
newDetectors.add(catDetector.build());
builder.setDetectors(newDetectors);
} else {
builder.setCategorizationFieldName(instance.getCategorizationFieldName() + randomAlphaOfLengthBetween(1, 10));
}
break;
case 4:
List<String> filters;
if (instance.getCategorizationFilters() == null) {
filters = new ArrayList<>();
} else {
filters = new ArrayList<>(instance.getCategorizationFilters());
}
filters.add(randomAlphaOfLengthBetween(1, 20));
builder.setCategorizationFilters(filters);
if (instance.getCategorizationFieldName() == null) {
builder.setCategorizationFieldName(randomAlphaOfLengthBetween(1, 10));
List<Detector> newDetectors = new ArrayList<>(instance.getDetectors());
Detector.Builder catDetector = new Detector.Builder();
catDetector.setFunction("mean");
catDetector.setFieldName(randomAlphaOfLengthBetween(10, 20));
catDetector.setPartitionFieldName("mlcategory");
newDetectors.add(catDetector.build());
builder.setDetectors(newDetectors);
}
break;
case 5:
builder.setSummaryCountFieldName(instance.getSummaryCountFieldName() + randomAlphaOfLengthBetween(1, 5));
break;
case 6:
List<String> influencers = new ArrayList<>(instance.getInfluencers());
influencers.add(randomAlphaOfLengthBetween(5, 10));
builder.setInfluencers(influencers);
builder.setUsePerPartitionNormalization(false);
break;
case 7:
if (instance.getOverlappingBuckets() == null) {
builder.setOverlappingBuckets(randomBoolean());
} else {
builder.setOverlappingBuckets(instance.getOverlappingBuckets() == false);
}
break;
case 8:
if (instance.getResultFinalizationWindow() == null) {
builder.setResultFinalizationWindow(between(1, 100) * 1000L);
} else {
builder.setResultFinalizationWindow(instance.getResultFinalizationWindow() + (between(1, 100) * 1000));
}
break;
case 9:
if (instance.getMultivariateByFields() == null) {
builder.setMultivariateByFields(randomBoolean());
} else {
builder.setMultivariateByFields(instance.getMultivariateByFields() == false);
}
break;
case 10:
List<TimeValue> multipleBucketSpans;
if (instance.getMultipleBucketSpans() == null) {
multipleBucketSpans = new ArrayList<>();
} else {
multipleBucketSpans = new ArrayList<>(instance.getMultipleBucketSpans());
}
multipleBucketSpans.add(new TimeValue(between(2, 10) * instance.getBucketSpan().millis()));
builder.setMultipleBucketSpans(multipleBucketSpans);
break;
case 11:
boolean usePerPartitionNormalization = instance.getUsePerPartitionNormalization() == false;
builder.setUsePerPartitionNormalization(usePerPartitionNormalization);
if (usePerPartitionNormalization) {
builder.setInfluencers(Collections.emptyList());
}
break;
default:
throw new AssertionError("Illegal randomisation branch");
}
return builder.build();
}
}
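
Cases 6 and 11 both preserve the constraint that influencers and per-partition normalization are mutually exclusive: adding an influencer switches the flag off, and enabling the flag clears the influencers. A hypothetical sketch of that rule as a build-time validation (wording assumed):

    import java.util.List;

    // Hypothetical sketch of the constraint cases 6 and 11 above preserve:
    // per-partition normalization and influencers cannot be combined.
    final class AnalysisConfigValidationSketch {
        static void checkPerPartitionNormalization(boolean usePerPartitionNormalization, List<String> influencers) {
            if (usePerPartitionNormalization && influencers.isEmpty() == false) {
                throw new IllegalArgumentException("influencers cannot be used with per-partition normalization");
            }
        }
    }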

View File

@@ -8,8 +8,6 @@ package org.elasticsearch.xpack.ml.job.config;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -158,4 +156,36 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase<AnalysisLimits> {
new AnalysisLimits(1L, null);
new AnalysisLimits(1L, 1L);
}
@Override
protected AnalysisLimits mutateInstance(AnalysisLimits instance) throws IOException {
Long memoryModelLimit = instance.getModelMemoryLimit();
Long categorizationExamplesLimit = instance.getCategorizationExamplesLimit();
switch (between(0, 1)) {
case 0:
if (memoryModelLimit == null) {
memoryModelLimit = randomNonNegativeLong();
} else {
if (randomBoolean()) {
memoryModelLimit = null;
} else {
memoryModelLimit += between(1, 10000);
}
}
break;
case 1:
if (categorizationExamplesLimit == null) {
categorizationExamplesLimit = randomNonNegativeLong();
} else {
if (randomBoolean()) {
categorizationExamplesLimit = null;
} else {
categorizationExamplesLimit += between(1, 10000);
}
}
break;
default:
throw new AssertionError("Illegal randomisation branch");
}
return new AnalysisLimits(memoryModelLimit, categorizationExamplesLimit);
}
}

View File

@@ -12,6 +12,8 @@ import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import java.io.IOException;
public class ConditionTests extends AbstractSerializingTestCase<Condition> {
public void testSetValues() {
@@ -83,4 +85,28 @@ public class ConditionTests extends AbstractSerializingTestCase<Condition> {
() -> new Condition(Operator.MATCH, null));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_CONDITION_INVALID_VALUE_NULL, "[*"), e.getMessage());
}
@Override
protected Condition mutateInstance(Condition instance) throws IOException {
Operator op = instance.getOperator();
String value = instance.getValue();
switch (between(0, 1)) {
case 0:
Operator newOp = op;
while (newOp == op) {
newOp = randomFrom(Operator.values());
}
if (op == Operator.MATCH && newOp != Operator.MATCH) {
value = Double.toString(randomDouble());
}
op = newOp;
break;
case 1:
value = Double.toString(randomDouble());
break;
default:
throw new AssertionError("Illegal randomisation branch");
}
return new Condition(op, value);
}
}
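
The loop in case 0 exists because Condition validates its value against the operator: MATCH expects a value that compiles as a regular expression, as the test above exercises, while the numerical operators expect a parsable number, so switching away from MATCH forces a numeric value. A hypothetical sketch of that per-operator validation:

    import java.util.regex.Pattern;
    import java.util.regex.PatternSyntaxException;

    // Hypothetical sketch of the per-operator value validation that forces
    // case 0 above to swap in a numeric value when moving away from MATCH.
    final class ConditionValueSketch {
        static void validate(boolean operatorExpectsRegex, String value) {
            if (value == null) {
                throw new IllegalArgumentException("Invalid condition value: null");
            }
            if (operatorExpectsRegex) {
                try {
                    Pattern.compile(value); // MATCH: value must be a valid regex
                } catch (PatternSyntaxException e) {
                    throw new IllegalArgumentException("Invalid condition value: cannot parse a regex from [" + value + "]");
                }
            } else {
                try {
                    Double.parseDouble(value); // numerical operators: value must be a number
                } catch (NumberFormatException e) {
                    throw new IllegalArgumentException("Invalid condition value: cannot parse a number from [" + value + "]");
                }
            }
        }
    }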

View File

@@ -272,4 +272,44 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescription> {
protected DataDescription doParseInstance(XContentParser parser) {
return DataDescription.CONFIG_PARSER.apply(parser, null).build();
}
@Override
protected DataDescription mutateInstance(DataDescription instance) throws java.io.IOException {
DataFormat format = instance.getFormat();
String timeField = instance.getTimeField();
String timeFormat = instance.getTimeFormat();
Character delimiter = instance.getFieldDelimiter();
Character quoteChar = instance.getQuoteCharacter();
switch (between(0, 4)) {
case 0:
if (format == DataFormat.DELIMITED) {
format = DataFormat.XCONTENT;
} else {
format = DataFormat.DELIMITED;
}
break;
case 1:
timeField += randomAlphaOfLengthBetween(1, 10);
break;
case 2:
timeFormat = "yyyy-MM-dd-HH-mm-ss";
break;
case 3:
if (delimiter == null) {
delimiter = randomAlphaOfLength(1).charAt(0);
} else {
delimiter = null;
}
break;
case 4:
if (quoteChar == null) {
quoteChar = randomAlphaOfLength(1).charAt(0);
} else {
quoteChar = null;
}
break;
default:
throw new AssertionError("Illegal randomisation branch");
}
return new DataDescription(format, timeField, timeFormat, delimiter, quoteChar);
}
}

View File

@@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -121,4 +122,41 @@ public class DetectionRuleTests extends AbstractSerializingTestCase<DetectionRule> {
protected DetectionRule doParseInstance(XContentParser parser) {
return DetectionRule.CONFIG_PARSER.apply(parser, null).build();
}
@Override
protected DetectionRule mutateInstance(DetectionRule instance) throws IOException {
List<RuleCondition> ruleConditions = instance.getRuleConditions();
RuleAction ruleAction = instance.getRuleAction();
String targetFieldName = instance.getTargetFieldName();
String targetFieldValue = instance.getTargetFieldValue();
Connective connective = instance.getConditionsConnective();
switch (between(0, 3)) {
case 0:
ruleConditions = new ArrayList<>(ruleConditions);
ruleConditions.addAll(createRule(Double.toString(randomDouble())));
break;
case 1:
targetFieldName = randomAlphaOfLengthBetween(5, 10);
break;
case 2:
targetFieldValue = randomAlphaOfLengthBetween(5, 10);
if (targetFieldName == null) {
targetFieldName = randomAlphaOfLengthBetween(5, 10);
}
break;
case 3:
if (connective == Connective.AND) {
connective = Connective.OR;
} else {
connective = Connective.AND;
}
break;
default:
throw new AssertionError("Illegal randomisation branch");
}
return new DetectionRule.Builder(ruleConditions).setRuleAction(ruleAction).setTargetFieldName(targetFieldName)
.setTargetFieldValue(targetFieldValue).setConditionsConnective(connective).build();
}
}