[ML] Set explicit defaults to AnalysisLimits (elastic/x-pack-elasticsearch#4015)
Analysis limits contain settings that affect the resources used by ML jobs. Those limits always apply. However, explicitly setting them is not required, as they have reasonable defaults. For a long time those defaults lived on the C++ side: a job could simply have no explicit limits, and the defaults would be applied on the C++ side. This has the disadvantage that it is not obvious to users what these settings are set to. Additionally, users might not even be aware that the settings exist. On top of that, in 6.1 the default model_memory_limit was lowered from 4GB to 1GB. For BWC, this meant that for jobs where model_memory_limit is null, the default of 4GB applies. Jobs created from 6.1 onwards contain an explicit setting for model_memory_limit, which is 1GB unless the user sets it differently. This adds additional confusion.

This commit makes analysis limits an always-explicit setting on the job. Regardless of whether the user sets custom limits or not, the job object (and response) will contain the full analysis limits values. The possibilities for interpreting missing values are:

- the entire analysis_limits is null: this may only happen for jobs created prior to 6.1, so we set model_memory_limit to 4GB.
- analysis_limits is non-null but model_memory_limit is null: this also may only happen for jobs created prior to 6.1, so again we set the memory limit to 4GB.
- model_memory_limit is non-null: either the user set an explicit value, or the job was created from 6.1 onwards and carries the explicit default of 1GB. We simply keep the given value.

For categorization_examples_limit the default has always been 4, so we fill that in when it is missing. These rules are sketched below.

Finally, note that we still need to handle potential null values for the situation of a mixed-version cluster.

Original commit: elastic/x-pack-elasticsearch@5b6994ef75
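A minimal Java sketch of the interpretation rules above, for illustration only (the helper name fillDefaults is hypothetical, not code from this commit; limits are expressed in MB):

    static AnalysisLimits fillDefaults(@Nullable AnalysisLimits limits) {
        if (limits == null) {
            // The entire analysis_limits is missing: a pre-6.1 job, so apply the old
            // 4GB memory default and the categorization default of 4.
            return new AnalysisLimits(4096L, 4L);
        }
        // A null model_memory_limit inside non-null limits also means a pre-6.1 job;
        // a non-null value is kept as-is, whether it is an explicit user value or the
        // explicit 1GB default written since 6.1.
        Long modelMemoryLimit = limits.getModelMemoryLimit() != null ? limits.getModelMemoryLimit() : 4096L;
        // categorization_examples_limit has always defaulted to 4.
        Long examplesLimit = limits.getCategorizationExamplesLimit() != null
                ? limits.getCategorizationExamplesLimit() : 4L;
        return new AnalysisLimits(modelMemoryLimit, examplesLimit);
    }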
parent: d962d33a2a
commit: 79d46d1d17

@@ -87,7 +87,8 @@ The API returns the following results:
     "influencers": [ ]
   },
   "analysis_limits": {
-    "model_memory_limit": "1024mb"
+    "model_memory_limit": "1024mb",
+    "categorization_examples_limit": 4
   },
   "data_description": {
     "time_field": "timestamp",

@@ -128,7 +128,8 @@ When the job is created, you receive the following results:
     "influencers": []
   },
   "analysis_limits": {
-    "model_memory_limit": "1024mb"
+    "model_memory_limit": "1024mb",
+    "categorization_examples_limit": 4
   },
   "data_description": {
     "time_field": "timestamp",

@@ -141,7 +141,8 @@ information, including the updated property values. For example:
     "influencers": []
   },
   "analysis_limits": {
-    "model_memory_limit": "1024mb"
+    "model_memory_limit": "1024mb",
+    "categorization_examples_limit": 4
   },
   "data_description": {
     "time_field": "timestamp",

@@ -10,6 +10,7 @@ import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser;

@@ -26,20 +27,21 @@ import java.util.Map;
 import java.util.Objects;
 
 /**
- * Analysis limits for autodetect
- * <p>
- * If an option has not been set it shouldn't be used so the default value is picked up instead.
+ * Analysis limits for autodetect. In particular,
+ * this is a collection of parameters that allow limiting
+ * the resources used by the job.
  */
 public class AnalysisLimits implements ToXContentObject, Writeable {
 
     /**
      * Prior to 6.1 the default model memory size limit was 4GB, and defined in the C++ code. The default
-     * is now 1GB and defined here in the Java code. However, changing the meaning of a null model memory
-     * limit for existing jobs would be a breaking change, so instead the meaning of <code>null</code> is
-     * still to use the default from the C++ code, but newly created jobs will have this explicit setting
-     * added if none is provided.
+     * is now 1GB and defined here in the Java code. Prior to 6.3, a value of <code>null</code> means that
+     * the old default value should be used. From 6.3 onwards, the value will always be explicit.
      */
     static final long DEFAULT_MODEL_MEMORY_LIMIT_MB = 1024L;
+    static final long PRE_6_1_DEFAULT_MODEL_MEMORY_LIMIT_MB = 4096L;
+
+    static final long DEFAULT_CATEGORIZATION_EXAMPLES_LIMIT = 4;
 
     /**
      * Serialisation field names

@@ -49,7 +51,9 @@ public class AnalysisLimits implements ToXContentObject, Writeable {
 
     // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
     public static final ConstructingObjectParser<AnalysisLimits, Void> METADATA_PARSER = new ConstructingObjectParser<>(
-            "analysis_limits", true, a -> new AnalysisLimits((Long) a[0], (Long) a[1]));
+            "analysis_limits", true, a -> new AnalysisLimits(
+                    a[0] == null ? PRE_6_1_DEFAULT_MODEL_MEMORY_LIMIT_MB : (Long) a[0],
+                    a[1] == null ? DEFAULT_CATEGORIZATION_EXAMPLES_LIMIT : (Long) a[1]));
     public static final ConstructingObjectParser<AnalysisLimits, Void> CONFIG_PARSER = new ConstructingObjectParser<>(
            "analysis_limits", false, a -> new AnalysisLimits((Long) a[0], (Long) a[1]));
 public static final Map<MlParserType, ConstructingObjectParser<AnalysisLimits, Void>> PARSERS =

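With these defaults applied in the lenient metadata parser, a pre-6.1 analysis_limits object read back from cluster state with no explicit values now resolves to the old defaults at parse time. A rough sketch (parser plumbing abbreviated; createParser stands in for the usual XContent test helper):

    try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{}")) {
        AnalysisLimits limits = AnalysisLimits.METADATA_PARSER.apply(parser, null);
        assert limits.getModelMemoryLimit() == 4096L;           // PRE_6_1_DEFAULT_MODEL_MEMORY_LIMIT_MB
        assert limits.getCategorizationExamplesLimit() == 4L;   // DEFAULT_CATEGORIZATION_EXAMPLES_LIMIT
    }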
@@ -110,10 +114,53 @@ public class AnalysisLimits implements ToXContentObject, Writeable {
         this(in.readOptionalLong(), in.readOptionalLong());
     }
 
+    /**
+     * Creates a new {@code AnalysisLimits} object after validating it against external limitations
+     * and filling missing values with their defaults. Validations:
+     *
+     * <ul>
+     *   <li>check model memory limit doesn't exceed the MAX_MODEL_MEM setting</li>
+     * </ul>
+     *
+     * @param source an optional {@code AnalysisLimits} whose explicit values will be copied
+     * @param maxModelMemoryLimit the max allowed model memory limit
+     * @param defaultModelMemoryLimit the default model memory limit to be used if an explicit value is missing
+     * @return a new {@code AnalysisLimits} that is validated and has no missing values
+     */
+    public static AnalysisLimits validateAndSetDefaults(@Nullable AnalysisLimits source, @Nullable ByteSizeValue maxModelMemoryLimit,
+                                                        long defaultModelMemoryLimit) {
+
+        boolean maxModelMemoryIsSet = maxModelMemoryLimit != null && maxModelMemoryLimit.getMb() > 0;
+
+        long modelMemoryLimit = defaultModelMemoryLimit;
+        if (maxModelMemoryIsSet) {
+            modelMemoryLimit = Math.min(maxModelMemoryLimit.getMb(), modelMemoryLimit);
+        }
+
+        long categorizationExamplesLimit = DEFAULT_CATEGORIZATION_EXAMPLES_LIMIT;
+
+        if (source != null) {
+            if (source.getModelMemoryLimit() != null) {
+                modelMemoryLimit = source.getModelMemoryLimit();
+            }
+            if (source.getCategorizationExamplesLimit() != null) {
+                categorizationExamplesLimit = source.getCategorizationExamplesLimit();
+            }
+        }
+
+        if (maxModelMemoryIsSet && modelMemoryLimit > maxModelMemoryLimit.getMb()) {
+            throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_MODEL_MEMORY_LIMIT_GREATER_THAN_MAX,
+                    new ByteSizeValue(modelMemoryLimit, ByteSizeUnit.MB),
+                    maxModelMemoryLimit));
+        }
+
+        return new AnalysisLimits(modelMemoryLimit, categorizationExamplesLimit);
+    }
+
     /**
      * Maximum size of the model in MB before the anomaly detector
      * will drop new samples to prevent the model using any more
-     * memory
+     * memory.
+     *
      * @return The set memory limit or <code>null</code> if not set
      */

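As a usage sketch of the new helper (values invented for illustration): on job creation the 6.1+ default is filled in and capped by the node-level max, while a null source combined with the pre-6.1 default reproduces the old behaviour for old jobs:

    // Creation path: no explicit values, max set to 512MB.
    AnalysisLimits created = AnalysisLimits.validateAndSetDefaults(
            new AnalysisLimits(null, null),
            new ByteSizeValue(512, ByteSizeUnit.MB),
            AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB);
    // created.getModelMemoryLimit() == 512, created.getCategorizationExamplesLimit() == 4

    // Reading back a pre-6.1 job: fill the old 4GB default; the max check is skipped by passing null.
    AnalysisLimits restored = AnalysisLimits.validateAndSetDefaults(
            null, null, AnalysisLimits.PRE_6_1_DEFAULT_MODEL_MEMORY_LIMIT_MB);
    // restored.getModelMemoryLimit() == 4096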
@@ -775,8 +775,12 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
             return this;
         }
 
+        public AnalysisLimits getAnalysisLimits() {
+            return analysisLimits;
+        }
+
         public Builder setAnalysisLimits(AnalysisLimits analysisLimits) {
-            this.analysisLimits = analysisLimits;
+            this.analysisLimits = ExceptionsHelper.requireNonNull(analysisLimits, ANALYSIS_LIMITS.getPreferredName());
             return this;
         }

@@ -1065,39 +1069,13 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
         }
 
         /**
-         * In 6.1 we want to make the model memory size limit more prominent, and also reduce the default from
-         * 4GB to 1GB. However, changing the meaning of a null model memory limit for existing jobs would be a
-         * breaking change, so instead we add an explicit limit to newly created jobs that didn't have one when
-         * submitted.
-         * Additionally the MAX_MODEL_MEM setting limits the value, an exception is thrown if the max limit
-         * is exceeded.
+         * This is meant to be called when a new job is created.
+         * It will optionally validate the model memory limit against the max limit
+         * and it will set the current version defaults to missing values.
          */
-        public void validateModelMemoryLimit(ByteSizeValue maxModelMemoryLimit) {
-
-            boolean maxModelMemoryIsSet = maxModelMemoryLimit != null && maxModelMemoryLimit.getMb() > 0;
-            Long categorizationExampleLimit = null;
-            long modelMemoryLimit;
-            if (maxModelMemoryIsSet) {
-                modelMemoryLimit = Math.min(maxModelMemoryLimit.getMb(), AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB);
-            } else {
-                modelMemoryLimit = AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB;
-            }
-
-            if (analysisLimits != null) {
-                categorizationExampleLimit = analysisLimits.getCategorizationExamplesLimit();
-
-                if (analysisLimits.getModelMemoryLimit() != null) {
-                    modelMemoryLimit = analysisLimits.getModelMemoryLimit();
-
-                    if (maxModelMemoryIsSet && modelMemoryLimit > maxModelMemoryLimit.getMb()) {
-                        throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_MODEL_MEMORY_LIMIT_GREATER_THAN_MAX,
-                                new ByteSizeValue(modelMemoryLimit, ByteSizeUnit.MB),
-                                maxModelMemoryLimit));
-                    }
-                }
-            }
-
-            analysisLimits = new AnalysisLimits(modelMemoryLimit, categorizationExampleLimit);
+        public void validateAnalysisLimitsAndSetDefaults(@Nullable ByteSizeValue maxModelMemoryLimit) {
+            analysisLimits = AnalysisLimits.validateAndSetDefaults(analysisLimits, maxModelMemoryLimit,
+                    AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB);
         }
 
         /**

@@ -1147,6 +1125,13 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentObject {
          */
         public Job build() {
 
+            // If at the build stage there are missing values from analysis limits,
+            // it means we are reading a pre 6.3 job. Since 6.1, the model_memory_limit
+            // is always populated. So, if the value is missing, we fill with the pre 6.1
+            // default. We do not need to check against the max limit here so we pass null.
+            analysisLimits = AnalysisLimits.validateAndSetDefaults(analysisLimits, null,
+                    AnalysisLimits.PRE_6_1_DEFAULT_MODEL_MEMORY_LIMIT_MB);
+
             validateInputFields();
 
             // Creation time is NOT required in user input, hence validated only on build

@@ -354,6 +354,9 @@ public class JobUpdate implements Writeable, ToXContentObject {
             builder.setModelPlotConfig(modelPlotConfig);
         }
         if (analysisLimits != null) {
+            AnalysisLimits validatedLimits = AnalysisLimits.validateAndSetDefaults(analysisLimits, maxModelMemoryLimit,
+                    AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB);
+
             Long oldMemoryLimit;
             if (source.getAnalysisLimits() != null) {
                 oldMemoryLimit = source.getAnalysisLimits().getModelMemoryLimit() != null ?

@@ -363,28 +366,14 @@ public class JobUpdate implements Writeable, ToXContentObject {
                 oldMemoryLimit = UNDEFINED_MODEL_MEMORY_LIMIT_DEFAULT;
             }
 
-            Long newMemoryLimit = analysisLimits.getModelMemoryLimit() != null ?
-                    analysisLimits.getModelMemoryLimit()
-                    : oldMemoryLimit;
-
-            if (newMemoryLimit < oldMemoryLimit) {
+            if (validatedLimits.getModelMemoryLimit() < oldMemoryLimit) {
                 throw ExceptionsHelper.badRequestException(
                         Messages.getMessage(Messages.JOB_CONFIG_UPDATE_ANALYSIS_LIMITS_MODEL_MEMORY_LIMIT_CANNOT_BE_DECREASED,
                                 new ByteSizeValue(oldMemoryLimit, ByteSizeUnit.MB),
-                                new ByteSizeValue(newMemoryLimit, ByteSizeUnit.MB)));
+                                new ByteSizeValue(validatedLimits.getModelMemoryLimit(), ByteSizeUnit.MB)));
             }
 
-            boolean maxModelMemoryLimitIsSet = maxModelMemoryLimit != null && maxModelMemoryLimit.getMb() > 0;
-            if (maxModelMemoryLimitIsSet) {
-                Long modelMemoryLimit = analysisLimits.getModelMemoryLimit();
-                if (modelMemoryLimit != null && modelMemoryLimit > maxModelMemoryLimit.getMb()) {
-                    throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_MODEL_MEMORY_LIMIT_GREATER_THAN_MAX,
-                            new ByteSizeValue(modelMemoryLimit, ByteSizeUnit.MB),
-                            maxModelMemoryLimit));
-                }
-            }
-
-            builder.setAnalysisLimits(analysisLimits);
+            builder.setAnalysisLimits(validatedLimits);
         }
         if (renormalizationWindowDays != null) {
             builder.setRenormalizationWindowDays(renormalizationWindowDays);

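The net effect on updates (hypothetical numbers): the requested limits are validated and default-filled first, and only then compared against the job's existing, now always explicit, model_memory_limit:

    AnalysisLimits requested = new AnalysisLimits(1023L, null);   // update asks for 1023MB
    AnalysisLimits validated = AnalysisLimits.validateAndSetDefaults(requested, null,
            AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB);
    // Against an existing explicit limit of 1024MB (the 6.1+ default),
    // validated.getModelMemoryLimit() == 1023 < 1024, so the update is rejected with
    // the "model_memory_limit cannot be decreased" error exercised in the tests below.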
@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.core.ml.job.config;
 
 import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
+import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.Writeable;

@@ -95,7 +96,7 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
         assertEquals("foo", job.getId());
         assertNotNull(job.getCreateTime());
         assertNotNull(job.getAnalysisConfig());
-        assertNull(job.getAnalysisLimits());
+        assertNotNull(job.getAnalysisLimits());
         assertNull(job.getCustomSettings());
         assertNotNull(job.getDataDescription());
         assertNull(job.getDescription());

@@ -116,42 +117,41 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
 
     public void testEnsureModelMemoryLimitSet() {
         Job.Builder builder = buildJobBuilder("foo");
-        builder.setAnalysisLimits(null);
-        builder.validateModelMemoryLimit(new ByteSizeValue(0L));
+        builder.setAnalysisLimits(new AnalysisLimits(null, null));
+        builder.validateAnalysisLimitsAndSetDefaults(new ByteSizeValue(0L));
         Job job = builder.build();
         assertEquals("foo", job.getId());
         assertNotNull(job.getAnalysisLimits());
         assertThat(job.getAnalysisLimits().getModelMemoryLimit(), equalTo(AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB));
-        assertNull(job.getAnalysisLimits().getCategorizationExamplesLimit());
+        assertThat(job.getAnalysisLimits().getCategorizationExamplesLimit(), equalTo(4L));
 
-        builder.setAnalysisLimits(new AnalysisLimits(AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB * 2, 4L));
-        builder.validateModelMemoryLimit(null);
+        builder.setAnalysisLimits(new AnalysisLimits(AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB * 2, 5L));
+        builder.validateAnalysisLimitsAndSetDefaults(null);
         job = builder.build();
         assertNotNull(job.getAnalysisLimits());
         assertThat(job.getAnalysisLimits().getModelMemoryLimit(), equalTo(AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB * 2));
-        assertThat(job.getAnalysisLimits().getCategorizationExamplesLimit(), equalTo(4L));
+        assertThat(job.getAnalysisLimits().getCategorizationExamplesLimit(), equalTo(5L));
     }
 
-    public void testValidateModelMemoryLimit_whenMaxIsLessThanTheDefault() {
+    public void testValidateAnalysisLimitsAndSetDefaults_whenMaxIsLessThanTheDefault() {
         Job.Builder builder = buildJobBuilder("foo");
-        builder.setAnalysisLimits(null);
-        builder.validateModelMemoryLimit(new ByteSizeValue(512L, ByteSizeUnit.MB));
+        builder.validateAnalysisLimitsAndSetDefaults(new ByteSizeValue(512L, ByteSizeUnit.MB));
 
         Job job = builder.build();
         assertNotNull(job.getAnalysisLimits());
         assertThat(job.getAnalysisLimits().getModelMemoryLimit(), equalTo(512L));
-        assertNull(job.getAnalysisLimits().getCategorizationExamplesLimit());
+        assertThat(job.getAnalysisLimits().getCategorizationExamplesLimit(), equalTo(4L));
     }
 
-    public void testValidateModelMemoryLimit_throwsWhenMaxLimitIsExceeded() {
+    public void testValidateAnalysisLimitsAndSetDefaults_throwsWhenMaxLimitIsExceeded() {
         Job.Builder builder = buildJobBuilder("foo");
         builder.setAnalysisLimits(new AnalysisLimits(4096L, null));
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
-                () -> builder.validateModelMemoryLimit(new ByteSizeValue(1000L, ByteSizeUnit.MB)));
+        ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
+                () -> builder.validateAnalysisLimitsAndSetDefaults(new ByteSizeValue(1000L, ByteSizeUnit.MB)));
         assertEquals("model_memory_limit [4gb] must be less than the value of the " +
                 MachineLearningField.MAX_MODEL_MEMORY_LIMIT.getKey() + " setting [1000mb]", e.getMessage());
 
-        builder.validateModelMemoryLimit(new ByteSizeValue(8192L, ByteSizeUnit.MB));
+        builder.validateAnalysisLimitsAndSetDefaults(new ByteSizeValue(8192L, ByteSizeUnit.MB));
     }
 
     public void testEquals_GivenDifferentClass() {

@@ -633,7 +633,8 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
             builder.setEstablishedModelMemory(randomNonNegativeLong());
         }
         builder.setAnalysisConfig(AnalysisConfigTests.createRandomized());
-        builder.setAnalysisLimits(AnalysisLimitsTests.createRandomized());
+        builder.setAnalysisLimits(AnalysisLimits.validateAndSetDefaults(AnalysisLimitsTests.createRandomized(), null,
+                AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB));
 
         DataDescription.Builder dataDescription = new DataDescription.Builder();
         dataDescription.setFormat(randomFrom(DataDescription.DataFormat.values()));

@@ -20,6 +20,7 @@ import java.util.Date;
 import java.util.List;
 import java.util.Map;
 
+import static org.hamcrest.Matchers.equalTo;
 import static org.mockito.Mockito.mock;
 
 public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {

@@ -227,16 +228,18 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {
         jobBuilder.setAnalysisConfig(ac);
         jobBuilder.setDataDescription(new DataDescription.Builder());
         jobBuilder.setCreateTime(new Date());
+        jobBuilder.validateAnalysisLimitsAndSetDefaults(null);
 
-        JobUpdate update = new JobUpdate.Builder("foo").setAnalysisLimits(new AnalysisLimits(null, null)).build();
+        JobUpdate update = new JobUpdate.Builder("foo").setAnalysisLimits(new AnalysisLimits(2048L, 5L)).build();
         Job updated = update.mergeWithJob(jobBuilder.build(), new ByteSizeValue(0L));
-        assertNull(updated.getAnalysisLimits().getModelMemoryLimit());
+        assertThat(updated.getAnalysisLimits().getModelMemoryLimit(), equalTo(2048L));
+        assertThat(updated.getAnalysisLimits().getCategorizationExamplesLimit(), equalTo(5L));
 
-        JobUpdate updateWithLimit = new JobUpdate.Builder("foo").setAnalysisLimits(new AnalysisLimits(2048L, null)).build();
+        JobUpdate updateWithDecreasedLimit = new JobUpdate.Builder("foo").setAnalysisLimits(new AnalysisLimits(1023L, null)).build();
 
         ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
-                () -> updateWithLimit.mergeWithJob(jobBuilder.build(), new ByteSizeValue(8000L, ByteSizeUnit.MB)));
-        assertEquals("Invalid update value for analysis_limits: model_memory_limit cannot be decreased; existing is 4gb, update had 2gb",
+                () -> updateWithDecreasedLimit.mergeWithJob(jobBuilder.build(), new ByteSizeValue(8000L, ByteSizeUnit.MB)));
+        assertEquals("Invalid update value for analysis_limits: model_memory_limit cannot be decreased; existing is 1gb, update had 1023mb",
                 e.getMessage());
 
         JobUpdate updateAboveMaxLimit = new JobUpdate.Builder("foo").setAnalysisLimits(new AnalysisLimits(8000L, null)).build();

@@ -171,12 +171,8 @@ public class JobManager extends AbstractComponent {
      */
     public void putJob(PutJobAction.Request request, AnalysisRegistry analysisRegistry, ClusterState state,
                        ActionListener<PutJobAction.Response> actionListener) throws IOException {
-        // In 6.1 we want to make the model memory size limit more prominent, and also reduce the default from
-        // 4GB to 1GB. However, changing the meaning of a null model memory limit for existing jobs would be a
-        // breaking change, so instead we add an explicit limit to newly created jobs that didn't have one when
-        // submitted
-        request.getJobBuilder().validateModelMemoryLimit(maxModelMemoryLimit);
-
+        request.getJobBuilder().validateAnalysisLimitsAndSetDefaults(maxModelMemoryLimit);
         request.getJobBuilder().validateCategorizationAnalyzer(analysisRegistry, environment);
 
         Job job = request.getJobBuilder().build(new Date());

@@ -53,6 +53,7 @@
           }
   - match: { job_id: "job-crud-test-apis" }
   - match: { analysis_limits.model_memory_limit: "1024mb" }
+  - match: { analysis_limits.categorization_examples_limit: 4 }
 
   - do:
       xpack.ml.get_jobs: