[ML] Add setting for job max model memory limit (elastic/x-pack-elasticsearch#2460)
* Add setting for job max model memory limit

* Address review comments

Original commit: elastic/x-pack-elasticsearch@5cec3a1abf
parent aaa0510821
commit bad65b4186
@@ -22,6 +22,7 @@ import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.SettingsFilter;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.env.Environment;
@@ -160,6 +161,8 @@ public class MachineLearning implements ActionPlugin {
     public static final String MAX_OPEN_JOBS_NODE_ATTR = "ml.max_open_jobs";
     public static final Setting<Integer> CONCURRENT_JOB_ALLOCATIONS =
             Setting.intSetting("xpack.ml.node_concurrent_job_allocations", 2, 0, Property.Dynamic, Property.NodeScope);
+    public static final Setting<ByteSizeValue> MAX_MODEL_MEMORY =
+            Setting.memorySizeSetting("xpack.ml.max_model_memory_limit", new ByteSizeValue(0), Property.NodeScope);

     public static final TimeValue STATE_PERSIST_RESTORE_TIMEOUT = TimeValue.timeValueMinutes(30);

@@ -186,6 +189,7 @@ public class MachineLearning implements ActionPlugin {
             Arrays.asList(AUTODETECT_PROCESS,
                     ML_ENABLED,
                     CONCURRENT_JOB_ALLOCATIONS,
+                    MAX_MODEL_MEMORY,
                     ProcessCtrl.DONT_PERSIST_MODEL_STATE_SETTING,
                     ProcessCtrl.MAX_ANOMALY_RECORDS_SETTING,
                     DataCountsReporter.ACCEPTABLE_PERCENTAGE_DATE_PARSE_ERRORS_SETTING,
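A minimal sketch (assumed, not part of this commit) of what registering MAX_MODEL_MEMORY enables: the key xpack.ml.max_model_memory_limit can now be set in elasticsearch.yml and is parsed as a memory size, with the ByteSizeValue(0) default meaning "no cap".

    // Illustrative only: reading the newly registered setting from node settings.
    Settings nodeSettings = Settings.builder()
            .put("xpack.ml.max_model_memory_limit", "1gb")   // as it would appear in elasticsearch.yml
            .build();
    ByteSizeValue cap = MachineLearning.MAX_MODEL_MEMORY.get(nodeSettings);
    // cap.getMb() == 1024; leaving the key unset yields the ByteSizeValue(0) default, i.e. no cap.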
@@ -72,13 +72,6 @@ public class PutJobAction extends Action<PutJobAction.Request, PutJobAction.Resp
                         jobBuilder.getId(), jobId));
             }

-            // Some fields cannot be set at create time
-            List<String> invalidJobCreationSettings = jobBuilder.invalidCreateTimeSettings();
-            if (invalidJobCreationSettings.isEmpty() == false) {
-                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_INVALID_CREATE_SETTINGS,
-                        String.join(",", invalidJobCreationSettings)));
-            }
-
             return new Request(jobBuilder);
         }

@@ -88,11 +81,12 @@ public class PutJobAction extends Action<PutJobAction.Request, PutJobAction.Resp
             // Validate the jobBuilder immediately so that errors can be detected prior to transportation.
             jobBuilder.validateInputFields();

-            // In 6.1 we want to make the model memory size limit more prominent, and also reduce the default from
-            // 4GB to 1GB. However, changing the meaning of a null model memory limit for existing jobs would be a
-            // breaking change, so instead we add an explicit limit to newly created jobs that didn't have one when
-            // submitted
-            jobBuilder.setDefaultMemoryLimitIfUnset();
+            // Some fields cannot be set at create time
+            List<String> invalidJobCreationSettings = jobBuilder.invalidCreateTimeSettings();
+            if (invalidJobCreationSettings.isEmpty() == false) {
+                throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_INVALID_CREATE_SETTINGS,
+                        String.join(",", invalidJobCreationSettings)));
+            }

             this.jobBuilder = jobBuilder;
         }

@@ -18,6 +18,7 @@ import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.CheckedConsumer;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.xpack.ml.MachineLearning;
 import org.elasticsearch.xpack.ml.MlMetadata;
 import org.elasticsearch.xpack.ml.action.DeleteJobAction;
 import org.elasticsearch.xpack.ml.action.PutJobAction;
@@ -135,6 +136,13 @@ public class JobManager extends AbstractComponent {
      * Stores a job in the cluster state
      */
    public void putJob(PutJobAction.Request request, ClusterState state, ActionListener<PutJobAction.Response> actionListener) {
+        // In 6.1 we want to make the model memory size limit more prominent, and also reduce the default from
+        // 4GB to 1GB. However, changing the meaning of a null model memory limit for existing jobs would be a
+        // breaking change, so instead we add an explicit limit to newly created jobs that didn't have one when
+        // submitted
+        request.getJobBuilder().validateModelMemoryLimit(MachineLearning.MAX_MODEL_MEMORY.get(settings));
+
        Job job = request.getJobBuilder().build(new Date());

        MlMetadata currentMlMetadata = state.metaData().custom(MlMetadata.TYPE);

@@ -235,7 +243,7 @@ public class JobManager extends AbstractComponent {
             @Override
             public ClusterState execute(ClusterState currentState) throws Exception {
                 Job job = getJobOrThrowIfUnknown(jobId, currentState);
-                updatedJob = jobUpdate.mergeWithJob(job);
+                updatedJob = jobUpdate.mergeWithJob(job, MachineLearning.MAX_MODEL_MEMORY.get(settings));
                 return updateClusterState(updatedJob, true, currentState);
             }

@@ -10,9 +10,12 @@ import org.elasticsearch.cluster.AbstractDiffable;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.inject.spi.Message;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
@@ -716,15 +719,6 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentO
         }

         public Builder setAnalysisLimits(AnalysisLimits analysisLimits) {
-            if (this.analysisLimits != null) {
-                long oldMemoryLimit = this.analysisLimits.getModelMemoryLimit();
-                long newMemoryLimit = analysisLimits.getModelMemoryLimit();
-                if (newMemoryLimit < oldMemoryLimit) {
-                    throw new IllegalArgumentException(
-                            Messages.getMessage(Messages.JOB_CONFIG_UPDATE_ANALYSIS_LIMITS_MODEL_MEMORY_LIMIT_CANNOT_BE_DECREASED,
-                                    oldMemoryLimit, newMemoryLimit));
-                }
-            }
             this.analysisLimits = analysisLimits;
             return this;
         }
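Note that the decrease check removed here is not lost: it moves to JobUpdate#mergeWithJob (below), where it can also account for pre-6.1 jobs that never had an explicit model_memory_limit.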
|
@@ -1004,14 +998,36 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentO
          * In 6.1 we want to make the model memory size limit more prominent, and also reduce the default from
          * 4GB to 1GB. However, changing the meaning of a null model memory limit for existing jobs would be a
          * breaking change, so instead we add an explicit limit to newly created jobs that didn't have one when
-         * submitted
+         * submitted.
+         * Additionally the MAX_MODEL_MEM setting limits the value, an exception is thrown if the max limit
+         * is exceeded.
          */
-        public void setDefaultMemoryLimitIfUnset() {
-            if (analysisLimits == null) {
-                analysisLimits = new AnalysisLimits((Long) null);
-            } else if (analysisLimits.getModelMemoryLimit() == null) {
-                analysisLimits = new AnalysisLimits(analysisLimits.getCategorizationExamplesLimit());
-            }
+        public void validateModelMemoryLimit(ByteSizeValue maxModelMemoryLimit) {
+
+            boolean maxModelMemoryIsSet = maxModelMemoryLimit != null && maxModelMemoryLimit.getMb() > 0;
+            Long categorizationExampleLimit = null;
+            long modelMemoryLimit;
+            if (maxModelMemoryIsSet) {
+                modelMemoryLimit = Math.min(maxModelMemoryLimit.getMb(), AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB);
+            } else {
+                modelMemoryLimit = AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB;
+            }
+
+            if (analysisLimits != null) {
+                categorizationExampleLimit = analysisLimits.getCategorizationExamplesLimit();
+
+                if (analysisLimits.getModelMemoryLimit() != null) {
+                    modelMemoryLimit = analysisLimits.getModelMemoryLimit();
+
+                    if (maxModelMemoryIsSet && modelMemoryLimit > maxModelMemoryLimit.getMb()) {
+                        throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_MODEL_MEMORY_LIMIT_GREATER_THAN_MAX,
+                                new ByteSizeValue(modelMemoryLimit, ByteSizeUnit.MB),
+                                maxModelMemoryLimit));
+                    }
+                }
+            }
+
+            analysisLimits = new AnalysisLimits(modelMemoryLimit, categorizationExampleLimit);
         }

         private void validateGroups() {
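A usage sketch of the new builder validation, mirroring the tests further down (the job id and sizes here are illustrative):

    Job.Builder builder = new Job.Builder("example-job");   // hypothetical job id
    builder.setAnalysisLimits(null);
    builder.validateModelMemoryLimit(new ByteSizeValue(512L, ByteSizeUnit.MB));
    // No explicit limit was given, so the default is clamped down to the 512 MB cap.

    builder.setAnalysisLimits(new AnalysisLimits(4096L, null));
    builder.validateModelMemoryLimit(new ByteSizeValue(1000L, ByteSizeUnit.MB));
    // Throws IllegalArgumentException: model_memory_limit [4gb] must be less than
    // the value of the xpack.ml.max_model_memory_limit setting [1000mb]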
@@ -11,16 +11,18 @@ import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.ml.job.messages.Messages;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -48,6 +50,14 @@ public class JobUpdate implements Writeable, ToXContentObject {
         PARSER.declareString(Builder::setModelSnapshotId, Job.MODEL_SNAPSHOT_ID);
     }

+    /**
+     * Prior to 6.1 a default model_memory_limit was not enforced in Java.
+     * The default of 4GB was used in the C++ code.
+     * If model_memory_limit is not defined for a job then the
+     * job was created before 6.1 and a value of 4GB is assumed.
+     */
+    private static final long UNDEFINED_MODEL_MEMORY_LIMIT_DEFAULT = 4096;
+
     private final String jobId;
     private final List<String> groups;
     private final String description;
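A worked example of the fallback this constant enables in mergeWithJob below (values mirror testUpdate_withAnalysisLimitsPreviouslyUndefined):

    long oldMemoryLimit = UNDEFINED_MODEL_MEMORY_LIMIT_DEFAULT; // 4096 MB assumed for a pre-6.1 job with no explicit limit
    long newMemoryLimit = 2048L;                                // from the update request
    // newMemoryLimit < oldMemoryLimit, so the update is rejected as a decrease ("existing is 4gb, update had 2gb").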
|
@@ -242,9 +252,10 @@ public class JobUpdate implements Writeable, ToXContentObject {
      * Updates {@code source} with the new values in this object returning a new {@link Job}.
      *
      * @param source Source job to be updated
+     * @param maxModelMemoryLimit The maximum model memory allowed
      * @return A new job equivalent to {@code source} updated.
      */
-    public Job mergeWithJob(Job source) {
+    public Job mergeWithJob(Job source, ByteSizeValue maxModelMemoryLimit) {
         Job.Builder builder = new Job.Builder(source);
         if (groups != null) {
             builder.setGroups(groups);
@@ -278,6 +289,36 @@ public class JobUpdate implements Writeable, ToXContentObject {
             builder.setModelPlotConfig(modelPlotConfig);
         }
         if (analysisLimits != null) {
+            Long oldMemoryLimit;
+            if (source.getAnalysisLimits() != null) {
+                oldMemoryLimit = source.getAnalysisLimits().getModelMemoryLimit() != null ?
+                        source.getAnalysisLimits().getModelMemoryLimit()
+                        : UNDEFINED_MODEL_MEMORY_LIMIT_DEFAULT;
+            } else {
+                oldMemoryLimit = UNDEFINED_MODEL_MEMORY_LIMIT_DEFAULT;
+            }
+
+            Long newMemoryLimit = analysisLimits.getModelMemoryLimit() != null ?
+                    analysisLimits.getModelMemoryLimit()
+                    : oldMemoryLimit;
+
+            if (newMemoryLimit < oldMemoryLimit) {
+                throw ExceptionsHelper.badRequestException(
+                        Messages.getMessage(Messages.JOB_CONFIG_UPDATE_ANALYSIS_LIMITS_MODEL_MEMORY_LIMIT_CANNOT_BE_DECREASED,
+                                new ByteSizeValue(oldMemoryLimit, ByteSizeUnit.MB),
+                                new ByteSizeValue(newMemoryLimit, ByteSizeUnit.MB)));
+            }
+
+            boolean maxModelMemoryLimitIsSet = maxModelMemoryLimit != null && maxModelMemoryLimit.getMb() > 0;
+            if (maxModelMemoryLimitIsSet) {
+                Long modelMemoryLimit = analysisLimits.getModelMemoryLimit();
+                if (modelMemoryLimit != null && modelMemoryLimit > maxModelMemoryLimit.getMb()) {
+                    throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_MODEL_MEMORY_LIMIT_GREATER_THAN_MAX,
+                            new ByteSizeValue(modelMemoryLimit, ByteSizeUnit.MB),
+                            maxModelMemoryLimit));
+                }
+            }
+
             builder.setAnalysisLimits(analysisLimits);
         }
         if (renormalizationWindowDays != null) {
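An update-path sketch mirroring testUpdateAnalysisLimitWithValueGreaterThanMax; existingJob is assumed to be a built job with a 256 MB limit:

    JobUpdate update = new JobUpdate.Builder("example-job")
            .setAnalysisLimits(new AnalysisLimits(1024L, null)).build();
    update.mergeWithJob(existingJob, new ByteSizeValue(512L, ByteSizeUnit.MB));
    // Throws ElasticsearchStatusException: model_memory_limit [1gb] must be less
    // than the value of the xpack.ml.max_model_memory_limit setting [512mb]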
@@ -5,6 +5,8 @@
  */
 package org.elasticsearch.xpack.ml.job.messages;

+import org.elasticsearch.xpack.ml.MachineLearning;
+
 import java.text.MessageFormat;
 import java.util.Locale;

@@ -105,6 +107,8 @@ public final class Messages {
     public static final String JOB_CONFIG_FIELDNAME_INCOMPATIBLE_FUNCTION = "field_name cannot be used with function ''{0}''";
     public static final String JOB_CONFIG_FIELD_VALUE_TOO_LOW = "{0} cannot be less than {1,number}. Value = {2,number}";
     public static final String JOB_CONFIG_MODEL_MEMORY_LIMIT_TOO_LOW = "model_memory_limit must be at least 1 MiB. Value = {0,number}";
+    public static final String JOB_CONFIG_MODEL_MEMORY_LIMIT_GREATER_THAN_MAX =
+            "model_memory_limit [{0}] must be less than the value of the " + MachineLearning.MAX_MODEL_MEMORY.getKey() + " setting [{1}]";
     public static final String JOB_CONFIG_FUNCTION_INCOMPATIBLE_PRESUMMARIZED =
             "The ''{0}'' function cannot be used in jobs that will take pre-summarized input";
     public static final String JOB_CONFIG_FUNCTION_REQUIRES_BYFIELD = "by_field_name must be set when the ''{0}'' function is used";
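For illustration, rendering the new message with Messages.getMessage (values mirror the assertions in JobTests below):

    String msg = Messages.getMessage(Messages.JOB_CONFIG_MODEL_MEMORY_LIMIT_GREATER_THAN_MAX,
            new ByteSizeValue(4096L, ByteSizeUnit.MB), new ByteSizeValue(1000L, ByteSizeUnit.MB));
    // -> "model_memory_limit [4gb] must be less than the value of the xpack.ml.max_model_memory_limit setting [1000mb]"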
@@ -9,6 +9,8 @@ import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -17,6 +19,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.test.AbstractSerializingTestCase;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.ml.MachineLearning;
 import org.elasticsearch.xpack.ml.job.messages.Messages;
 import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex;

@@ -109,12 +112,42 @@ public class JobTests extends AbstractSerializingTestCase<Job> {

     public void testEnsureModelMemoryLimitSet() {
         Job.Builder builder = buildJobBuilder("foo");
-        builder.setDefaultMemoryLimitIfUnset();
+        builder.setAnalysisLimits(null);
+        builder.validateModelMemoryLimit(new ByteSizeValue(0L));
         Job job = builder.build();

         assertEquals("foo", job.getId());
         assertNotNull(job.getAnalysisLimits());
         assertThat(job.getAnalysisLimits().getModelMemoryLimit(), equalTo(AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB));
         assertNull(job.getAnalysisLimits().getCategorizationExamplesLimit());
+
+        builder.setAnalysisLimits(new AnalysisLimits(AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB * 2, 4L));
+        builder.validateModelMemoryLimit(null);
+        job = builder.build();
+        assertNotNull(job.getAnalysisLimits());
+        assertThat(job.getAnalysisLimits().getModelMemoryLimit(), equalTo(AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB * 2));
+        assertThat(job.getAnalysisLimits().getCategorizationExamplesLimit(), equalTo(4L));
     }

+    public void testValidateModelMemoryLimit_whenMaxIsLessThanTheDefault() {
+        Job.Builder builder = buildJobBuilder("foo");
+        builder.setAnalysisLimits(null);
+        builder.validateModelMemoryLimit(new ByteSizeValue(512L, ByteSizeUnit.MB));
+
+        Job job = builder.build();
+        assertNotNull(job.getAnalysisLimits());
+        assertThat(job.getAnalysisLimits().getModelMemoryLimit(), equalTo(512L));
+        assertNull(job.getAnalysisLimits().getCategorizationExamplesLimit());
+    }
+
+    public void testValidateModelMemoryLimit_throwsWhenMaxLimitIsExceeded() {
+        Job.Builder builder = buildJobBuilder("foo");
+        builder.setAnalysisLimits(new AnalysisLimits(4096L, null));
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> builder.validateModelMemoryLimit(new ByteSizeValue(1000L, ByteSizeUnit.MB)));
+        assertEquals("model_memory_limit [4gb] must be less than the value of the " +
+                MachineLearning.MAX_MODEL_MEMORY.getKey() + " setting [1000mb]", e.getMessage());
+
+        builder.validateModelMemoryLimit(new ByteSizeValue(8192L, ByteSizeUnit.MB));
+    }
+
     public void testEquals_GivenDifferentClass() {
@@ -204,15 +237,6 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
         assertFalse(jobDetails1.build().equals(jobDetails2.build()));
     }

-    public void testSetAnalysisLimits() {
-        Job.Builder builder = new Job.Builder();
-        builder.setAnalysisLimits(new AnalysisLimits(42L, null));
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
-                () -> builder.setAnalysisLimits(new AnalysisLimits(41L, null)));
-        assertEquals("Invalid update value for analysis_limits: model_memory_limit cannot be decreased; existing is 42, update had 41",
-                e.getMessage());
-    }
-
     // JobConfigurationTests:

     /**
@@ -5,7 +5,10 @@
  */
 package org.elasticsearch.xpack.ml.job.config;

+import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractSerializingTestCase;
@@ -140,7 +143,7 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {
         jobBuilder.setDataDescription(new DataDescription.Builder());
         jobBuilder.setCreateTime(new Date());

-        Job updatedJob = update.mergeWithJob(jobBuilder.build());
+        Job updatedJob = update.mergeWithJob(jobBuilder.build(), new ByteSizeValue(0L));

         assertEquals(update.getGroups(), updatedJob.getGroups());
         assertEquals(update.getDescription(), updatedJob.getDescription());
@@ -171,4 +174,75 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {
         update = new JobUpdate.Builder("foo").setDetectorUpdates(Collections.singletonList(mock(JobUpdate.DetectorUpdate.class))).build();
         assertTrue(update.isAutodetectProcessUpdate());
     }
+
+    public void testUpdateAnalysisLimitWithLowerValue() {
+        Job.Builder jobBuilder = new Job.Builder("foo");
+        Detector.Builder d1 = new Detector.Builder("info_content", "domain");
+        d1.setOverFieldName("mlcategory");
+        Detector.Builder d2 = new Detector.Builder("min", "field");
+        d2.setOverFieldName("host");
+        AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Arrays.asList(d1.build(), d2.build()));
+        ac.setCategorizationFieldName("cat_field");
+        jobBuilder.setAnalysisConfig(ac);
+        jobBuilder.setDataDescription(new DataDescription.Builder());
+        jobBuilder.setCreateTime(new Date());
+        jobBuilder.setAnalysisLimits(new AnalysisLimits(42L, null));
+
+        JobUpdate update = new JobUpdate.Builder("foo").setAnalysisLimits(new AnalysisLimits(41L, null)).build();
+
+        ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
+                () -> update.mergeWithJob(jobBuilder.build(), new ByteSizeValue(0L)));
+        assertEquals("Invalid update value for analysis_limits: model_memory_limit cannot be decreased; existing is 42mb, update had 41mb",
+                e.getMessage());
+    }
+
+    public void testUpdateAnalysisLimitWithValueGreaterThanMax() {
+        Job.Builder jobBuilder = new Job.Builder("foo");
+        Detector.Builder d1 = new Detector.Builder("info_content", "domain");
+        d1.setOverFieldName("mlcategory");
+        Detector.Builder d2 = new Detector.Builder("min", "field");
+        d2.setOverFieldName("host");
+        AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Arrays.asList(d1.build(), d2.build()));
+        ac.setCategorizationFieldName("cat_field");
+        jobBuilder.setAnalysisConfig(ac);
+        jobBuilder.setDataDescription(new DataDescription.Builder());
+        jobBuilder.setCreateTime(new Date());
+        jobBuilder.setAnalysisLimits(new AnalysisLimits(256L, null));
+
+        JobUpdate update = new JobUpdate.Builder("foo").setAnalysisLimits(new AnalysisLimits(1024L, null)).build();
+
+        ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
+                () -> update.mergeWithJob(jobBuilder.build(), new ByteSizeValue(512L, ByteSizeUnit.MB)));
+        assertEquals("model_memory_limit [1gb] must be less than the value of the xpack.ml.max_model_memory_limit setting [512mb]",
+                e.getMessage());
+    }
+
+    public void testUpdate_withAnalysisLimitsPreviouslyUndefined() {
+        Job.Builder jobBuilder = new Job.Builder("foo");
+        Detector.Builder d1 = new Detector.Builder("info_content", "domain");
+        AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Collections.singletonList(d1.build()));
+        jobBuilder.setAnalysisConfig(ac);
+        jobBuilder.setDataDescription(new DataDescription.Builder());
+        jobBuilder.setCreateTime(new Date());
+
+        JobUpdate update = new JobUpdate.Builder("foo").setAnalysisLimits(new AnalysisLimits(null, null)).build();
+        Job updated = update.mergeWithJob(jobBuilder.build(), new ByteSizeValue(0L));
+        assertNull(updated.getAnalysisLimits().getModelMemoryLimit());
+
+        JobUpdate updateWithLimit = new JobUpdate.Builder("foo").setAnalysisLimits(new AnalysisLimits(2048L, null)).build();
+
+        ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
+                () -> updateWithLimit.mergeWithJob(jobBuilder.build(), new ByteSizeValue(8000L, ByteSizeUnit.MB)));
+        assertEquals("Invalid update value for analysis_limits: model_memory_limit cannot be decreased; existing is 4gb, update had 2gb",
+                e.getMessage());
+
+        JobUpdate updateAboveMaxLimit = new JobUpdate.Builder("foo").setAnalysisLimits(new AnalysisLimits(8000L, null)).build();
+
+        e = expectThrows(ElasticsearchStatusException.class,
+                () -> updateAboveMaxLimit.mergeWithJob(jobBuilder.build(), new ByteSizeValue(5000L, ByteSizeUnit.MB)));
+        assertEquals("model_memory_limit [7.8gb] must be less than the value of the xpack.ml.max_model_memory_limit setting [4.8gb]",
+                e.getMessage());
+
+        updateAboveMaxLimit.mergeWithJob(jobBuilder.build(), new ByteSizeValue(10000L, ByteSizeUnit.MB));
+    }
 }
@@ -336,6 +336,17 @@
   - match: { model_snapshot_retention_days: 30 }
   - match: { results_retention_days: 40 }

+  - do:
+      catch: "/Invalid update value for analysis_limits: model_memory_limit cannot be decreased; existing is 20mb, update had 1mb/"
+      xpack.ml.update_job:
+        job_id: jobs-crud-update-job
+        body: >
+          {
+            "analysis_limits": {
+              "model_memory_limit": "1mb"
+            }
+          }
+
   - do:
       catch: request
       xpack.ml.update_job: