[ML] Prevent stack overflow while copying ML jobs and datafeeds (#36370)

ML jobs and datafeeds wrap collections into their unmodifiable
equivalents in their constructor. However, the copying builder
does not make a copy of some of those collections, resulting
in them being wrapped again and again. This can eventually
result in a stack overflow.

This commit addresses the issue by copying the collections in
question in the copying builder constructor.

Closes #36360
This commit is contained in:
Dimitris Athanasiou 2018-12-07 20:16:25 +00:00 committed by GitHub
parent ead2b9e08b
commit 7870ae81b1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 34 additions and 18 deletions

View File

@ -300,11 +300,11 @@ public class DatafeedConfig implements ToXContentObject {
this.jobId = config.jobId;
this.queryDelay = config.queryDelay;
this.frequency = config.frequency;
this.indices = config.indices;
this.types = config.types;
this.indices = config.indices == null ? null : new ArrayList<>(config.indices);
this.types = config.types == null ? null : new ArrayList<>(config.types);
this.query = config.query;
this.aggregations = config.aggregations;
this.scriptFields = config.scriptFields;
this.scriptFields = config.scriptFields == null ? null : new ArrayList<>(config.scriptFields);
this.scrollSize = config.scrollSize;
this.chunkingConfig = config.chunkingConfig;
this.delayedDataCheckConfig = config.getDelayedDataCheckConfig();

View File

@ -28,8 +28,10 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@ -426,7 +428,7 @@ public class Job implements ToXContentObject {
public Builder(Job job) {
this.id = job.getId();
this.jobType = job.getJobType();
this.groups = job.getGroups();
this.groups = new ArrayList<>(job.getGroups());
this.description = job.getDescription();
this.analysisConfig = job.getAnalysisConfig();
this.analysisLimits = job.getAnalysisLimits();
@ -439,7 +441,7 @@ public class Job implements ToXContentObject {
this.backgroundPersistInterval = job.getBackgroundPersistInterval();
this.modelSnapshotRetentionDays = job.getModelSnapshotRetentionDays();
this.resultsRetentionDays = job.getResultsRetentionDays();
this.customSettings = job.getCustomSettings();
this.customSettings = job.getCustomSettings() == null ? null : new HashMap<>(job.getCustomSettings());
this.modelSnapshotId = job.getModelSnapshotId();
this.resultsIndexName = job.getResultsIndexNameNoPrefix();
this.deleting = job.getDeleting();

View File

@ -42,6 +42,8 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@ -232,8 +234,8 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
this.frequency = frequency;
this.indices = indices == null ? null : Collections.unmodifiableList(indices);
this.types = types == null ? null : Collections.unmodifiableList(types);
this.query = query;
this.aggregations = aggregations;
this.query = query == null ? null : Collections.unmodifiableMap(query);
this.aggregations = aggregations == null ? null : Collections.unmodifiableMap(aggregations);
this.scriptFields = scriptFields == null ? null : Collections.unmodifiableList(scriptFields);
this.scrollSize = scrollSize;
this.chunkingConfig = chunkingConfig;
@ -587,8 +589,6 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
private Map<String, String> headers = Collections.emptyMap();
private DelayedDataCheckConfig delayedDataCheckConfig = DelayedDataCheckConfig.defaultDelayedDataCheckConfig();
public Builder() {
try {
this.query = QUERY_TRANSFORMER.toMap(QueryBuilders.matchAllQuery());
@ -606,14 +606,14 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
this.jobId = config.jobId;
this.queryDelay = config.queryDelay;
this.frequency = config.frequency;
this.indices = config.indices;
this.types = config.types;
this.query = config.query;
this.aggregations = config.aggregations;
this.scriptFields = config.scriptFields;
this.indices = new ArrayList<>(config.indices);
this.types = new ArrayList<>(config.types);
this.query = config.query == null ? null : new LinkedHashMap<>(config.query);
this.aggregations = config.aggregations == null ? null : new LinkedHashMap<>(config.aggregations);
this.scriptFields = config.scriptFields == null ? null : new ArrayList<>(config.scriptFields);
this.scrollSize = config.scrollSize;
this.chunkingConfig = config.chunkingConfig;
this.headers = config.headers;
this.headers = new HashMap<>(config.headers);
this.delayedDataCheckConfig = config.getDelayedDataCheckConfig();
}

View File

@ -32,6 +32,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@ -660,7 +661,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentO
this.id = job.getId();
this.jobType = job.getJobType();
this.jobVersion = job.getJobVersion();
this.groups = job.getGroups();
this.groups = new ArrayList<>(job.getGroups());
this.description = job.getDescription();
this.analysisConfig = job.getAnalysisConfig();
this.analysisLimits = job.getAnalysisLimits();
@ -673,7 +674,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentO
this.backgroundPersistInterval = job.getBackgroundPersistInterval();
this.modelSnapshotRetentionDays = job.getModelSnapshotRetentionDays();
this.resultsRetentionDays = job.getResultsRetentionDays();
this.customSettings = job.getCustomSettings();
this.customSettings = job.getCustomSettings() == null ? null : new HashMap<>(job.getCustomSettings());
this.modelSnapshotId = job.getModelSnapshotId();
this.modelSnapshotMinVersion = job.getModelSnapshotMinVersion();
this.resultsIndexName = job.getResultsIndexNameNoPrefix();

View File

@ -6,7 +6,6 @@
package org.elasticsearch.xpack.core.ml.datafeed;
import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesReference;
@ -700,6 +699,13 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
}
}
/**
 * Regression test: round-tripping a {@code DatafeedConfig} through its copying
 * builder many times must not keep re-wrapping internal collections in
 * unmodifiable views, which previously overflowed the stack (see issue 36360).
 */
public void testCopyingDatafeedDoesNotCauseStackOverflow() {
    DatafeedConfig current = createTestInstance();
    int remainingCopies = 100000;
    while (remainingCopies-- > 0) {
        // Each copy must be as shallow as the first; depth must not accumulate.
        current = new DatafeedConfig.Builder(current).build();
    }
}
public static String randomValidDatafeedId() {
CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray());
return generator.ofCodePointsLength(random(), 10, 10);

View File

@ -563,6 +563,13 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
assertThat(builder.build().earliestValidTimestamp(dataCounts), equalTo(123455789L));
}
/**
 * Regression test: repeatedly rebuilding a {@code Job} via its copying builder
 * must not nest unmodifiable collection wrappers ever deeper, which previously
 * caused a stack overflow (see issue 36360).
 */
public void testCopyingJobDoesNotCauseStackOverflow() {
    Job current = createRandomizedJob();
    int remainingCopies = 100000;
    while (remainingCopies-- > 0) {
        // Each copy must be as shallow as the first; depth must not accumulate.
        current = new Job.Builder(current).build();
    }
}
public static Job.Builder buildJobBuilder(String id, Date date) {
Job.Builder builder = new Job.Builder(id);
builder.setCreateTime(date);