parent 4d006f09d2
commit 7cd997df84
@@ -256,12 +256,13 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragmen
         Setting.Property.Dynamic,
         Setting.Property.IndexScope);
 
+    public static final String SETTING_INDEX_HIDDEN = "index.hidden";
     /**
      * Whether the index is considered hidden or not. A hidden index will not be resolved in
      * normal wildcard searches unless explicitly allowed
      */
     public static final Setting<Boolean> INDEX_HIDDEN_SETTING =
-        Setting.boolSetting("index.hidden", false, Property.IndexScope, Property.Final);
+        Setting.boolSetting(SETTING_INDEX_HIDDEN, false, Property.IndexScope, Property.Final);
 
     /**
      * an internal index format description, allowing us to find out if this index is upgraded or needs upgrading
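
A minimal usage sketch (illustrative, not part of this commit) of how consuming code might read the new setting; the indexMetaData variable is an assumed IndexMetaData instance:

    // Illustrative only: read the boolean value of "index.hidden" from an index's settings.
    boolean hidden = IndexMetaData.INDEX_HIDDEN_SETTING.get(indexMetaData.getSettings());

    // Illustrative only: request a hidden index at creation time through its settings,
    // as the AnnotationIndex change further down in this diff does.
    Settings hiddenIndexSettings = Settings.builder()
        .put(IndexMetaData.SETTING_INDEX_HIDDEN, true)
        .build();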
@@ -121,9 +121,6 @@ public class MetaDataCreateIndexService {
     */
    private static final CharacterRunAutomaton DOT_INDICES_EXCLUSIONS = new CharacterRunAutomaton(Regex.simpleMatchToAutomaton(
        ".watch-history-*",
-       ".ml-anomalies-*",
-       ".ml-notifications-*",
-       ".ml-annotations*",
        ".data-frame-notifications-*",
        ".transform-notifications-*"
    ));
@@ -642,9 +642,6 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase {
        // this test case should be removed when DOT_INDICES_EXCLUSIONS is empty
        List<String> excludedNames = Arrays.asList(
            ".watch-history-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT),
-           ".ml-anomalies-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT),
-           ".ml-notifications-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT),
-           ".ml-annotations-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT),
            ".data-frame-notifications-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT),
            ".transform-notifications-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT)
        );
@@ -22,6 +22,7 @@ import com.carrotsearch.randomizedtesting.RandomizedContext;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
 import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
+import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.client.Requests;
 import org.elasticsearch.cluster.ClusterName;
@@ -126,13 +127,20 @@ public abstract class ESSingleNodeTestCase extends ESTestCase {
     public void tearDown() throws Exception {
         logger.trace("[{}#{}]: cleaning up after test", getTestClass().getSimpleName(), getTestName());
         super.tearDown();
-        assertAcked(client().admin().indices().prepareDelete("*").get());
+        assertAcked(
+            client().admin().indices().prepareDelete("*")
+                .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN)
+                .get());
         MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
         assertThat("test leaves persistent cluster metadata behind: " + metaData.persistentSettings().keySet(),
             metaData.persistentSettings().size(), equalTo(0));
         assertThat("test leaves transient cluster metadata behind: " + metaData.transientSettings().keySet(),
             metaData.transientSettings().size(), equalTo(0));
-        GetIndexResponse indices = client().admin().indices().prepareGetIndex().addIndices("*").get();
+        GetIndexResponse indices =
+            client().admin().indices().prepareGetIndex()
+                .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN)
+                .addIndices("*")
+                .get();
         assertThat("test leaves indices that were not deleted: " + Strings.arrayToCommaDelimitedString(indices.indices()),
             indices.indices(), equalTo(Strings.EMPTY_ARRAY));
         if (resetNodeAfterTest()) {
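
For context, a hedged sketch of the same IndicesOptions used in a test search, so a wildcard also matches hidden indices; the "*" pattern, match-all query and the SearchResponse/QueryBuilders imports are assumed for illustration:

    // Illustrative only: expand "*" to open, closed and hidden indices, and do not fail
    // if the pattern matches nothing (lenient).
    SearchResponse response = client().prepareSearch("*")
        .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN)
        .setQuery(QueryBuilders.matchAllQuery())
        .get();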
@@ -601,9 +601,9 @@ public abstract class ESRestTestCase extends ESTestCase {
     protected static void wipeAllIndices() throws IOException {
         boolean includeHidden = minimumNodeVersion().onOrAfter(Version.V_7_7_0);
         try {
-            final Request deleteReq = new Request("DELETE", "*");
-            deleteReq.addParameter("expand_wildcards", "open,closed" + (includeHidden ? ",hidden" : ""));
-            final Response response = adminClient().performRequest(deleteReq);
+            final Request deleteRequest = new Request("DELETE", "*");
+            deleteRequest.addParameter("expand_wildcards", "open,closed" + (includeHidden ? ",hidden" : ""));
+            final Response response = adminClient().performRequest(deleteRequest);
             try (InputStream is = response.getEntity().getContent()) {
                 assertTrue((boolean) XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true).get("acknowledged"));
             }
@@ -726,6 +726,13 @@ public abstract class ESRestTestCase extends ESTestCase {
         }
     }
 
+    protected void refreshAllIndices() throws IOException {
+        boolean includeHidden = minimumNodeVersion().onOrAfter(Version.V_7_7_0);
+        Request refreshRequest = new Request("POST", "/_refresh");
+        refreshRequest.addParameter("expand_wildcards", "open,closed" + (includeHidden ? ",hidden" : ""));
+        client().performRequest(refreshRequest);
+    }
+
     private void waitForPendingRollupTasks() throws Exception {
         waitForPendingTasks(adminClient(), taskName -> taskName.startsWith("xpack/rollup/job") == false);
     }
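
A hedged example of how a REST test subclass might use the new helper after indexing; the index name and document body are invented for illustration:

    // Illustrative only: index a document through the low-level REST client, then refresh
    // everything; on clusters entirely on 7.7.0 or later the refresh also expands to
    // hidden indices.
    Request indexDoc = new Request("PUT", "/airline-data/_doc/1");
    indexDoc.setJsonEntity("{\"time\": \"2020-01-01T00:00:00Z\", \"airline\": \"AAL\"}");
    client().performRequest(indexDoc);
    refreshAllIndices();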
@@ -61,11 +61,13 @@ public class AnnotationIndex {
         // Create the annotations index if it doesn't exist already.
         if (mlLookup.containsKey(INDEX_NAME) == false) {
 
-            CreateIndexRequest createIndexRequest = new CreateIndexRequest(INDEX_NAME);
-            createIndexRequest.mapping(SINGLE_MAPPING_NAME, annotationsMapping(), XContentType.JSON);
-            createIndexRequest.settings(Settings.builder()
-                .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1")
-                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1"));
+            CreateIndexRequest createIndexRequest =
+                new CreateIndexRequest(INDEX_NAME)
+                    .mapping(SINGLE_MAPPING_NAME, annotationsMapping(), XContentType.JSON)
+                    .settings(Settings.builder()
+                        .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1")
+                        .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1")
+                        .put(IndexMetaData.SETTING_INDEX_HIDDEN, true));
 
             executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, createIndexRequest,
                 ActionListener.<CreateIndexResponse>wrap(
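
Hidden indices stay reachable by explicit name or alias, so direct writes to this index are unaffected; a hedged sketch with an invented document body (the client variable, IndexRequest and ActionListener imports are assumed to be in scope):

    // Illustrative only: hiding an index affects wildcard expansion, not access by an
    // explicit name or alias, so writes through the annotations write alias keep working.
    IndexRequest indexRequest = new IndexRequest(AnnotationIndex.WRITE_ALIAS_NAME)
        .source("{\"annotation\": \"example\"}", XContentType.JSON);
    client.index(indexRequest, ActionListener.wrap(r -> {}, e -> {}));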
@@ -9,11 +9,9 @@ public final class AnomalyDetectorsIndexFields {
 
     public static final String CONFIG_INDEX = ".ml-config";
 
-    public static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-";
-
     public static final String STATE_INDEX_PREFIX = ".ml-state";
     public static final String STATE_INDEX_PATTERN = STATE_INDEX_PREFIX + "*";
-
+    public static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-";
     public static final String RESULTS_INDEX_DEFAULT = "shared";
 
     private AnomalyDetectorsIndexFields() {}
@@ -12,7 +12,8 @@
       "auto_expand_replicas" : "0-1",
       "query" : {
         "default_field" : "all_field_values"
-      }
+      },
+      "hidden": true
     }
   },
   "mappings": ${xpack.ml.anomalydetection.results.mappings}
@@ -6,7 +6,8 @@
   ],
   "settings" : {
     "index" : {
-      "auto_expand_replicas" : "0-1"
+      "auto_expand_replicas" : "0-1",
+      "hidden": true
     }
   },
   "mappings" : {
@@ -7,7 +7,8 @@
   "settings" : {
     "index" : {
       "number_of_shards" : "1",
-      "auto_expand_replicas" : "0-1"
+      "auto_expand_replicas" : "0-1",
+      "hidden": true
     }
   },
   "mappings" : {
@@ -127,7 +127,7 @@ public class MlBasicMultiNodeIT extends ESRestTestCase {
         client().performRequest(airlineData2);
 
         // Ensure all data is searchable
-        client().performRequest(new Request("POST", "/_refresh"));
+        refreshAllIndices();
 
         String jobId = "mini-farequote-with-data-feeder-job";
         createFarequoteJob(jobId);
@@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.xpack.ml.MachineLearning;
@@ -80,7 +81,7 @@ public class CategorizationIT extends MlNativeAutodetectIntegTestCase {
     public void tearDownData() {
         cleanUp();
         client().admin().indices().prepareDelete(DATA_INDEX).get();
-        client().admin().indices().prepareRefresh("*").get();
+        client().admin().indices().prepareRefresh("*").setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN).get();
     }
 
     public void testBasicCategorization() throws Exception {
@@ -747,7 +747,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
         assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":0"));
 
         // There should be a notification saying that there was a problem extracting data
-        client().performRequest(new Request("POST", "/_refresh"));
+        refreshAllIndices();
         Response notificationsResponse = client().performRequest(
             new Request("GET", NotificationsIndex.NOTIFICATIONS_INDEX + "/_search?size=1000&q=job_id:" + jobId));
         String notificationsResponseAsString = EntityUtils.toString(notificationsResponse.getEntity());
@@ -954,7 +954,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
         startDatafeedAndWaitUntilStopped(datafeedId, BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS);
         waitUntilJobIsClosed(jobId);
         // There should be a notification saying that there was a problem extracting data
-        client().performRequest(new Request("POST", "/_refresh"));
+        refreshAllIndices();
         Response notificationsResponse = client().performRequest(
             new Request("GET", NotificationsIndex.NOTIFICATIONS_INDEX + "/_search?size=1000&q=job_id:" + jobId));
         String notificationsResponseAsString = EntityUtils.toString(notificationsResponse.getEntity());
@@ -10,6 +10,7 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.action.update.UpdateAction;
 import org.elasticsearch.action.update.UpdateRequest;
@@ -163,7 +164,7 @@ public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase {
         }
 
         // Refresh to ensure the snapshot timestamp updates are visible
-        client().admin().indices().prepareRefresh("*").get();
+        client().admin().indices().prepareRefresh("*").setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN).get();
 
         // We need to wait a second to ensure the second time around model snapshots will have a different ID (it depends on epoch seconds)
         // FIXME it would be better to wait for something concrete instead of wait for time to elapse
@@ -294,6 +295,6 @@ public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase {
             client().execute(UpdateModelSnapshotAction.INSTANCE, request).get();
         }
         // We need to refresh to ensure the updates are visible
-        client().admin().indices().prepareRefresh("*").get();
+        client().admin().indices().prepareRefresh("*").setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN).get();
     }
 }
@@ -231,7 +231,7 @@ public class MlJobIT extends ESRestTestCase {
             jobId1, "1236", 1));
         client().performRequest(createResultRequest);
 
-        client().performRequest(new Request("POST", "/_refresh"));
+        refreshAllIndices();
 
         responseAsString = EntityUtils.toString(client().performRequest(
             new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1 + "/results/buckets")).getEntity());
@@ -256,7 +256,7 @@ public class MlJobIT extends ESRestTestCase {
             jobId2, "1236", 1));
         client().performRequest(createResultRequest);
 
-        client().performRequest(new Request("POST", "/_refresh"));
+        refreshAllIndices();
 
         responseAsString = EntityUtils.toString(client().performRequest(
             new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId2 + "/results/buckets")).getEntity());
@@ -278,7 +278,7 @@ public class MlJobIT extends ESRestTestCase {
             new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity());
         assertThat(responseAsString, containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName));
 
-        client().performRequest(new Request("POST", "/_refresh"));
+        refreshAllIndices();
 
         responseAsString = EntityUtils.toString(client().performRequest(
             new Request("GET", AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName + "/_count")).getEntity());
@@ -289,7 +289,7 @@ public class MlJobIT extends ESRestTestCase {
         responseAsString = EntityUtils.toString(client().performRequest(new Request("GET", "/_aliases")).getEntity());
         assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2))));
 
-        client().performRequest(new Request("POST", "/_refresh"));
+        refreshAllIndices();
         responseAsString = EntityUtils.toString(client().performRequest(
             new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity());
         assertThat(responseAsString, not(containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName)));
@@ -670,7 +670,7 @@ public class MlJobIT extends ESRestTestCase {
         createDoc3.setEntity(createDoc0.getEntity());
         client().performRequest(createDoc3);
 
-        client().performRequest(new Request("POST", "/_refresh"));
+        refreshAllIndices();
 
         // check for the documents
         assertThat(EntityUtils.toString(client().performRequest(new Request("GET", indexName+ "/_count")).getEntity()),
@@ -683,7 +683,7 @@ public class MlJobIT extends ESRestTestCase {
         // Delete
         client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));
 
-        client().performRequest(new Request("POST", "/_refresh"));
+        refreshAllIndices();
 
         // check that the indices still exist but are empty
         String indicesAfterDelete = EntityUtils.toString(client().performRequest(
@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.ml.integration;
 
 import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
+import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.ClusterModule;
 import org.elasticsearch.cluster.ClusterState;
@@ -119,7 +120,7 @@ abstract class MlNativeIntegTestCase extends ESIntegTestCase {
             new DeleteExpiredDataAction.Request()).get();
 
         // We need to refresh to ensure the deletion is visible
-        client().admin().indices().prepareRefresh("*").get();
+        client().admin().indices().prepareRefresh("*").setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN).get();
 
         return response;
     }
@@ -945,7 +945,6 @@ public class MachineLearning extends Plugin implements SystemIndexPlugin, Analys
     public Collection<SystemIndexDescriptor> getSystemIndexDescriptors() {
         return Collections.unmodifiableList(Arrays.asList(
             new SystemIndexDescriptor(MlMetaIndex.INDEX_NAME, "Contains scheduling and anomaly tracking metadata"),
-            new SystemIndexDescriptor(AnomalyDetectorsIndexFields.STATE_INDEX_PATTERN, "Contains ML model state"),
             new SystemIndexDescriptor(AnomalyDetectorsIndexFields.CONFIG_INDEX, "Contains ML configuration data"),
             new SystemIndexDescriptor(InferenceIndexConstants.INDEX_PATTERN, "Contains ML model configuration and statistics")
         ));
@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.ml.integration;
 
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.metadata.AliasMetaData;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.settings.Settings;
@@ -72,7 +73,10 @@ public class AnnotationIndexIT extends MlSingleNodeTestCase {
     private int numberOfAnnotationsAliases() {
         int count = 0;
         ImmutableOpenMap<String, List<AliasMetaData>> aliases = client().admin().indices()
-            .prepareGetAliases(AnnotationIndex.READ_ALIAS_NAME, AnnotationIndex.WRITE_ALIAS_NAME).get().getAliases();
+            .prepareGetAliases(AnnotationIndex.READ_ALIAS_NAME, AnnotationIndex.WRITE_ALIAS_NAME)
+            .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN)
+            .get()
+            .getAliases();
         if (aliases != null) {
             for (ObjectObjectCursor<String, List<AliasMetaData>> entry : aliases) {
                 count += entry.value.size();
@@ -165,7 +165,7 @@ public class BasicDistributedJobsIT extends BaseMlIntegTestCase {
         indexRequest = new IndexRequest("data", "type");
         indexRequest.source("time", 1407083600L);
         client().index(indexRequest).get();
-        refresh();
+        refresh("*", ".ml-*");
 
         Job.Builder job = createScheduledJob("job_id");
         PutJobAction.Request putJobRequest = new PutJobAction.Request(job);
@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.integration;
 
 import org.elasticsearch.action.ActionFuture;
 import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -495,6 +496,7 @@ public class MlDistributedFailureIT extends BaseMlIntegTestCase {
     // are what we expect them to be:
     private static DataCounts getDataCountsFromIndex(String jobId) {
         SearchResponse searchResponse = client().prepareSearch()
+                .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN)
                 .setQuery(QueryBuilders.idsQuery().addIds(DataCounts.documentId(jobId)))
                 .get();
         if (searchResponse.getHits().getTotalHits().value != 1) {
@@ -110,7 +110,8 @@ setup:
   - do:
       headers:
         Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
-      indices.refresh: {}
+      indices.refresh:
+        index: ["*",".ml-*"]
 
   - do:
       headers:
@@ -462,7 +463,8 @@ setup:
   - do:
       headers:
        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
-      indices.refresh: {}
+      indices.refresh:
+        index: ["*",".ml-*"]
 
   - do:
       headers:
@@ -511,7 +513,8 @@ setup:
   - do:
       headers:
        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
-      indices.refresh: {}
+      indices.refresh:
+        index: ["*",".ml-*"]
 
   - do:
       ml.delete_job:
@@ -603,7 +606,8 @@ setup:
   - do:
       headers:
        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
-      indices.refresh: {}
+      indices.refresh:
+        index: ["*",".ml-*"]
 
   - do:
       ml.delete_job:
@@ -680,7 +684,8 @@ setup:
   - do:
       headers:
        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
-      indices.refresh: {}
+      indices.refresh:
+        index: ["*",".ml-*"]
 
   - do:
       headers:
@@ -470,7 +470,8 @@
         model_bytes: 10000000
 
   - do:
-      indices.refresh: {}
+      indices.refresh:
+        index: ["*",".ml-*"]
 
   - do:
       catch: /Invalid update value for analysis_limits[:] model_memory_limit cannot be decreased below current usage; current usage \[9mb\], update had \[5mb\]/
@@ -926,7 +927,8 @@
           key: value
 
   - do:
-      indices.refresh: {}
+      indices.refresh:
+        index: ["*",".ml-*"]
 
   - do:
       catch: /status_exception/
@@ -953,7 +955,8 @@
           key: value
 
   - do:
-      indices.refresh: {}
+      indices.refresh:
+        index: ["*",".ml-*"]
 
   - do:
       catch: /status_exception/
@@ -990,7 +993,8 @@
          }
 
   - do:
-      indices.refresh: {}
+      indices.refresh:
+        index: ["*",".ml-*"]
 
   - do:
       catch: /status_exception/