[7.x] Use static methods (rather than constants) to obtain .ml-meta and .ml-config index names (#58484) (#58490)

Przemysław Witek 2020-06-24 15:52:45 +02:00 committed by GitHub
parent fa88e71532
commit 551b8bcd73
43 changed files with 159 additions and 143 deletions
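For context, the change replaces the publicly exposed index-name constants with static accessor methods, so the constants can stay private and the holder classes can be final. A minimal before/after sketch of a call site (the wrapper class and method below are hypothetical; only the MlMetaIndex and MlConfigIndex names and accessors come from the diff):

    import org.elasticsearch.xpack.core.ml.MlConfigIndex;
    import org.elasticsearch.xpack.core.ml.MlMetaIndex;

    class MlIndexNameUsageSketch {
        static void example() {
            // Before: call sites read public constants such as
            //   MlMetaIndex.INDEX_NAME and AnomalyDetectorsIndexFields.CONFIG_INDEX.
            // After: the same names are obtained through static methods instead.
            String metaIndex = MlMetaIndex.indexName();     // ".ml-meta"
            String configIndex = MlConfigIndex.indexName(); // ".ml-config"
            assert ".ml-meta".equals(metaIndex) && ".ml-config".equals(configIndex);
        }
    }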

View File

@@ -11,18 +11,31 @@ import org.elasticsearch.xpack.core.template.TemplateUtils;
 import java.util.Collections;
-public class MlConfigIndex {
+public final class MlConfigIndex {
+    private static final String INDEX_NAME = ".ml-config";
     private static final String MAPPINGS_VERSION_VARIABLE = "xpack.ml.version";
-    private MlConfigIndex() {}
+    /**
+     * The name of the index where job, datafeed and analytics configuration is stored
+     *
+     * @return The index name
+     */
+    public static String indexName() {
+        return INDEX_NAME;
+    }
     public static String mapping() {
         return mapping(MapperService.SINGLE_MAPPING_NAME);
     }
     public static String mapping(String mappingType) {
-        return TemplateUtils.loadTemplate("/org/elasticsearch/xpack/core/ml/config_index_mappings.json",
-            Version.CURRENT.toString(), MAPPINGS_VERSION_VARIABLE, Collections.singletonMap("xpack.ml.mapping_type", mappingType));
+        return TemplateUtils.loadTemplate(
+            "/org/elasticsearch/xpack/core/ml/config_index_mappings.json",
+            Version.CURRENT.toString(),
+            MAPPINGS_VERSION_VARIABLE,
+            Collections.singletonMap("xpack.ml.mapping_type", mappingType));
     }
+    private MlConfigIndex() {}
 }

View File

@@ -6,12 +6,18 @@
 package org.elasticsearch.xpack.core.ml;
 public final class MlMetaIndex {
+    private static final String INDEX_NAME = ".ml-meta";
     /**
      * Where to store the ml info in Elasticsearch - must match what's
      * expected by kibana/engineAPI/app/directives/mlLogUsage.js
+     *
+     * @return The index name
      */
-    public static final String INDEX_NAME = ".ml-meta";
+    public static String indexName() {
+        return INDEX_NAME;
+    }
     private MlMetaIndex() {}
 }

View File

@@ -68,15 +68,6 @@ public final class AnomalyDetectorsIndex {
         return AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX + "*";
     }
-    /**
-     * The name of the index where job and datafeed configuration
-     * is stored
-     * @return The index name
-     */
-    public static String configIndexName() {
-        return AnomalyDetectorsIndexFields.CONFIG_INDEX;
-    }
     /**
      * Creates the .ml-state-000001 index (if necessary)
      * Creates the .ml-state-write alias for the .ml-state-000001 index (if necessary)

View File

@@ -7,8 +7,6 @@ package org.elasticsearch.xpack.core.ml.job.persistence;
 public final class AnomalyDetectorsIndexFields {
-    public static final String CONFIG_INDEX = ".ml-config";
     public static final String STATE_INDEX_PREFIX = ".ml-state";
     public static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-";

View File

@@ -56,6 +56,7 @@ import org.elasticsearch.xpack.core.ilm.action.GetLifecycleAction;
 import org.elasticsearch.xpack.core.ilm.action.PutLifecycleAction;
 import org.elasticsearch.xpack.core.ilm.action.StartILMAction;
 import org.elasticsearch.xpack.core.ilm.action.StopILMAction;
+import org.elasticsearch.xpack.core.ml.MlConfigIndex;
 import org.elasticsearch.xpack.core.ml.MlMetaIndex;
 import org.elasticsearch.xpack.core.ml.action.CloseJobAction;
 import org.elasticsearch.xpack.core.ml.action.DeleteCalendarAction;
@@ -1164,8 +1165,8 @@ public class ReservedRolesStoreTests extends ESTestCase {
         assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false));
         assertNoAccessAllowed(role, "foo");
-        assertNoAccessAllowed(role, AnomalyDetectorsIndexFields.CONFIG_INDEX); // internal use only
-        assertOnlyReadAllowed(role, MlMetaIndex.INDEX_NAME);
+        assertNoAccessAllowed(role, MlConfigIndex.indexName()); // internal use only
+        assertOnlyReadAllowed(role, MlMetaIndex.indexName());
         assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX);
         assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT);
         assertOnlyReadAllowed(role, NotificationsIndex.NOTIFICATIONS_INDEX);
@@ -1320,8 +1321,8 @@ public class ReservedRolesStoreTests extends ESTestCase {
         assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false));
         assertNoAccessAllowed(role, "foo");
-        assertNoAccessAllowed(role, AnomalyDetectorsIndexFields.CONFIG_INDEX);
-        assertNoAccessAllowed(role, MlMetaIndex.INDEX_NAME);
+        assertNoAccessAllowed(role, MlConfigIndex.indexName());
+        assertNoAccessAllowed(role, MlMetaIndex.indexName());
         assertNoAccessAllowed(role, AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX);
         assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT);
         assertOnlyReadAllowed(role, NotificationsIndex.NOTIFICATIONS_INDEX);

View File

@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.integration;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.xpack.core.ml.MlConfigIndex;
 import org.elasticsearch.xpack.core.ml.MlTasks;
 import org.elasticsearch.xpack.core.ml.action.CloseJobAction;
 import org.elasticsearch.xpack.core.ml.action.GetDatafeedsStatsAction;
@@ -17,7 +18,6 @@ import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
 import org.elasticsearch.xpack.core.ml.job.config.DataDescription;
 import org.elasticsearch.xpack.core.ml.job.config.Detector;
 import org.elasticsearch.xpack.core.ml.job.config.Job;
-import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields;
 import org.junit.After;
 import java.util.Collections;
@@ -42,8 +42,8 @@ public class JobAndDatafeedResilienceIT extends MlNativeAutodetectIntegTestCase
         putJob(job);
         openJob(job.getId());
-        client().prepareDelete(AnomalyDetectorsIndexFields.CONFIG_INDEX, "_doc", Job.documentId(jobId)).get();
-        client().admin().indices().prepareRefresh(AnomalyDetectorsIndexFields.CONFIG_INDEX).get();
+        client().prepareDelete(MlConfigIndex.indexName(), "_doc", Job.documentId(jobId)).get();
+        client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
         ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> {
             CloseJobAction.Request request = new CloseJobAction.Request(jobId);
@@ -82,8 +82,8 @@
         putDatafeed(datafeedConfig);
         startDatafeed(datafeedConfig.getId(), 0L, null);
-        client().prepareDelete(AnomalyDetectorsIndexFields.CONFIG_INDEX, "_doc", DatafeedConfig.documentId(datafeedConfig.getId())).get();
-        client().admin().indices().prepareRefresh(AnomalyDetectorsIndexFields.CONFIG_INDEX).get();
+        client().prepareDelete(MlConfigIndex.indexName(), "_doc", DatafeedConfig.documentId(datafeedConfig.getId())).get();
+        client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
         ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> {
             StopDatafeedAction.Request request = new StopDatafeedAction.Request(datafeedConfig.getId());
@@ -120,8 +120,8 @@
         putJob(job2);
         openJob(job2.getId());
-        client().prepareDelete(AnomalyDetectorsIndexFields.CONFIG_INDEX, "_doc", Job.documentId(jobId1)).get();
-        client().admin().indices().prepareRefresh(AnomalyDetectorsIndexFields.CONFIG_INDEX).get();
+        client().prepareDelete(MlConfigIndex.indexName(), "_doc", Job.documentId(jobId1)).get();
+        client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
         List<GetJobsStatsAction.Response.JobStats> jobStats = client().execute(GetJobsStatsAction.INSTANCE,
             new GetJobsStatsAction.Request("*"))
@@ -175,8 +175,8 @@
         putDatafeed(datafeedConfig2);
         startDatafeed(datafeedConfig2.getId(), 0L, null);
-        client().prepareDelete(AnomalyDetectorsIndexFields.CONFIG_INDEX, "_doc", DatafeedConfig.documentId(datafeedConfig1.getId())).get();
-        client().admin().indices().prepareRefresh(AnomalyDetectorsIndexFields.CONFIG_INDEX).get();
+        client().prepareDelete(MlConfigIndex.indexName(), "_doc", DatafeedConfig.documentId(datafeedConfig1.getId())).get();
+        client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
         List<GetDatafeedsStatsAction.Response.DatafeedStats> dfStats = client().execute(GetDatafeedsStatsAction.INSTANCE,
             new GetDatafeedsStatsAction.Request("*"))

View File

@@ -157,7 +157,7 @@ abstract class MlNativeIntegTestCase extends ESIntegTestCase {
     protected Set<String> excludeTemplates() {
         return new HashSet<>(Arrays.asList(
             NotificationsIndex.NOTIFICATIONS_INDEX,
-            MlMetaIndex.INDEX_NAME,
+            MlMetaIndex.indexName(),
             AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX,
             AnomalyDetectorsIndex.jobResultsIndexPrefix(),
             InferenceIndexConstants.LATEST_INDEX_NAME,

View File

@@ -27,6 +27,7 @@ import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.transport.Transport;
 import org.elasticsearch.xpack.core.TestXPackTransportClient;
 import org.elasticsearch.xpack.core.XPackField;
+import org.elasticsearch.xpack.core.ml.MlConfigIndex;
 import org.elasticsearch.xpack.core.ml.action.CloseJobAction;
 import org.elasticsearch.xpack.core.ml.action.DeleteDatafeedAction;
 import org.elasticsearch.xpack.core.ml.action.DeleteJobAction;
@@ -51,7 +52,6 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TargetType;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree.Tree;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree.TreeNode;
 import org.elasticsearch.xpack.core.ml.job.config.JobState;
-import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
 import org.elasticsearch.xpack.ml.LocalStateMachineLearning;
 import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase;
 import org.junit.Before;
@@ -250,7 +250,7 @@ public class MachineLearningLicensingIT extends BaseMlIntegTestCase {
         }
         assertMLAllowed(false);
-        client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
+        client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
         // now that the license is invalid, the job should be closed and datafeed stopped:
         assertBusy(() -> {

View File

@@ -14,11 +14,11 @@ import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xpack.core.ClientHelper;
+import org.elasticsearch.xpack.core.ml.MlConfigIndex;
 import org.elasticsearch.xpack.core.ml.MlTasks;
 import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction;
 import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
 import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate;
-import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
 import org.elasticsearch.xpack.ml.MlSingleNodeTestCase;
 import org.elasticsearch.xpack.ml.datafeed.persistence.DatafeedConfigProvider;
 import org.hamcrest.core.IsInstanceOf;
@@ -249,7 +249,7 @@ public class DatafeedConfigProviderIT extends MlSingleNodeTestCase {
         DatafeedConfig bar2 = putDatafeedConfig(createDatafeedConfig("bar-2", "j4"), Collections.emptyMap());
         putDatafeedConfig(createDatafeedConfig("not-used", "j5"), Collections.emptyMap());
-        client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
+        client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
         // Test datafeed IDs only
         SortedSet<String> expandedIds =
@@ -302,7 +302,7 @@
     public void testExpandDatafeedsWithTaskData() throws Exception {
         putDatafeedConfig(createDatafeedConfig("foo-2", "j2"), Collections.emptyMap());
-        client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
+        client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
         PersistentTasksCustomMetadata.Builder tasksBuilder = PersistentTasksCustomMetadata.builder();
         tasksBuilder.addTask(MlTasks.datafeedTaskId("foo-1"),
@@ -330,7 +330,7 @@
         putDatafeedConfig(createDatafeedConfig("foo-2", "j2"), Collections.emptyMap());
         putDatafeedConfig(createDatafeedConfig("bar-1", "j3"), Collections.emptyMap());
-        client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
+        client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
         AtomicReference<Set<String>> datafeedIdsHolder = new AtomicReference<>();
         AtomicReference<Exception> exceptionHolder = new AtomicReference<>();

View File

@@ -17,6 +17,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilders;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
+import org.elasticsearch.xpack.core.ml.MlConfigIndex;
 import org.elasticsearch.xpack.core.ml.MlTasks;
 import org.elasticsearch.xpack.core.ml.action.OpenJobAction;
 import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
@@ -30,7 +31,6 @@ import org.elasticsearch.xpack.core.ml.job.config.Operator;
 import org.elasticsearch.xpack.core.ml.job.config.RuleCondition;
 import org.elasticsearch.xpack.core.ml.job.config.RuleScope;
 import org.elasticsearch.xpack.core.ml.job.messages.Messages;
-import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
 import org.elasticsearch.xpack.ml.MlSingleNodeTestCase;
 import org.elasticsearch.xpack.ml.job.persistence.JobConfigProvider;
 import org.junit.Before;
@@ -297,7 +297,7 @@ public class JobConfigProviderIT extends MlSingleNodeTestCase {
         Job harry = putJob(createJob("harry", Collections.singletonList("harry-group")));
         Job harryJnr = putJob(createJob("harry-jnr", Collections.singletonList("harry-group")));
-        client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
+        client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
         // Job Ids
         SortedSet<String> expandedIds = blockingCall(actionListener ->
@@ -369,7 +369,7 @@
         Job bar2 = putJob(createJob("bar-2", Collections.singletonList("bar")));
         Job nbar = putJob(createJob("nbar", Collections.singletonList("bar")));
-        client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
+        client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
         // Test job IDs only
         SortedSet<String> expandedIds = blockingCall(actionListener -> jobConfigProvider.expandJobsIds("foo*",
@@ -417,7 +417,7 @@
         Boolean marked = blockingCall(actionListener -> jobConfigProvider.markJobAsDeleting("foo-deleting", actionListener));
         assertTrue(marked);
-        client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
+        client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
         SortedSet<String> expandedIds = blockingCall(actionListener -> jobConfigProvider.expandJobsIds("foo*",
             true,
@@ -448,7 +448,7 @@
         putJob(createJob("foo-1", null));
         putJob(createJob("bar", null));
-        client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
+        client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
         PersistentTasksCustomMetadata.Builder tasksBuilder = PersistentTasksCustomMetadata.builder();
         tasksBuilder.addTask(MlTasks.jobTaskId("foo-2"),
@@ -480,7 +480,7 @@
         putJob(createJob("tomato", Arrays.asList("fruit", "veg")));
         putJob(createJob("unrelated", Collections.emptyList()));
-        client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
+        client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
         SortedSet<String> expandedIds = blockingCall(actionListener ->
             jobConfigProvider.expandGroupIds(Collections.singletonList("fruit"), actionListener));
@@ -517,7 +517,7 @@
         jobWithRules2 = addCustomRule(jobWithRules2, rule);
         putJob(jobWithRules2);
-        client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
+        client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
         List<Job> foundJobs = blockingCall(listener -> jobConfigProvider.findJobsWithCustomRules(listener));
@@ -568,7 +568,7 @@
         String jobId = "mark-as-deleting-job";
         putJob(createJob(jobId, Collections.emptyList()));
-        client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
+        client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
         exceptionHolder.set(null);
         blockingCall(listener -> jobConfigProvider.markJobAsDeleting(jobId, listener), responseHolder, exceptionHolder);

View File

@@ -428,7 +428,7 @@ public class JobResultsProviderIT extends MlSingleNodeTestCase {
             throw exceptionHolder.get();
         }
-        client().admin().indices().prepareRefresh(MlMetaIndex.INDEX_NAME).get();
+        client().admin().indices().prepareRefresh(MlMetaIndex.indexName()).get();
     }
     private Calendar getCalendar(String calendarId) throws Exception {
@@ -586,7 +586,7 @@
         Quantiles quantiles = new Quantiles(jobId, new Date(), "quantile-state");
         indexQuantiles(quantiles);
-        client().admin().indices().prepareRefresh(MlMetaIndex.INDEX_NAME, AnomalyDetectorsIndex.jobStateIndexPattern(),
+        client().admin().indices().prepareRefresh(MlMetaIndex.indexName(), AnomalyDetectorsIndex.jobStateIndexPattern(),
             AnomalyDetectorsIndex.jobResultsAliasedName(jobId)).get();
@@ -706,7 +706,7 @@
         bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
         for (ScheduledEvent event : events) {
-            IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME);
+            IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName());
             try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
                 ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(
                     ToXContentParams.FOR_INTERNAL_STORAGE, "true"));
@@ -730,7 +730,7 @@
         bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
         for (MlFilter filter : filters) {
-            IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME).id(filter.documentId());
+            IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName()).id(filter.documentId());
             try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
                 ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(
                     ToXContentParams.FOR_INTERNAL_STORAGE, "true"));
@@ -764,7 +764,7 @@
         bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
         for (Calendar calendar: calendars) {
-            IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME).id(calendar.documentId());
+            IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName()).id(calendar.documentId());
             try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
                 ToXContent.MapParams params = new ToXContent.MapParams(
                     Collections.singletonMap(ToXContentParams.FOR_INTERNAL_STORAGE, "true"));

View File

@@ -36,6 +36,7 @@ import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.xpack.core.ml.MlConfigIndex;
 import org.elasticsearch.xpack.core.ml.MlMetadata;
 import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
 import org.elasticsearch.xpack.core.ml.job.config.Job;
@@ -410,14 +411,14 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase {
     }
     private void addMlConfigIndex(Metadata.Builder metadata, RoutingTable.Builder routingTable) {
-        IndexMetadata.Builder indexMetadata = IndexMetadata.builder(AnomalyDetectorsIndex.configIndexName());
+        IndexMetadata.Builder indexMetadata = IndexMetadata.builder(MlConfigIndex.indexName());
         indexMetadata.settings(Settings.builder()
             .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
         );
         metadata.put(indexMetadata);
-        Index index = new Index(AnomalyDetectorsIndex.configIndexName(), "_uuid");
+        Index index = new Index(MlConfigIndex.indexName(), "_uuid");
         ShardId shardId = new ShardId(index, 0);
         ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, true, RecoverySource.EmptyStoreRecoverySource.INSTANCE,
             new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""));
@@ -449,7 +450,7 @@
     }
     private boolean configIndexExists() {
-        return client().admin().indices().prepareExists(AnomalyDetectorsIndex.configIndexName()).get().isExists();
+        return client().admin().indices().prepareExists(MlConfigIndex.indexName()).get().isExists();
     }
 }

View File

@@ -66,6 +66,7 @@ import org.elasticsearch.xpack.core.ClientHelper;
 import org.elasticsearch.xpack.core.XPackPlugin;
 import org.elasticsearch.xpack.core.XPackSettings;
 import org.elasticsearch.xpack.core.ml.MachineLearningField;
+import org.elasticsearch.xpack.core.ml.MlConfigIndex;
 import org.elasticsearch.xpack.core.ml.MlMetaIndex;
 import org.elasticsearch.xpack.core.ml.action.CloseJobAction;
 import org.elasticsearch.xpack.core.ml.action.DeleteCalendarAction;
@@ -966,7 +967,7 @@ public class MachineLearning extends Plugin implements SystemIndexPlugin,
         List<String> templateNames =
             Arrays.asList(
                 NotificationsIndex.NOTIFICATIONS_INDEX,
-                MlMetaIndex.INDEX_NAME,
+                MlMetaIndex.indexName(),
                 AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX,
                 AnomalyDetectorsIndex.jobResultsIndexPrefix(),
                 InferenceIndexConstants.LATEST_INDEX_NAME);
@@ -1011,8 +1012,8 @@
     @Override
     public Collection<SystemIndexDescriptor> getSystemIndexDescriptors(Settings settings) {
         return Collections.unmodifiableList(Arrays.asList(
-            new SystemIndexDescriptor(MlMetaIndex.INDEX_NAME, "Contains scheduling and anomaly tracking metadata"),
-            new SystemIndexDescriptor(AnomalyDetectorsIndexFields.CONFIG_INDEX, "Contains ML configuration data"),
+            new SystemIndexDescriptor(MlMetaIndex.indexName(), "Contains scheduling and anomaly tracking metadata"),
+            new SystemIndexDescriptor(MlConfigIndex.indexName(), "Contains ML configuration data"),
             new SystemIndexDescriptor(InferenceIndexConstants.INDEX_PATTERN, "Contains ML model configuration and statistics")
         ));
     }

View File

@@ -12,10 +12,10 @@ import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
+import org.elasticsearch.xpack.core.ml.MlConfigIndex;
 import org.elasticsearch.xpack.core.ml.MlMetadata;
 import org.elasticsearch.xpack.core.ml.MlTasks;
 import org.elasticsearch.xpack.core.ml.job.config.Job;
-import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
 /**
  * Checks whether migration can start and whether ML resources (e.g. jobs, datafeeds)
@@ -63,11 +63,11 @@ public class MlConfigMigrationEligibilityCheck {
     }
     static boolean mlConfigIndexIsAllocated(ClusterState clusterState) {
-        if (clusterState.metadata().hasIndex(AnomalyDetectorsIndex.configIndexName()) == false) {
+        if (clusterState.metadata().hasIndex(MlConfigIndex.indexName()) == false) {
             return false;
         }
-        IndexRoutingTable routingTable = clusterState.getRoutingTable().index(AnomalyDetectorsIndex.configIndexName());
+        IndexRoutingTable routingTable = clusterState.getRoutingTable().index(MlConfigIndex.indexName());
         if (routingTable == null || routingTable.allPrimaryShardsActive() == false) {
             return false;
         }

View File

@@ -166,7 +166,7 @@ public class MlConfigMigrator {
             return;
         }
-        if (clusterState.metadata().hasIndex(AnomalyDetectorsIndex.configIndexName()) == false) {
+        if (clusterState.metadata().hasIndex(MlConfigIndex.indexName()) == false) {
             createConfigIndex(ActionListener.wrap(
                 response -> {
                     unMarkMigrationInProgress.onResponse(Boolean.FALSE);
@@ -424,7 +424,7 @@
     }
     private IndexRequest indexRequest(ToXContentObject source, String documentId, ToXContent.Params params) {
-        IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName()).id(documentId);
+        IndexRequest indexRequest = new IndexRequest(MlConfigIndex.indexName()).id(documentId);
         try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
             indexRequest.source(source.toXContent(builder, params));
@@ -490,7 +490,7 @@
     private void createConfigIndex(ActionListener<Boolean> listener) {
         logger.info("creating the .ml-config index");
-        CreateIndexRequest createIndexRequest = new CreateIndexRequest(AnomalyDetectorsIndex.configIndexName());
+        CreateIndexRequest createIndexRequest = new CreateIndexRequest(MlConfigIndex.indexName());
         try
         {
             createIndexRequest.settings(

View File

@@ -42,7 +42,7 @@ public class MlIndexTemplateRegistry extends IndexTemplateRegistry {
     private static final IndexTemplateConfig ANOMALY_DETECTION_STATE_TEMPLATE = stateTemplate();
-    private static final IndexTemplateConfig META_TEMPLATE = new IndexTemplateConfig(MlMetaIndex.INDEX_NAME,
+    private static final IndexTemplateConfig META_TEMPLATE = new IndexTemplateConfig(MlMetaIndex.indexName(),
         ROOT_RESOURCE_PATH + "meta_index_template.json", Version.CURRENT.id, VERSION_PATTERN,
         Collections.singletonMap(VERSION_ID_PATTERN, String.valueOf(Version.CURRENT.id)));
@@ -69,7 +69,7 @@
             String.valueOf(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW));
         variables.put("xpack.ml.config.mappings", MlConfigIndex.mapping());
-        return new IndexTemplateConfig(AnomalyDetectorsIndex.configIndexName(),
+        return new IndexTemplateConfig(MlConfigIndex.indexName(),
             ROOT_RESOURCE_PATH + "config_index_template.json",
             Version.CURRENT.id, VERSION_PATTERN,
             variables);

View File

@@ -74,7 +74,7 @@ public class TransportDeleteCalendarAction extends HandledTransportAction<Delete
     }
     private DeleteByQueryRequest buildDeleteByQuery(String calendarId) {
-        DeleteByQueryRequest request = new DeleteByQueryRequest(MlMetaIndex.INDEX_NAME);
+        DeleteByQueryRequest request = new DeleteByQueryRequest(MlMetaIndex.indexName());
         request.setSlices(AbstractBulkByScrollRequest.AUTO_SLICES);
         request.setRefresh(true);

View File

@@ -55,7 +55,7 @@ public class TransportDeleteCalendarEventAction extends HandledTransportAction<D
         ActionListener<Calendar> calendarListener = ActionListener.wrap(
             calendar -> {
-                GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, eventId);
+                GetRequest getRequest = new GetRequest(MlMetaIndex.indexName(), eventId);
                 executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, ActionListener.wrap(
                     getResponse -> {
                         if (getResponse.isExists() == false) {
@@ -89,7 +89,7 @@
     }
     private void deleteEvent(String eventId, Calendar calendar, ActionListener<AcknowledgedResponse> listener) {
-        DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.INDEX_NAME, eventId);
+        DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.indexName(), eventId);
         deleteRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
         executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, deleteRequest,

View File

@@ -39,6 +39,7 @@ import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.core.ml.MlConfigIndex;
 import org.elasticsearch.xpack.core.ml.MlStatsIndex;
 import org.elasticsearch.xpack.core.ml.MlTasks;
 import org.elasticsearch.xpack.core.ml.action.DeleteDataFrameAnalyticsAction;
@@ -222,7 +223,7 @@ public class TransportDeleteDataFrameAnalyticsAction
     }
     private void deleteConfig(ParentTaskAssigningClient parentTaskClient, String id, ActionListener<AcknowledgedResponse> listener) {
-        DeleteRequest deleteRequest = new DeleteRequest(AnomalyDetectorsIndex.configIndexName());
+        DeleteRequest deleteRequest = new DeleteRequest(MlConfigIndex.indexName());
         deleteRequest.id(DataFrameAnalyticsConfig.documentId(id));
         deleteRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
         executeAsyncWithOrigin(parentTaskClient, ML_ORIGIN, DeleteAction.INSTANCE, deleteRequest, ActionListener.wrap(

View File

@@ -82,7 +82,7 @@ public class TransportDeleteFilterAction extends HandledTransportAction<DeleteFi
     }
     private void deleteFilter(String filterId, ActionListener<AcknowledgedResponse> listener) {
-        DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.INDEX_NAME, MlFilter.documentId(filterId));
+        DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.indexName(), MlFilter.documentId(filterId));
         BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
         bulkRequestBuilder.add(deleteRequest);
         bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);

View File

@@ -24,9 +24,9 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.core.ml.MlConfigIndex;
 import org.elasticsearch.xpack.core.ml.action.FinalizeJobExecutionAction;
 import org.elasticsearch.xpack.core.ml.job.config.Job;
-import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
 import org.elasticsearch.xpack.ml.MachineLearning;
 import org.elasticsearch.xpack.ml.utils.VoidChainTaskExecutor;
@@ -76,7 +76,7 @@ public class TransportFinalizeJobExecutionAction extends TransportMasterNodeActi
         Map<String, Object> update = Collections.singletonMap(Job.FINISHED_TIME.getPreferredName(), new Date());
         for (String jobId: request.getJobIds()) {
-            UpdateRequest updateRequest = new UpdateRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId));
+            UpdateRequest updateRequest = new UpdateRequest(MlConfigIndex.indexName(), Job.documentId(jobId));
             updateRequest.retryOnConflict(3);
             updateRequest.doc(update);
             updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);

View File

@@ -19,9 +19,9 @@ import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.action.AbstractTransportGetResourcesAction;
+import org.elasticsearch.xpack.core.ml.MlConfigIndex;
 import org.elasticsearch.xpack.core.ml.action.GetDataFrameAnalyticsAction;
 import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig;
-import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
 import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
 import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
@@ -43,7 +43,7 @@ public class TransportGetDataFrameAnalyticsAction extends AbstractTransportGetRe
     @Override
     protected String[] getIndices() {
-        return new String[] { AnomalyDetectorsIndex.configIndexName() };
+        return new String[] { MlConfigIndex.indexName() };
     }
     @Override

View File

@@ -54,7 +54,7 @@ public class TransportGetFiltersAction extends AbstractTransportGetResourcesActi
     @Override
     protected String[] getIndices() {
-        return new String[]{MlMetaIndex.INDEX_NAME};
+        return new String[]{MlMetaIndex.indexName()};
     }
     @Override

View File

@@ -42,6 +42,7 @@ import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.XPackField;
+import org.elasticsearch.xpack.core.ml.MlConfigIndex;
 import org.elasticsearch.xpack.core.ml.MlMetaIndex;
 import org.elasticsearch.xpack.core.ml.MlMetadata;
 import org.elasticsearch.xpack.core.ml.MlTasks;
@@ -134,11 +135,11 @@ public class TransportOpenJobAction extends TransportMasterNodeAction<OpenJobAct
     static String[] indicesOfInterest(String resultsIndex) {
         if (resultsIndex == null) {
-            return new String[]{AnomalyDetectorsIndex.jobStateIndexPattern(), MlMetaIndex.INDEX_NAME,
-                AnomalyDetectorsIndex.configIndexName()};
+            return new String[]{AnomalyDetectorsIndex.jobStateIndexPattern(), MlMetaIndex.indexName(),
+                MlConfigIndex.indexName()};
         }
-        return new String[]{AnomalyDetectorsIndex.jobStateIndexPattern(), resultsIndex, MlMetaIndex.INDEX_NAME,
-            AnomalyDetectorsIndex.configIndexName()};
+        return new String[]{AnomalyDetectorsIndex.jobStateIndexPattern(), resultsIndex, MlMetaIndex.indexName(),
+            MlConfigIndex.indexName()};
     }
     static List<String> verifyIndicesPrimaryShardsAreActive(String resultsWriteIndex, ClusterState clusterState,

View File

@@ -62,7 +62,7 @@ public class TransportPostCalendarEventsAction extends HandledTransportAction<Po
         BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
         for (ScheduledEvent event: events) {
-            IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME);
+            IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName());
             try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
                 indexRequest.source(event.toXContent(builder,
                     new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.FOR_INTERNAL_STORAGE,

View File

@@ -47,7 +47,7 @@ public class TransportPutCalendarAction extends HandledTransportAction<PutCalend
     protected void doExecute(Task task, PutCalendarAction.Request request, ActionListener<PutCalendarAction.Response> listener) {
         Calendar calendar = request.getCalendar();
-        IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME).id(calendar.documentId());
+        IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName()).id(calendar.documentId());
         try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
             indexRequest.source(calendar.toXContent(builder,
                 new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.FOR_INTERNAL_STORAGE, "true"))));

View File

@@ -39,7 +39,6 @@ import org.elasticsearch.xpack.core.ml.MlConfigIndex;
 import org.elasticsearch.xpack.core.ml.action.PutDataFrameAnalyticsAction;
 import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig;
 import org.elasticsearch.xpack.core.ml.job.messages.Messages;
-import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
 import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
 import org.elasticsearch.xpack.core.security.SecurityContext;
 import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction;
@@ -212,7 +211,7 @@ public class TransportPutDataFrameAnalyticsAction
             return;
         }
         ElasticsearchMappings.addDocMappingIfMissing(
-            AnomalyDetectorsIndex.configIndexName(),
+            MlConfigIndex.indexName(),
             MlConfigIndex::mapping,
             client,
             clusterState,

View File

@@ -40,7 +40,6 @@ import org.elasticsearch.xpack.core.ml.MlConfigIndex;
 import org.elasticsearch.xpack.core.ml.MlMetadata;
 import org.elasticsearch.xpack.core.ml.action.PutDatafeedAction;
 import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
-import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
 import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
 import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
 import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction;
@@ -211,7 +210,7 @@ public class TransportPutDatafeedAction extends TransportMasterNodeAction<PutDat
             return;
         }
         ElasticsearchMappings.addDocMappingIfMissing(
-            AnomalyDetectorsIndex.configIndexName(),
+            MlConfigIndex.indexName(),
             MlConfigIndex::mapping,
             client,
             clusterState,

View File

@@ -47,7 +47,7 @@ public class TransportPutFilterAction extends HandledTransportAction<PutFilterAc
     @Override
     protected void doExecute(Task task, PutFilterAction.Request request, ActionListener<PutFilterAction.Response> listener) {
         MlFilter filter = request.getFilter();
-        IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME).id(filter.documentId());
+        IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName()).id(filter.documentId());
         indexRequest.opType(DocWriteRequest.OpType.CREATE);
         indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
         try (XContentBuilder builder = XContentFactory.jsonBuilder()) {

View File

@@ -48,6 +48,7 @@ import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.ClientHelper;
 import org.elasticsearch.xpack.core.XPackField;
 import org.elasticsearch.xpack.core.common.validation.SourceDestValidator;
+import org.elasticsearch.xpack.core.ml.MlConfigIndex;
 import org.elasticsearch.xpack.core.ml.MlMetadata;
 import org.elasticsearch.xpack.core.ml.MlStatsIndex;
 import org.elasticsearch.xpack.core.ml.MlTasks;
@@ -640,7 +641,7 @@ public class TransportStartDataFrameAnalyticsAction
         List<String> unavailableIndices =
             verifyIndicesPrimaryShardsAreActive(clusterState,
                 resolver,
-                AnomalyDetectorsIndex.configIndexName(),
+                MlConfigIndex.indexName(),
                 MlStatsIndex.indexPattern(),
                 AnomalyDetectorsIndex.jobStateIndexPattern());
         if (unavailableIndices.size() != 0) {

View File

@ -101,7 +101,7 @@ public class TransportUpdateFilterAction extends HandledTransportAction<UpdateFi
private void indexUpdatedFilter(MlFilter filter, final long seqNo, final long primaryTerm, private void indexUpdatedFilter(MlFilter filter, final long seqNo, final long primaryTerm,
UpdateFilterAction.Request request, UpdateFilterAction.Request request,
ActionListener<PutFilterAction.Response> listener) { ActionListener<PutFilterAction.Response> listener) {
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME).id(filter.documentId()); IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName()).id(filter.documentId());
indexRequest.setIfSeqNo(seqNo); indexRequest.setIfSeqNo(seqNo);
indexRequest.setIfPrimaryTerm(primaryTerm); indexRequest.setIfPrimaryTerm(primaryTerm);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
@@ -137,7 +137,7 @@ public class TransportUpdateFilterAction extends HandledTransportAction<UpdateFi
} }
private void getFilterWithVersion(String filterId, ActionListener<FilterWithSeqNo> listener) { private void getFilterWithVersion(String filterId, ActionListener<FilterWithSeqNo> listener) {
GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlFilter.documentId(filterId)); GetRequest getRequest = new GetRequest(MlMetaIndex.indexName(), MlFilter.documentId(filterId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override @Override
public void onResponse(GetResponse getDocResponse) { public void onResponse(GetResponse getDocResponse) {

View File

@@ -46,6 +46,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.xpack.core.action.util.ExpandedIdsMatcher; import org.elasticsearch.xpack.core.action.util.ExpandedIdsMatcher;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate;
@@ -117,7 +118,7 @@ public class DatafeedConfigProvider {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
XContentBuilder source = config.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); XContentBuilder source = config.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS));
IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName()) IndexRequest indexRequest = new IndexRequest(MlConfigIndex.indexName())
.id(DatafeedConfig.documentId(datafeedId)) .id(DatafeedConfig.documentId(datafeedId))
.source(source) .source(source)
.opType(DocWriteRequest.OpType.CREATE) .opType(DocWriteRequest.OpType.CREATE)
@@ -152,7 +153,7 @@ public class DatafeedConfigProvider {
* @param datafeedConfigListener The config listener * @param datafeedConfigListener The config listener
*/ */
public void getDatafeedConfig(String datafeedId, ActionListener<DatafeedConfig.Builder> datafeedConfigListener) { public void getDatafeedConfig(String datafeedId, ActionListener<DatafeedConfig.Builder> datafeedConfigListener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), DatafeedConfig.documentId(datafeedId)); GetRequest getRequest = new GetRequest(MlConfigIndex.indexName(), DatafeedConfig.documentId(datafeedId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override @Override
public void onResponse(GetResponse getResponse) { public void onResponse(GetResponse getResponse) {
@@ -189,7 +190,7 @@ public class DatafeedConfigProvider {
sourceBuilder.fetchSource(false); sourceBuilder.fetchSource(false);
sourceBuilder.docValueField(DatafeedConfig.ID.getPreferredName(), null); sourceBuilder.docValueField(DatafeedConfig.ID.getPreferredName(), null);
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName()) SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSize(jobIds.size()) .setSize(jobIds.size())
.setSource(sourceBuilder).request(); .setSource(sourceBuilder).request();
@@ -219,7 +220,7 @@ public class DatafeedConfigProvider {
* @param actionListener Deleted datafeed listener * @param actionListener Deleted datafeed listener
*/ */
public void deleteDatafeedConfig(String datafeedId, ActionListener<DeleteResponse> actionListener) { public void deleteDatafeedConfig(String datafeedId, ActionListener<DeleteResponse> actionListener) {
DeleteRequest request = new DeleteRequest(AnomalyDetectorsIndex.configIndexName(), DatafeedConfig.documentId(datafeedId)); DeleteRequest request = new DeleteRequest(MlConfigIndex.indexName(), DatafeedConfig.documentId(datafeedId));
request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, request, new ActionListener<DeleteResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, request, new ActionListener<DeleteResponse>() {
@Override @Override
@@ -256,7 +257,7 @@ public class DatafeedConfigProvider {
public void updateDatefeedConfig(String datafeedId, DatafeedUpdate update, Map<String, String> headers, public void updateDatefeedConfig(String datafeedId, DatafeedUpdate update, Map<String, String> headers,
BiConsumer<DatafeedConfig, ActionListener<Boolean>> validator, BiConsumer<DatafeedConfig, ActionListener<Boolean>> validator,
ActionListener<DatafeedConfig> updatedConfigListener) { ActionListener<DatafeedConfig> updatedConfigListener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), DatafeedConfig.documentId(datafeedId)); GetRequest getRequest = new GetRequest(MlConfigIndex.indexName(), DatafeedConfig.documentId(datafeedId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override @Override
@@ -312,7 +313,7 @@ public class DatafeedConfigProvider {
ActionListener<IndexResponse> listener) { ActionListener<IndexResponse> listener) {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
XContentBuilder updatedSource = updatedConfig.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); XContentBuilder updatedSource = updatedConfig.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS));
IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName()) IndexRequest indexRequest = new IndexRequest(MlConfigIndex.indexName())
.id(DatafeedConfig.documentId(updatedConfig.getId())) .id(DatafeedConfig.documentId(updatedConfig.getId()))
.source(updatedSource) .source(updatedSource)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
@@ -365,7 +366,7 @@ public class DatafeedConfigProvider {
sourceBuilder.fetchSource(false); sourceBuilder.fetchSource(false);
sourceBuilder.docValueField(DatafeedConfig.ID.getPreferredName(), null); sourceBuilder.docValueField(DatafeedConfig.ID.getPreferredName(), null);
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName()) SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder) .setSource(sourceBuilder)
.setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW) .setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)
@@ -417,7 +418,7 @@ public class DatafeedConfigProvider {
SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(buildDatafeedIdQuery(tokens)); SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(buildDatafeedIdQuery(tokens));
sourceBuilder.sort(DatafeedConfig.ID.getPreferredName()); sourceBuilder.sort(DatafeedConfig.ID.getPreferredName());
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName()) SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder) .setSource(sourceBuilder)
.setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW) .setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)

View File

@@ -31,9 +31,9 @@ import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.action.util.PageParams;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.action.GetDataFrameAnalyticsAction; import org.elasticsearch.xpack.core.ml.action.GetDataFrameAnalyticsAction;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.core.ml.utils.ToXContentParams; import org.elasticsearch.xpack.core.ml.utils.ToXContentParams;
@@ -79,7 +79,7 @@ public class DataFrameAnalyticsConfigProvider {
} }
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
config.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); config.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS));
IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName()) IndexRequest indexRequest = new IndexRequest(MlConfigIndex.indexName())
.id(DataFrameAnalyticsConfig.documentId(config.getId())) .id(DataFrameAnalyticsConfig.documentId(config.getId()))
.opType(DocWriteRequest.OpType.CREATE) .opType(DocWriteRequest.OpType.CREATE)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
@@ -139,7 +139,7 @@ public class DataFrameAnalyticsConfigProvider {
query.filter(QueryBuilders.termQuery(DataFrameAnalyticsConfig.CONFIG_TYPE.getPreferredName(), DataFrameAnalyticsConfig.TYPE)); query.filter(QueryBuilders.termQuery(DataFrameAnalyticsConfig.CONFIG_TYPE.getPreferredName(), DataFrameAnalyticsConfig.TYPE));
query.filter(QueryBuilders.termsQuery(DataFrameAnalyticsConfig.ID.getPreferredName(), jobsWithTask)); query.filter(QueryBuilders.termsQuery(DataFrameAnalyticsConfig.ID.getPreferredName(), jobsWithTask));
SearchRequest searchRequest = new SearchRequest(AnomalyDetectorsIndex.configIndexName()); SearchRequest searchRequest = new SearchRequest(MlConfigIndex.indexName());
searchRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); searchRequest.indicesOptions(IndicesOptions.lenientExpandOpen());
searchRequest.source().size(DataFrameAnalyticsConfigProvider.MAX_CONFIGS_SIZE); searchRequest.source().size(DataFrameAnalyticsConfigProvider.MAX_CONFIGS_SIZE);
searchRequest.source().query(query); searchRequest.source().query(query);

View File

@@ -46,7 +46,6 @@ import org.elasticsearch.xpack.core.ml.job.config.JobState;
import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; import org.elasticsearch.xpack.core.ml.job.config.JobUpdate;
import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.job.config.MlFilter;
import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
@@ -296,7 +295,7 @@ public class JobManager {
return; return;
} }
ElasticsearchMappings.addDocMappingIfMissing( ElasticsearchMappings.addDocMappingIfMissing(
AnomalyDetectorsIndex.configIndexName(), MlConfigIndex::mapping, client, state, putJobListener); MlConfigIndex.indexName(), MlConfigIndex::mapping, client, state, putJobListener);
}, },
putJobListener::onFailure putJobListener::onFailure
); );

View File

@@ -57,6 +57,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.xpack.core.action.util.ExpandedIdsMatcher; import org.elasticsearch.xpack.core.action.util.ExpandedIdsMatcher;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedJobValidator; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedJobValidator;
@@ -126,7 +127,7 @@ public class JobConfigProvider {
public void putJob(Job job, ActionListener<IndexResponse> listener) { public void putJob(Job job, ActionListener<IndexResponse> listener) {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
XContentBuilder source = job.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); XContentBuilder source = job.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS));
IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName()) IndexRequest indexRequest = new IndexRequest(MlConfigIndex.indexName())
.id(Job.documentId(job.getId())) .id(Job.documentId(job.getId()))
.source(source) .source(source)
.opType(DocWriteRequest.OpType.CREATE) .opType(DocWriteRequest.OpType.CREATE)
@@ -160,7 +161,7 @@ public class JobConfigProvider {
* @param jobListener Job listener * @param jobListener Job listener
*/ */
public void getJob(String jobId, ActionListener<Job.Builder> jobListener) { public void getJob(String jobId, ActionListener<Job.Builder> jobListener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId)); GetRequest getRequest = new GetRequest(MlConfigIndex.indexName(), Job.documentId(jobId));
executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, getRequest, new ActionListener<GetResponse>() {
@Override @Override
@@ -197,7 +198,7 @@ public class JobConfigProvider {
* @param actionListener Deleted job listener * @param actionListener Deleted job listener
*/ */
public void deleteJob(String jobId, boolean errorIfMissing, ActionListener<DeleteResponse> actionListener) { public void deleteJob(String jobId, boolean errorIfMissing, ActionListener<DeleteResponse> actionListener) {
DeleteRequest request = new DeleteRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId)); DeleteRequest request = new DeleteRequest(MlConfigIndex.indexName(), Job.documentId(jobId));
request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, request, new ActionListener<DeleteResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, request, new ActionListener<DeleteResponse>() {
@@ -233,7 +234,7 @@ public class JobConfigProvider {
*/ */
public void updateJob(String jobId, JobUpdate update, ByteSizeValue maxModelMemoryLimit, public void updateJob(String jobId, JobUpdate update, ByteSizeValue maxModelMemoryLimit,
ActionListener<Job> updatedJobListener) { ActionListener<Job> updatedJobListener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId)); GetRequest getRequest = new GetRequest(MlConfigIndex.indexName(), Job.documentId(jobId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override @Override
@@ -297,7 +298,7 @@ public class JobConfigProvider {
*/ */
public void updateJobWithValidation(String jobId, JobUpdate update, ByteSizeValue maxModelMemoryLimit, public void updateJobWithValidation(String jobId, JobUpdate update, ByteSizeValue maxModelMemoryLimit,
UpdateValidator validator, ActionListener<Job> updatedJobListener) { UpdateValidator validator, ActionListener<Job> updatedJobListener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId)); GetRequest getRequest = new GetRequest(MlConfigIndex.indexName(), Job.documentId(jobId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override @Override
@@ -347,7 +348,7 @@ public class JobConfigProvider {
ActionListener<Job> updatedJobListener) { ActionListener<Job> updatedJobListener) {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
XContentBuilder updatedSource = updatedJob.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder updatedSource = updatedJob.toXContent(builder, ToXContent.EMPTY_PARAMS);
IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName()) IndexRequest indexRequest = new IndexRequest(MlConfigIndex.indexName())
.id(Job.documentId(updatedJob.getId())) .id(Job.documentId(updatedJob.getId()))
.source(updatedSource) .source(updatedSource)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
@@ -383,7 +384,7 @@ public class JobConfigProvider {
* @param listener Exists listener * @param listener Exists listener
*/ */
public void jobExists(String jobId, boolean errorIfMissing, ActionListener<Boolean> listener) { public void jobExists(String jobId, boolean errorIfMissing, ActionListener<Boolean> listener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId)); GetRequest getRequest = new GetRequest(MlConfigIndex.indexName(), Job.documentId(jobId));
getRequest.fetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE); getRequest.fetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE);
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@@ -431,7 +432,7 @@ public class JobConfigProvider {
sourceBuilder.fetchSource(false); sourceBuilder.fetchSource(false);
sourceBuilder.docValueField(Job.ID.getPreferredName(), null); sourceBuilder.docValueField(Job.ID.getPreferredName(), null);
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName()) SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder) .setSource(sourceBuilder)
.setSize(ids.size()) .setSize(ids.size())
@@ -457,7 +458,7 @@ public class JobConfigProvider {
* @param listener Responds with true if successful else an error * @param listener Responds with true if successful else an error
*/ */
public void markJobAsDeleting(String jobId, ActionListener<Boolean> listener) { public void markJobAsDeleting(String jobId, ActionListener<Boolean> listener) {
UpdateRequest updateRequest = new UpdateRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId)); UpdateRequest updateRequest = new UpdateRequest(MlConfigIndex.indexName(), Job.documentId(jobId));
updateRequest.retryOnConflict(3); updateRequest.retryOnConflict(3);
updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
updateRequest.doc(Collections.singletonMap(Job.DELETING.getPreferredName(), Boolean.TRUE)); updateRequest.doc(Collections.singletonMap(Job.DELETING.getPreferredName(), Boolean.TRUE));
@@ -524,7 +525,7 @@ public class JobConfigProvider {
sourceBuilder.docValueField(Job.ID.getPreferredName(), null); sourceBuilder.docValueField(Job.ID.getPreferredName(), null);
sourceBuilder.docValueField(Job.GROUPS.getPreferredName(), null); sourceBuilder.docValueField(Job.GROUPS.getPreferredName(), null);
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName()) SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder) .setSource(sourceBuilder)
.setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW) .setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)
@@ -582,7 +583,7 @@ public class JobConfigProvider {
SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(buildJobWildcardQuery(tokens, excludeDeleting)); SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(buildJobWildcardQuery(tokens, excludeDeleting));
sourceBuilder.sort(Job.ID.getPreferredName()); sourceBuilder.sort(Job.ID.getPreferredName());
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName()) SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder) .setSource(sourceBuilder)
.setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW) .setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)
@@ -640,7 +641,7 @@ public class JobConfigProvider {
sourceBuilder.fetchSource(false); sourceBuilder.fetchSource(false);
sourceBuilder.docValueField(Job.ID.getPreferredName(), null); sourceBuilder.docValueField(Job.ID.getPreferredName(), null);
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName()) SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder) .setSource(sourceBuilder)
.setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW) .setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)
@@ -678,7 +679,7 @@ public class JobConfigProvider {
.query(boolQueryBuilder); .query(boolQueryBuilder);
sourceBuilder.fetchSource(false); sourceBuilder.fetchSource(false);
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName()) SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setSize(0) .setSize(0)
.setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder).request(); .setSource(sourceBuilder).request();
@@ -702,7 +703,7 @@ public class JobConfigProvider {
SearchSourceBuilder sourceBuilder = new SearchSourceBuilder() SearchSourceBuilder sourceBuilder = new SearchSourceBuilder()
.query(QueryBuilders.nestedQuery(customRulesPath, QueryBuilders.existsQuery(customRulesPath), ScoreMode.None)); .query(QueryBuilders.nestedQuery(customRulesPath, QueryBuilders.existsQuery(customRulesPath), ScoreMode.None));
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName()) SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder) .setSource(sourceBuilder)
.setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW) .setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)

View File

@@ -609,7 +609,7 @@ public class JobResultsProvider {
.add(createDocIdSearch(stateIndex, Quantiles.documentId(jobId))); .add(createDocIdSearch(stateIndex, Quantiles.documentId(jobId)));
for (String filterId : job.getAnalysisConfig().extractReferencedFilters()) { for (String filterId : job.getAnalysisConfig().extractReferencedFilters()) {
msearch.add(createDocIdSearch(MlMetaIndex.INDEX_NAME, MlFilter.documentId(filterId))); msearch.add(createDocIdSearch(MlMetaIndex.indexName(), MlFilter.documentId(filterId)));
} }
executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, msearch.request(), executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, msearch.request(),
@@ -1310,7 +1310,7 @@ public class JobResultsProvider {
} }
public void scheduledEvents(ScheduledEventsQueryBuilder query, ActionListener<QueryPage<ScheduledEvent>> handler) { public void scheduledEvents(ScheduledEventsQueryBuilder query, ActionListener<QueryPage<ScheduledEvent>> handler) {
SearchRequestBuilder request = client.prepareSearch(MlMetaIndex.INDEX_NAME) SearchRequestBuilder request = client.prepareSearch(MlMetaIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(query.build()) .setSource(query.build())
.setTrackTotalHits(true); .setTrackTotalHits(true);
@@ -1453,7 +1453,7 @@ public class JobResultsProvider {
currentJobs.removeAll(jobIdsToRemove); currentJobs.removeAll(jobIdsToRemove);
Calendar updatedCalendar = new Calendar(calendar.getId(), new ArrayList<>(currentJobs), calendar.getDescription()); Calendar updatedCalendar = new Calendar(calendar.getId(), new ArrayList<>(currentJobs), calendar.getDescription());
UpdateRequest updateRequest = new UpdateRequest(MlMetaIndex.INDEX_NAME, updatedCalendar.documentId()); UpdateRequest updateRequest = new UpdateRequest(MlMetaIndex.indexName(), updatedCalendar.documentId());
updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
@@ -1476,7 +1476,7 @@ public class JobResultsProvider {
} }
public void calendars(CalendarQueryBuilder queryBuilder, ActionListener<QueryPage<Calendar>> listener) { public void calendars(CalendarQueryBuilder queryBuilder, ActionListener<QueryPage<Calendar>> listener) {
SearchRequest searchRequest = client.prepareSearch(MlMetaIndex.INDEX_NAME) SearchRequest searchRequest = client.prepareSearch(MlMetaIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setTrackTotalHits(true) .setTrackTotalHits(true)
.setSource(queryBuilder.build()).request(); .setSource(queryBuilder.build()).request();
@@ -1517,7 +1517,7 @@ public class JobResultsProvider {
.filter(jId -> jobId.equals(jId) == false) .filter(jId -> jobId.equals(jId) == false)
.collect(Collectors.toList()); .collect(Collectors.toList());
Calendar newCalendar = new Calendar(calendar.getId(), ids, calendar.getDescription()); Calendar newCalendar = new Calendar(calendar.getId(), ids, calendar.getDescription());
UpdateRequest updateRequest = new UpdateRequest(MlMetaIndex.INDEX_NAME, newCalendar.documentId()); UpdateRequest updateRequest = new UpdateRequest(MlMetaIndex.indexName(), newCalendar.documentId());
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
updateRequest.doc(newCalendar.toXContent(builder, ToXContent.EMPTY_PARAMS)); updateRequest.doc(newCalendar.toXContent(builder, ToXContent.EMPTY_PARAMS));
} catch (IOException e) { } catch (IOException e) {
@@ -1541,7 +1541,7 @@ public class JobResultsProvider {
} }
public void calendar(String calendarId, ActionListener<Calendar> listener) { public void calendar(String calendarId, ActionListener<Calendar> listener) {
GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, Calendar.documentId(calendarId)); GetRequest getRequest = new GetRequest(MlMetaIndex.indexName(), Calendar.documentId(calendarId));
executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, getRequest, new ActionListener<GetResponse>() {
@Override @Override
public void onResponse(GetResponse getDocResponse) { public void onResponse(GetResponse getDocResponse) {

View File

@@ -17,8 +17,8 @@ import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.ml.utils.persistence.SearchAfterDocumentsIterator; import org.elasticsearch.xpack.ml.utils.persistence.SearchAfterDocumentsIterator;
import java.io.IOException; import java.io.IOException;
@@ -29,7 +29,7 @@ public class SearchAfterJobsIterator extends SearchAfterDocumentsIterator<Job.Bu
private String lastJobId; private String lastJobId;
public SearchAfterJobsIterator(OriginSettingClient client) { public SearchAfterJobsIterator(OriginSettingClient client) {
super(client, AnomalyDetectorsIndex.configIndexName()); super(client, MlConfigIndex.indexName());
} }
@Override @Override

View File

@@ -15,6 +15,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryAction;
import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.xpack.core.ml.dataframe.analyses.Classification; import org.elasticsearch.xpack.core.ml.dataframe.analyses.Classification;
@@ -109,7 +110,7 @@ public class UnusedStateRemover implements MlDataRemover {
// and remove cluster service as a member all together. // and remove cluster service as a member all together.
jobIds.addAll(MlMetadata.getMlMetadata(clusterService.state()).getJobs().keySet()); jobIds.addAll(MlMetadata.getMlMetadata(clusterService.state()).getJobs().keySet());
DocIdBatchedDocumentIterator iterator = new DocIdBatchedDocumentIterator(client, AnomalyDetectorsIndex.configIndexName(), DocIdBatchedDocumentIterator iterator = new DocIdBatchedDocumentIterator(client, MlConfigIndex.indexName(),
QueryBuilders.termQuery(Job.JOB_TYPE.getPreferredName(), Job.ANOMALY_DETECTOR_JOB_TYPE)); QueryBuilders.termQuery(Job.JOB_TYPE.getPreferredName(), Job.ANOMALY_DETECTOR_JOB_TYPE));
while (iterator.hasNext()) { while (iterator.hasNext()) {
Deque<String> docIds = iterator.next(); Deque<String> docIds = iterator.next();
@@ -121,7 +122,7 @@ public class UnusedStateRemover implements MlDataRemover {
private Set<String> getDataFrameAnalyticsJobIds() { private Set<String> getDataFrameAnalyticsJobIds() {
Set<String> jobIds = new HashSet<>(); Set<String> jobIds = new HashSet<>();
DocIdBatchedDocumentIterator iterator = new DocIdBatchedDocumentIterator(client, AnomalyDetectorsIndex.configIndexName(), DocIdBatchedDocumentIterator iterator = new DocIdBatchedDocumentIterator(client, MlConfigIndex.indexName(),
QueryBuilders.termQuery(DataFrameAnalyticsConfig.CONFIG_TYPE.getPreferredName(), DataFrameAnalyticsConfig.TYPE)); QueryBuilders.termQuery(DataFrameAnalyticsConfig.CONFIG_TYPE.getPreferredName(), DataFrameAnalyticsConfig.TYPE));
while (iterator.hasNext()) { while (iterator.hasNext()) {
Deque<String> docIds = iterator.next(); Deque<String> docIds = iterator.next();

View File

@@ -28,6 +28,7 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.action.OpenJobAction; import org.elasticsearch.xpack.core.ml.action.OpenJobAction;
@@ -35,7 +36,6 @@ import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.config.JobTests; import org.elasticsearch.xpack.core.ml.job.config.JobTests;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.junit.Before; import org.junit.Before;
import java.net.InetAddress; import java.net.InetAddress;
@@ -142,14 +142,14 @@ public class MlConfigMigrationEligibilityCheckTests extends ESTestCase {
} }
private void addMlConfigIndex(Metadata.Builder metadata, RoutingTable.Builder routingTable) { private void addMlConfigIndex(Metadata.Builder metadata, RoutingTable.Builder routingTable) {
IndexMetadata.Builder indexMetadata = IndexMetadata.builder(AnomalyDetectorsIndex.configIndexName()); IndexMetadata.Builder indexMetadata = IndexMetadata.builder(MlConfigIndex.indexName());
indexMetadata.settings(Settings.builder() indexMetadata.settings(Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
); );
metadata.put(indexMetadata); metadata.put(indexMetadata);
Index index = new Index(AnomalyDetectorsIndex.configIndexName(), "_uuid"); Index index = new Index(MlConfigIndex.indexName(), "_uuid");
ShardId shardId = new ShardId(index, 0); ShardId shardId = new ShardId(index, 0);
ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, true, RecoverySource.EmptyStoreRecoverySource.INSTANCE, ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, true, RecoverySource.EmptyStoreRecoverySource.INSTANCE,
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")); new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""));

View File

@@ -33,6 +33,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetadata.Assignment;
import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.Task;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.MlMetaIndex;
import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.action.OpenJobAction; import org.elasticsearch.xpack.core.ml.action.OpenJobAction;
@@ -237,9 +238,9 @@ public class TransportOpenJobActionTests extends ESTestCase {
private void addIndices(Metadata.Builder metadata, RoutingTable.Builder routingTable) { private void addIndices(Metadata.Builder metadata, RoutingTable.Builder routingTable) {
List<String> indices = new ArrayList<>(); List<String> indices = new ArrayList<>();
indices.add(AnomalyDetectorsIndex.configIndexName()); indices.add(MlConfigIndex.indexName());
indices.add(AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX); indices.add(AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX);
indices.add(MlMetaIndex.INDEX_NAME); indices.add(MlMetaIndex.indexName());
indices.add(NotificationsIndex.NOTIFICATIONS_INDEX); indices.add(NotificationsIndex.NOTIFICATIONS_INDEX);
indices.add(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT); indices.add(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT);
for (String indexName : indices) { for (String indexName : indices) {

View File

@@ -21,7 +21,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index; import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import java.util.List; import java.util.List;
@@ -33,7 +33,7 @@ public class TransportStartDataFrameAnalyticsActionTests extends ESTestCase {
public void testVerifyIndicesPrimaryShardsAreActive() { public void testVerifyIndicesPrimaryShardsAreActive() {
// At present the only critical index is the config index // At present the only critical index is the config index
String indexName = AnomalyDetectorsIndex.configIndexName(); String indexName = MlConfigIndex.indexName();
Metadata.Builder metadata = Metadata.builder(); Metadata.Builder metadata = Metadata.builder();
RoutingTable.Builder routingTable = RoutingTable.builder(); RoutingTable.Builder routingTable = RoutingTable.builder();

View File

@@ -42,6 +42,7 @@ import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.MachineLearningField;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.action.PutJobAction; import org.elasticsearch.xpack.core.ml.action.PutJobAction;
import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateJobAction;
@@ -54,7 +55,6 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.config.JobState;
import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.job.config.MlFilter;
import org.elasticsearch.xpack.core.ml.job.config.RuleScope; import org.elasticsearch.xpack.core.ml.job.config.RuleScope;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MachineLearning;
import org.elasticsearch.xpack.ml.MlConfigMigrationEligibilityCheck; import org.elasticsearch.xpack.ml.MlConfigMigrationEligibilityCheck;
import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzerTests; import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzerTests;
@@ -219,7 +219,7 @@ public class JobManagerTests extends ESTestCase {
docsAsBytes.add(toBytesReference(indexJobFoo.build())); docsAsBytes.add(toBytesReference(indexJobFoo.build()));
MockClientBuilder mockClientBuilder = new MockClientBuilder("cluster-test"); MockClientBuilder mockClientBuilder = new MockClientBuilder("cluster-test");
mockClientBuilder.prepareSearch(AnomalyDetectorsIndex.configIndexName(), docsAsBytes); mockClientBuilder.prepareSearch(MlConfigIndex.indexName(), docsAsBytes);
JobManager jobManager = createJobManager(mockClientBuilder.build()); JobManager jobManager = createJobManager(mockClientBuilder.build());
@@ -366,7 +366,7 @@ public class JobManagerTests extends ESTestCase {
}).when(updateJobProcessNotifier).submitJobUpdate(any(), any()); }).when(updateJobProcessNotifier).submitJobUpdate(any(), any());
MockClientBuilder mockClientBuilder = new MockClientBuilder("cluster-test"); MockClientBuilder mockClientBuilder = new MockClientBuilder("cluster-test");
mockClientBuilder.prepareSearch(AnomalyDetectorsIndex.configIndexName(), docsAsBytes); mockClientBuilder.prepareSearch(MlConfigIndex.indexName(), docsAsBytes);
JobManager jobManager = createJobManager(mockClientBuilder.build()); JobManager jobManager = createJobManager(mockClientBuilder.build());
MlFilter filter = MlFilter.builder("foo_filter").setItems("a", "b").build(); MlFilter filter = MlFilter.builder("foo_filter").setItems("a", "b").build();
@@ -417,7 +417,7 @@ public class JobManagerTests extends ESTestCase {
when(clusterService.state()).thenReturn(clusterState); when(clusterService.state()).thenReturn(clusterState);
MockClientBuilder mockClientBuilder = new MockClientBuilder("cluster-test"); MockClientBuilder mockClientBuilder = new MockClientBuilder("cluster-test");
mockClientBuilder.prepareSearch(AnomalyDetectorsIndex.configIndexName(), docsAsBytes); mockClientBuilder.prepareSearch(MlConfigIndex.indexName(), docsAsBytes);
JobManager jobManager = createJobManager(mockClientBuilder.build()); JobManager jobManager = createJobManager(mockClientBuilder.build());
MlFilter filter = MlFilter.builder("foo_filter").build(); MlFilter filter = MlFilter.builder("foo_filter").build();
@@ -453,7 +453,7 @@ public class JobManagerTests extends ESTestCase {
when(clusterService.state()).thenReturn(clusterState); when(clusterService.state()).thenReturn(clusterState);
MockClientBuilder mockClientBuilder = new MockClientBuilder("cluster-test"); MockClientBuilder mockClientBuilder = new MockClientBuilder("cluster-test");
mockClientBuilder.prepareSearch(AnomalyDetectorsIndex.configIndexName(), docsAsBytes); mockClientBuilder.prepareSearch(MlConfigIndex.indexName(), docsAsBytes);
JobManager jobManager = createJobManager(mockClientBuilder.build()); JobManager jobManager = createJobManager(mockClientBuilder.build());
MlFilter filter = MlFilter.builder("foo_filter").build(); MlFilter filter = MlFilter.builder("foo_filter").build();
@@ -474,14 +474,14 @@ public class JobManagerTests extends ESTestCase {
Metadata.Builder metadata = Metadata.builder(); Metadata.Builder metadata = Metadata.builder();
RoutingTable.Builder routingTable = RoutingTable.builder(); RoutingTable.Builder routingTable = RoutingTable.builder();
IndexMetadata.Builder indexMetadata = IndexMetadata.builder(AnomalyDetectorsIndex.configIndexName()); IndexMetadata.Builder indexMetadata = IndexMetadata.builder(MlConfigIndex.indexName());
indexMetadata.settings(Settings.builder() indexMetadata.settings(Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
); );
metadata.put(indexMetadata); metadata.put(indexMetadata);
Index index = new Index(AnomalyDetectorsIndex.configIndexName(), "_uuid"); Index index = new Index(MlConfigIndex.indexName(), "_uuid");
ShardId shardId = new ShardId(index, 0); ShardId shardId = new ShardId(index, 0);
ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, true, RecoverySource.EmptyStoreRecoverySource.INSTANCE, ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, true, RecoverySource.EmptyStoreRecoverySource.INSTANCE,
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")); new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""));
@@ -521,7 +521,7 @@ public class JobManagerTests extends ESTestCase {
MockClientBuilder mockClientBuilder = new MockClientBuilder("cluster-test"); MockClientBuilder mockClientBuilder = new MockClientBuilder("cluster-test");
// For the JobConfigProvider expand groups search. // For the JobConfigProvider expand groups search.
// The search will not return any results // The search will not return any results
mockClientBuilder.prepareSearchFields(AnomalyDetectorsIndex.configIndexName(), Collections.emptyList()); mockClientBuilder.prepareSearchFields(MlConfigIndex.indexName(), Collections.emptyList());
JobManager jobManager = createJobManager(mockClientBuilder.build()); JobManager jobManager = createJobManager(mockClientBuilder.build());
@@ -564,7 +564,7 @@ public class JobManagerTests extends ESTestCase {
new DocumentField(Job.ID.getPreferredName(), Collections.singletonList("job-2")))); new DocumentField(Job.ID.getPreferredName(), Collections.singletonList("job-2"))));
mockClientBuilder.prepareSearchFields(AnomalyDetectorsIndex.configIndexName(), fieldHits); mockClientBuilder.prepareSearchFields(MlConfigIndex.indexName(), fieldHits);
JobManager jobManager = createJobManager(mockClientBuilder.build()); JobManager jobManager = createJobManager(mockClientBuilder.build());
jobManager.updateProcessOnCalendarChanged(Collections.singletonList("group-1"), jobManager.updateProcessOnCalendarChanged(Collections.singletonList("group-1"),

View File

@@ -19,6 +19,7 @@ import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.MlMetaIndex;
import org.elasticsearch.xpack.core.ml.integration.MlRestTestStateCleaner; import org.elasticsearch.xpack.core.ml.integration.MlRestTestStateCleaner;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
@@ -86,10 +87,10 @@ public class XPackRestIT extends ESClientYamlSuiteTestCase {
templates.addAll( templates.addAll(
Arrays.asList( Arrays.asList(
NotificationsIndex.NOTIFICATIONS_INDEX, NotificationsIndex.NOTIFICATIONS_INDEX,
MlMetaIndex.INDEX_NAME, MlMetaIndex.indexName(),
AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX, AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX,
AnomalyDetectorsIndex.jobResultsIndexPrefix(), AnomalyDetectorsIndex.jobResultsIndexPrefix(),
AnomalyDetectorsIndex.configIndexName(), MlConfigIndex.indexName(),
TransformInternalIndexConstants.AUDIT_INDEX, TransformInternalIndexConstants.AUDIT_INDEX,
TransformInternalIndexConstants.LATEST_INDEX_NAME TransformInternalIndexConstants.LATEST_INDEX_NAME
)); ));
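
Illustrative footnote (not part of the commit): across all of the hunks above, callers now obtain the two index names through the static accessors rather than through the removed constants. The sketch below shows that caller-side pattern in minimal form, reusing the 7.x request classes already seen in this diff; the method and document-ID names are hypothetical placeholders, not code from the commit.

import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetaIndex;

final class MlIndexNameUsageSketch {

    // Reads a config document from .ml-config, resolved via the accessor
    // instead of a shared string constant.
    static GetRequest readConfigDoc(String configDocId) {
        return new GetRequest(MlConfigIndex.indexName(), configDocId);
    }

    // Writes a document to .ml-meta, resolved via MlMetaIndex.indexName().
    static IndexRequest writeMetaDoc(String metaDocId) {
        return new IndexRequest(MlMetaIndex.indexName()).id(metaDocId);
    }

    private MlIndexNameUsageSketch() {}
}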