[7.x] Use static methods (rather than constants) to obtain .ml-meta and .ml-config index names (#58484) (#58490)

Przemysław Witek authored on 2020-06-24 15:52:45 +02:00; committed by GitHub
parent fa88e71532
commit 551b8bcd73
43 changed files with 159 additions and 143 deletions
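
The heart of the change is that the .ml-meta and .ml-config index names are now exposed through static accessor methods instead of public constants. A minimal, standalone sketch of the resulting pattern is shown here; it mirrors the new MlMetaIndex in the hunks below but is illustrative only (the real class lives in org.elasticsearch.xpack.core.ml and has no main method).

// Standalone sketch of the accessor pattern introduced by this commit (illustrative only).
public final class MlMetaIndex {

    private static final String INDEX_NAME = ".ml-meta";

    /**
     * Where to store the ml info in Elasticsearch.
     *
     * @return The index name
     */
    public static String indexName() {
        return INDEX_NAME;
    }

    private MlMetaIndex() {}

    // Call sites switch from the removed public constant to the accessor, e.g.
    // new IndexRequest(MlMetaIndex.indexName()) instead of
    // new IndexRequest(MlMetaIndex.INDEX_NAME). This demo main is not part of
    // the real class.
    public static void main(String[] args) {
        System.out.println("ML meta index: " + indexName());
    }
}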


@ -11,18 +11,31 @@ import org.elasticsearch.xpack.core.template.TemplateUtils;
import java.util.Collections;
public class MlConfigIndex {
public final class MlConfigIndex {
private static final String INDEX_NAME = ".ml-config";
private static final String MAPPINGS_VERSION_VARIABLE = "xpack.ml.version";
private MlConfigIndex() {}
/**
* The name of the index where job, datafeed and analytics configuration is stored
*
* @return The index name
*/
public static String indexName() {
return INDEX_NAME;
}
public static String mapping() {
return mapping(MapperService.SINGLE_MAPPING_NAME);
}
public static String mapping(String mappingType) {
return TemplateUtils.loadTemplate("/org/elasticsearch/xpack/core/ml/config_index_mappings.json",
Version.CURRENT.toString(), MAPPINGS_VERSION_VARIABLE, Collections.singletonMap("xpack.ml.mapping_type", mappingType));
return TemplateUtils.loadTemplate(
"/org/elasticsearch/xpack/core/ml/config_index_mappings.json",
Version.CURRENT.toString(),
MAPPINGS_VERSION_VARIABLE,
Collections.singletonMap("xpack.ml.mapping_type", mappingType));
}
private MlConfigIndex() {}
}
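
The hunk above gives MlConfigIndex the same kind of accessor next to its existing mapping() helpers, so both the index name and its mapping are obtained through static methods. Below is a self-contained sketch of how these pieces compose; MlConfigIndexSketch and its hard-coded mapping JSON are stand-ins (the real mapping() loads config_index_mappings.json via TemplateUtils and substitutes the xpack.ml.version variable), and a real consumer later in this diff is ElasticsearchMappings.addDocMappingIfMissing(MlConfigIndex.indexName(), MlConfigIndex::mapping, ...).

import java.util.function.Supplier;

// Standalone sketch: index name and mapping are both obtained through static
// methods, so a consumer can take the mapping as a plain method reference.
public final class MlConfigIndexSketch {

    private static final String INDEX_NAME = ".ml-config";

    /** @return the name of the index where job, datafeed and analytics configuration is stored */
    public static String indexName() {
        return INDEX_NAME;
    }

    /** Placeholder for the template-backed mapping of the real class. */
    public static String mapping() {
        return "{\"_doc\":{\"properties\":{}}}";
    }

    private MlConfigIndexSketch() {}

    public static void main(String[] args) {
        // In this sketch the method reference is consumed as a Supplier<String>.
        Supplier<String> mappingSupplier = MlConfigIndexSketch::mapping;
        System.out.println(indexName() + " -> " + mappingSupplier.get());
    }
}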


@ -6,12 +6,18 @@
package org.elasticsearch.xpack.core.ml;
public final class MlMetaIndex {
private static final String INDEX_NAME = ".ml-meta";
/**
* Where to store the ml info in Elasticsearch - must match what's
* expected by kibana/engineAPI/app/directives/mlLogUsage.js
*
* @return The index name
*/
public static final String INDEX_NAME = ".ml-meta";
public static String indexName() {
return INDEX_NAME;
}
private MlMetaIndex() {}
}


@ -68,15 +68,6 @@ public final class AnomalyDetectorsIndex {
return AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX + "*";
}
/**
* The name of the index where job and datafeed configuration
* is stored
* @return The index name
*/
public static String configIndexName() {
return AnomalyDetectorsIndexFields.CONFIG_INDEX;
}
/**
* Creates the .ml-state-000001 index (if necessary)
* Creates the .ml-state-write alias for the .ml-state-000001 index (if necessary)


@ -7,8 +7,6 @@ package org.elasticsearch.xpack.core.ml.job.persistence;
public final class AnomalyDetectorsIndexFields {
public static final String CONFIG_INDEX = ".ml-config";
public static final String STATE_INDEX_PREFIX = ".ml-state";
public static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-";


@ -56,6 +56,7 @@ import org.elasticsearch.xpack.core.ilm.action.GetLifecycleAction;
import org.elasticsearch.xpack.core.ilm.action.PutLifecycleAction;
import org.elasticsearch.xpack.core.ilm.action.StartILMAction;
import org.elasticsearch.xpack.core.ilm.action.StopILMAction;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetaIndex;
import org.elasticsearch.xpack.core.ml.action.CloseJobAction;
import org.elasticsearch.xpack.core.ml.action.DeleteCalendarAction;
@ -1164,8 +1165,8 @@ public class ReservedRolesStoreTests extends ESTestCase {
assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false));
assertNoAccessAllowed(role, "foo");
assertNoAccessAllowed(role, AnomalyDetectorsIndexFields.CONFIG_INDEX); // internal use only
assertOnlyReadAllowed(role, MlMetaIndex.INDEX_NAME);
assertNoAccessAllowed(role, MlConfigIndex.indexName()); // internal use only
assertOnlyReadAllowed(role, MlMetaIndex.indexName());
assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX);
assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT);
assertOnlyReadAllowed(role, NotificationsIndex.NOTIFICATIONS_INDEX);
@ -1320,8 +1321,8 @@ public class ReservedRolesStoreTests extends ESTestCase {
assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false));
assertNoAccessAllowed(role, "foo");
assertNoAccessAllowed(role, AnomalyDetectorsIndexFields.CONFIG_INDEX);
assertNoAccessAllowed(role, MlMetaIndex.INDEX_NAME);
assertNoAccessAllowed(role, MlConfigIndex.indexName());
assertNoAccessAllowed(role, MlMetaIndex.indexName());
assertNoAccessAllowed(role, AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX);
assertOnlyReadAllowed(role, AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT);
assertOnlyReadAllowed(role, NotificationsIndex.NOTIFICATIONS_INDEX);


@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.integration;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.action.CloseJobAction;
import org.elasticsearch.xpack.core.ml.action.GetDatafeedsStatsAction;
@ -17,7 +18,6 @@ import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
import org.elasticsearch.xpack.core.ml.job.config.DataDescription;
import org.elasticsearch.xpack.core.ml.job.config.Detector;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields;
import org.junit.After;
import java.util.Collections;
@ -42,8 +42,8 @@ public class JobAndDatafeedResilienceIT extends MlNativeAutodetectIntegTestCase
putJob(job);
openJob(job.getId());
client().prepareDelete(AnomalyDetectorsIndexFields.CONFIG_INDEX, "_doc", Job.documentId(jobId)).get();
client().admin().indices().prepareRefresh(AnomalyDetectorsIndexFields.CONFIG_INDEX).get();
client().prepareDelete(MlConfigIndex.indexName(), "_doc", Job.documentId(jobId)).get();
client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> {
CloseJobAction.Request request = new CloseJobAction.Request(jobId);
@ -82,8 +82,8 @@ public class JobAndDatafeedResilienceIT extends MlNativeAutodetectIntegTestCase
putDatafeed(datafeedConfig);
startDatafeed(datafeedConfig.getId(), 0L, null);
client().prepareDelete(AnomalyDetectorsIndexFields.CONFIG_INDEX, "_doc", DatafeedConfig.documentId(datafeedConfig.getId())).get();
client().admin().indices().prepareRefresh(AnomalyDetectorsIndexFields.CONFIG_INDEX).get();
client().prepareDelete(MlConfigIndex.indexName(), "_doc", DatafeedConfig.documentId(datafeedConfig.getId())).get();
client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> {
StopDatafeedAction.Request request = new StopDatafeedAction.Request(datafeedConfig.getId());
@ -120,8 +120,8 @@ public class JobAndDatafeedResilienceIT extends MlNativeAutodetectIntegTestCase
putJob(job2);
openJob(job2.getId());
client().prepareDelete(AnomalyDetectorsIndexFields.CONFIG_INDEX, "_doc", Job.documentId(jobId1)).get();
client().admin().indices().prepareRefresh(AnomalyDetectorsIndexFields.CONFIG_INDEX).get();
client().prepareDelete(MlConfigIndex.indexName(), "_doc", Job.documentId(jobId1)).get();
client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
List<GetJobsStatsAction.Response.JobStats> jobStats = client().execute(GetJobsStatsAction.INSTANCE,
new GetJobsStatsAction.Request("*"))
@ -175,8 +175,8 @@ public class JobAndDatafeedResilienceIT extends MlNativeAutodetectIntegTestCase
putDatafeed(datafeedConfig2);
startDatafeed(datafeedConfig2.getId(), 0L, null);
client().prepareDelete(AnomalyDetectorsIndexFields.CONFIG_INDEX, "_doc", DatafeedConfig.documentId(datafeedConfig1.getId())).get();
client().admin().indices().prepareRefresh(AnomalyDetectorsIndexFields.CONFIG_INDEX).get();
client().prepareDelete(MlConfigIndex.indexName(), "_doc", DatafeedConfig.documentId(datafeedConfig1.getId())).get();
client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
List<GetDatafeedsStatsAction.Response.DatafeedStats> dfStats = client().execute(GetDatafeedsStatsAction.INSTANCE,
new GetDatafeedsStatsAction.Request("*"))


@ -157,7 +157,7 @@ abstract class MlNativeIntegTestCase extends ESIntegTestCase {
protected Set<String> excludeTemplates() {
return new HashSet<>(Arrays.asList(
NotificationsIndex.NOTIFICATIONS_INDEX,
MlMetaIndex.INDEX_NAME,
MlMetaIndex.indexName(),
AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX,
AnomalyDetectorsIndex.jobResultsIndexPrefix(),
InferenceIndexConstants.LATEST_INDEX_NAME,


@ -27,6 +27,7 @@ import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.xpack.core.TestXPackTransportClient;
import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.action.CloseJobAction;
import org.elasticsearch.xpack.core.ml.action.DeleteDatafeedAction;
import org.elasticsearch.xpack.core.ml.action.DeleteJobAction;
@ -51,7 +52,6 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TargetType;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree.Tree;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree.TreeNode;
import org.elasticsearch.xpack.core.ml.job.config.JobState;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.ml.LocalStateMachineLearning;
import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase;
import org.junit.Before;
@ -250,7 +250,7 @@ public class MachineLearningLicensingIT extends BaseMlIntegTestCase {
}
assertMLAllowed(false);
client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
// now that the license is invalid, the job should be closed and datafeed stopped:
assertBusy(() -> {


@ -14,11 +14,11 @@ import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.ml.MlSingleNodeTestCase;
import org.elasticsearch.xpack.ml.datafeed.persistence.DatafeedConfigProvider;
import org.hamcrest.core.IsInstanceOf;
@ -249,7 +249,7 @@ public class DatafeedConfigProviderIT extends MlSingleNodeTestCase {
DatafeedConfig bar2 = putDatafeedConfig(createDatafeedConfig("bar-2", "j4"), Collections.emptyMap());
putDatafeedConfig(createDatafeedConfig("not-used", "j5"), Collections.emptyMap());
client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
// Test datafeed IDs only
SortedSet<String> expandedIds =
@ -302,7 +302,7 @@ public class DatafeedConfigProviderIT extends MlSingleNodeTestCase {
public void testExpandDatafeedsWithTaskData() throws Exception {
putDatafeedConfig(createDatafeedConfig("foo-2", "j2"), Collections.emptyMap());
client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
PersistentTasksCustomMetadata.Builder tasksBuilder = PersistentTasksCustomMetadata.builder();
tasksBuilder.addTask(MlTasks.datafeedTaskId("foo-1"),
@ -330,7 +330,7 @@ public class DatafeedConfigProviderIT extends MlSingleNodeTestCase {
putDatafeedConfig(createDatafeedConfig("foo-2", "j2"), Collections.emptyMap());
putDatafeedConfig(createDatafeedConfig("bar-1", "j3"), Collections.emptyMap());
client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
AtomicReference<Set<String>> datafeedIdsHolder = new AtomicReference<>();
AtomicReference<Exception> exceptionHolder = new AtomicReference<>();


@ -17,6 +17,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.action.OpenJobAction;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
@ -30,7 +31,6 @@ import org.elasticsearch.xpack.core.ml.job.config.Operator;
import org.elasticsearch.xpack.core.ml.job.config.RuleCondition;
import org.elasticsearch.xpack.core.ml.job.config.RuleScope;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.ml.MlSingleNodeTestCase;
import org.elasticsearch.xpack.ml.job.persistence.JobConfigProvider;
import org.junit.Before;
@ -297,7 +297,7 @@ public class JobConfigProviderIT extends MlSingleNodeTestCase {
Job harry = putJob(createJob("harry", Collections.singletonList("harry-group")));
Job harryJnr = putJob(createJob("harry-jnr", Collections.singletonList("harry-group")));
client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
// Job Ids
SortedSet<String> expandedIds = blockingCall(actionListener ->
@ -369,7 +369,7 @@ public class JobConfigProviderIT extends MlSingleNodeTestCase {
Job bar2 = putJob(createJob("bar-2", Collections.singletonList("bar")));
Job nbar = putJob(createJob("nbar", Collections.singletonList("bar")));
client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
// Test job IDs only
SortedSet<String> expandedIds = blockingCall(actionListener -> jobConfigProvider.expandJobsIds("foo*",
@ -417,7 +417,7 @@ public class JobConfigProviderIT extends MlSingleNodeTestCase {
Boolean marked = blockingCall(actionListener -> jobConfigProvider.markJobAsDeleting("foo-deleting", actionListener));
assertTrue(marked);
client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
SortedSet<String> expandedIds = blockingCall(actionListener -> jobConfigProvider.expandJobsIds("foo*",
true,
@ -448,7 +448,7 @@ public class JobConfigProviderIT extends MlSingleNodeTestCase {
putJob(createJob("foo-1", null));
putJob(createJob("bar", null));
client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
PersistentTasksCustomMetadata.Builder tasksBuilder = PersistentTasksCustomMetadata.builder();
tasksBuilder.addTask(MlTasks.jobTaskId("foo-2"),
@ -480,7 +480,7 @@ public class JobConfigProviderIT extends MlSingleNodeTestCase {
putJob(createJob("tomato", Arrays.asList("fruit", "veg")));
putJob(createJob("unrelated", Collections.emptyList()));
client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
SortedSet<String> expandedIds = blockingCall(actionListener ->
jobConfigProvider.expandGroupIds(Collections.singletonList("fruit"), actionListener));
@ -517,7 +517,7 @@ public class JobConfigProviderIT extends MlSingleNodeTestCase {
jobWithRules2 = addCustomRule(jobWithRules2, rule);
putJob(jobWithRules2);
client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
List<Job> foundJobs = blockingCall(listener -> jobConfigProvider.findJobsWithCustomRules(listener));
@ -568,7 +568,7 @@ public class JobConfigProviderIT extends MlSingleNodeTestCase {
String jobId = "mark-as-deleting-job";
putJob(createJob(jobId, Collections.emptyList()));
client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.configIndexName()).get();
client().admin().indices().prepareRefresh(MlConfigIndex.indexName()).get();
exceptionHolder.set(null);
blockingCall(listener -> jobConfigProvider.markJobAsDeleting(jobId, listener), responseHolder, exceptionHolder);


@ -428,7 +428,7 @@ public class JobResultsProviderIT extends MlSingleNodeTestCase {
throw exceptionHolder.get();
}
client().admin().indices().prepareRefresh(MlMetaIndex.INDEX_NAME).get();
client().admin().indices().prepareRefresh(MlMetaIndex.indexName()).get();
}
private Calendar getCalendar(String calendarId) throws Exception {
@ -586,7 +586,7 @@ public class JobResultsProviderIT extends MlSingleNodeTestCase {
Quantiles quantiles = new Quantiles(jobId, new Date(), "quantile-state");
indexQuantiles(quantiles);
client().admin().indices().prepareRefresh(MlMetaIndex.INDEX_NAME, AnomalyDetectorsIndex.jobStateIndexPattern(),
client().admin().indices().prepareRefresh(MlMetaIndex.indexName(), AnomalyDetectorsIndex.jobStateIndexPattern(),
AnomalyDetectorsIndex.jobResultsAliasedName(jobId)).get();
@ -706,7 +706,7 @@ public class JobResultsProviderIT extends MlSingleNodeTestCase {
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (ScheduledEvent event : events) {
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME);
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName());
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(
ToXContentParams.FOR_INTERNAL_STORAGE, "true"));
@ -730,7 +730,7 @@ public class JobResultsProviderIT extends MlSingleNodeTestCase {
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (MlFilter filter : filters) {
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME).id(filter.documentId());
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName()).id(filter.documentId());
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(
ToXContentParams.FOR_INTERNAL_STORAGE, "true"));
@ -764,7 +764,7 @@ public class JobResultsProviderIT extends MlSingleNodeTestCase {
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (Calendar calendar: calendars) {
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME).id(calendar.documentId());
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName()).id(calendar.documentId());
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
ToXContent.MapParams params = new ToXContent.MapParams(
Collections.singletonMap(ToXContentParams.FOR_INTERNAL_STORAGE, "true"));


@ -36,6 +36,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.job.config.Job;
@ -410,14 +411,14 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase {
}
private void addMlConfigIndex(Metadata.Builder metadata, RoutingTable.Builder routingTable) {
IndexMetadata.Builder indexMetadata = IndexMetadata.builder(AnomalyDetectorsIndex.configIndexName());
IndexMetadata.Builder indexMetadata = IndexMetadata.builder(MlConfigIndex.indexName());
indexMetadata.settings(Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
);
metadata.put(indexMetadata);
Index index = new Index(AnomalyDetectorsIndex.configIndexName(), "_uuid");
Index index = new Index(MlConfigIndex.indexName(), "_uuid");
ShardId shardId = new ShardId(index, 0);
ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, true, RecoverySource.EmptyStoreRecoverySource.INSTANCE,
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""));
@ -449,7 +450,7 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase {
}
private boolean configIndexExists() {
return client().admin().indices().prepareExists(AnomalyDetectorsIndex.configIndexName()).get().isExists();
return client().admin().indices().prepareExists(MlConfigIndex.indexName()).get().isExists();
}
}


@ -66,6 +66,7 @@ import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.core.XPackPlugin;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.ml.MachineLearningField;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetaIndex;
import org.elasticsearch.xpack.core.ml.action.CloseJobAction;
import org.elasticsearch.xpack.core.ml.action.DeleteCalendarAction;
@ -966,7 +967,7 @@ public class MachineLearning extends Plugin implements SystemIndexPlugin,
List<String> templateNames =
Arrays.asList(
NotificationsIndex.NOTIFICATIONS_INDEX,
MlMetaIndex.INDEX_NAME,
MlMetaIndex.indexName(),
AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX,
AnomalyDetectorsIndex.jobResultsIndexPrefix(),
InferenceIndexConstants.LATEST_INDEX_NAME);
@ -1011,8 +1012,8 @@ public class MachineLearning extends Plugin implements SystemIndexPlugin,
@Override
public Collection<SystemIndexDescriptor> getSystemIndexDescriptors(Settings settings) {
return Collections.unmodifiableList(Arrays.asList(
new SystemIndexDescriptor(MlMetaIndex.INDEX_NAME, "Contains scheduling and anomaly tracking metadata"),
new SystemIndexDescriptor(AnomalyDetectorsIndexFields.CONFIG_INDEX, "Contains ML configuration data"),
new SystemIndexDescriptor(MlMetaIndex.indexName(), "Contains scheduling and anomaly tracking metadata"),
new SystemIndexDescriptor(MlConfigIndex.indexName(), "Contains ML configuration data"),
new SystemIndexDescriptor(InferenceIndexConstants.INDEX_PATTERN, "Contains ML model configuration and statistics")
));
}


@ -12,10 +12,10 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
/**
* Checks whether migration can start and whether ML resources (e.g. jobs, datafeeds)
@ -63,11 +63,11 @@ public class MlConfigMigrationEligibilityCheck {
}
static boolean mlConfigIndexIsAllocated(ClusterState clusterState) {
if (clusterState.metadata().hasIndex(AnomalyDetectorsIndex.configIndexName()) == false) {
if (clusterState.metadata().hasIndex(MlConfigIndex.indexName()) == false) {
return false;
}
IndexRoutingTable routingTable = clusterState.getRoutingTable().index(AnomalyDetectorsIndex.configIndexName());
IndexRoutingTable routingTable = clusterState.getRoutingTable().index(MlConfigIndex.indexName());
if (routingTable == null || routingTable.allPrimaryShardsActive() == false) {
return false;
}


@ -166,7 +166,7 @@ public class MlConfigMigrator {
return;
}
if (clusterState.metadata().hasIndex(AnomalyDetectorsIndex.configIndexName()) == false) {
if (clusterState.metadata().hasIndex(MlConfigIndex.indexName()) == false) {
createConfigIndex(ActionListener.wrap(
response -> {
unMarkMigrationInProgress.onResponse(Boolean.FALSE);
@ -424,7 +424,7 @@ public class MlConfigMigrator {
}
private IndexRequest indexRequest(ToXContentObject source, String documentId, ToXContent.Params params) {
IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName()).id(documentId);
IndexRequest indexRequest = new IndexRequest(MlConfigIndex.indexName()).id(documentId);
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
indexRequest.source(source.toXContent(builder, params));
@ -490,7 +490,7 @@ public class MlConfigMigrator {
private void createConfigIndex(ActionListener<Boolean> listener) {
logger.info("creating the .ml-config index");
CreateIndexRequest createIndexRequest = new CreateIndexRequest(AnomalyDetectorsIndex.configIndexName());
CreateIndexRequest createIndexRequest = new CreateIndexRequest(MlConfigIndex.indexName());
try
{
createIndexRequest.settings(


@ -42,7 +42,7 @@ public class MlIndexTemplateRegistry extends IndexTemplateRegistry {
private static final IndexTemplateConfig ANOMALY_DETECTION_STATE_TEMPLATE = stateTemplate();
private static final IndexTemplateConfig META_TEMPLATE = new IndexTemplateConfig(MlMetaIndex.INDEX_NAME,
private static final IndexTemplateConfig META_TEMPLATE = new IndexTemplateConfig(MlMetaIndex.indexName(),
ROOT_RESOURCE_PATH + "meta_index_template.json", Version.CURRENT.id, VERSION_PATTERN,
Collections.singletonMap(VERSION_ID_PATTERN, String.valueOf(Version.CURRENT.id)));
@ -69,7 +69,7 @@ public class MlIndexTemplateRegistry extends IndexTemplateRegistry {
String.valueOf(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW));
variables.put("xpack.ml.config.mappings", MlConfigIndex.mapping());
return new IndexTemplateConfig(AnomalyDetectorsIndex.configIndexName(),
return new IndexTemplateConfig(MlConfigIndex.indexName(),
ROOT_RESOURCE_PATH + "config_index_template.json",
Version.CURRENT.id, VERSION_PATTERN,
variables);


@ -74,7 +74,7 @@ public class TransportDeleteCalendarAction extends HandledTransportAction<Delete
}
private DeleteByQueryRequest buildDeleteByQuery(String calendarId) {
DeleteByQueryRequest request = new DeleteByQueryRequest(MlMetaIndex.INDEX_NAME);
DeleteByQueryRequest request = new DeleteByQueryRequest(MlMetaIndex.indexName());
request.setSlices(AbstractBulkByScrollRequest.AUTO_SLICES);
request.setRefresh(true);


@ -55,7 +55,7 @@ public class TransportDeleteCalendarEventAction extends HandledTransportAction<D
ActionListener<Calendar> calendarListener = ActionListener.wrap(
calendar -> {
GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, eventId);
GetRequest getRequest = new GetRequest(MlMetaIndex.indexName(), eventId);
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, ActionListener.wrap(
getResponse -> {
if (getResponse.isExists() == false) {
@ -89,7 +89,7 @@ public class TransportDeleteCalendarEventAction extends HandledTransportAction<D
}
private void deleteEvent(String eventId, Calendar calendar, ActionListener<AcknowledgedResponse> listener) {
DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.INDEX_NAME, eventId);
DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.indexName(), eventId);
deleteRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, deleteRequest,


@ -39,6 +39,7 @@ import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlStatsIndex;
import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.action.DeleteDataFrameAnalyticsAction;
@ -222,7 +223,7 @@ public class TransportDeleteDataFrameAnalyticsAction
}
private void deleteConfig(ParentTaskAssigningClient parentTaskClient, String id, ActionListener<AcknowledgedResponse> listener) {
DeleteRequest deleteRequest = new DeleteRequest(AnomalyDetectorsIndex.configIndexName());
DeleteRequest deleteRequest = new DeleteRequest(MlConfigIndex.indexName());
deleteRequest.id(DataFrameAnalyticsConfig.documentId(id));
deleteRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
executeAsyncWithOrigin(parentTaskClient, ML_ORIGIN, DeleteAction.INSTANCE, deleteRequest, ActionListener.wrap(


@ -82,7 +82,7 @@ public class TransportDeleteFilterAction extends HandledTransportAction<DeleteFi
}
private void deleteFilter(String filterId, ActionListener<AcknowledgedResponse> listener) {
DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.INDEX_NAME, MlFilter.documentId(filterId));
DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.indexName(), MlFilter.documentId(filterId));
BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
bulkRequestBuilder.add(deleteRequest);
bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);


@ -24,9 +24,9 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.action.FinalizeJobExecutionAction;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.ml.MachineLearning;
import org.elasticsearch.xpack.ml.utils.VoidChainTaskExecutor;
@ -76,7 +76,7 @@ public class TransportFinalizeJobExecutionAction extends TransportMasterNodeActi
Map<String, Object> update = Collections.singletonMap(Job.FINISHED_TIME.getPreferredName(), new Date());
for (String jobId: request.getJobIds()) {
UpdateRequest updateRequest = new UpdateRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId));
UpdateRequest updateRequest = new UpdateRequest(MlConfigIndex.indexName(), Job.documentId(jobId));
updateRequest.retryOnConflict(3);
updateRequest.doc(update);
updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);


@ -19,9 +19,9 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.action.AbstractTransportGetResourcesAction;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.action.GetDataFrameAnalyticsAction;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
@ -43,7 +43,7 @@ public class TransportGetDataFrameAnalyticsAction extends AbstractTransportGetRe
@Override
protected String[] getIndices() {
return new String[] { AnomalyDetectorsIndex.configIndexName() };
return new String[] { MlConfigIndex.indexName() };
}
@Override


@ -54,7 +54,7 @@ public class TransportGetFiltersAction extends AbstractTransportGetResourcesActi
@Override
protected String[] getIndices() {
return new String[]{MlMetaIndex.INDEX_NAME};
return new String[]{MlMetaIndex.indexName()};
}
@Override


@ -42,6 +42,7 @@ import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetaIndex;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.MlTasks;
@ -134,11 +135,11 @@ public class TransportOpenJobAction extends TransportMasterNodeAction<OpenJobAct
static String[] indicesOfInterest(String resultsIndex) {
if (resultsIndex == null) {
return new String[]{AnomalyDetectorsIndex.jobStateIndexPattern(), MlMetaIndex.INDEX_NAME,
AnomalyDetectorsIndex.configIndexName()};
return new String[]{AnomalyDetectorsIndex.jobStateIndexPattern(), MlMetaIndex.indexName(),
MlConfigIndex.indexName()};
}
return new String[]{AnomalyDetectorsIndex.jobStateIndexPattern(), resultsIndex, MlMetaIndex.INDEX_NAME,
AnomalyDetectorsIndex.configIndexName()};
return new String[]{AnomalyDetectorsIndex.jobStateIndexPattern(), resultsIndex, MlMetaIndex.indexName(),
MlConfigIndex.indexName()};
}
static List<String> verifyIndicesPrimaryShardsAreActive(String resultsWriteIndex, ClusterState clusterState,


@ -62,7 +62,7 @@ public class TransportPostCalendarEventsAction extends HandledTransportAction<Po
BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
for (ScheduledEvent event: events) {
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME);
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName());
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
indexRequest.source(event.toXContent(builder,
new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.FOR_INTERNAL_STORAGE,


@ -47,7 +47,7 @@ public class TransportPutCalendarAction extends HandledTransportAction<PutCalend
protected void doExecute(Task task, PutCalendarAction.Request request, ActionListener<PutCalendarAction.Response> listener) {
Calendar calendar = request.getCalendar();
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME).id(calendar.documentId());
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName()).id(calendar.documentId());
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
indexRequest.source(calendar.toXContent(builder,
new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.FOR_INTERNAL_STORAGE, "true"))));


@ -39,7 +39,6 @@ import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.action.PutDataFrameAnalyticsAction;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import org.elasticsearch.xpack.core.security.SecurityContext;
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction;
@ -212,7 +211,7 @@ public class TransportPutDataFrameAnalyticsAction
return;
}
ElasticsearchMappings.addDocMappingIfMissing(
AnomalyDetectorsIndex.configIndexName(),
MlConfigIndex.indexName(),
MlConfigIndex::mapping,
client,
clusterState,


@ -40,7 +40,6 @@ import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.action.PutDatafeedAction;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction;
@ -211,7 +210,7 @@ public class TransportPutDatafeedAction extends TransportMasterNodeAction<PutDat
return;
}
ElasticsearchMappings.addDocMappingIfMissing(
AnomalyDetectorsIndex.configIndexName(),
MlConfigIndex.indexName(),
MlConfigIndex::mapping,
client,
clusterState,


@ -47,7 +47,7 @@ public class TransportPutFilterAction extends HandledTransportAction<PutFilterAc
@Override
protected void doExecute(Task task, PutFilterAction.Request request, ActionListener<PutFilterAction.Response> listener) {
MlFilter filter = request.getFilter();
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME).id(filter.documentId());
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName()).id(filter.documentId());
indexRequest.opType(DocWriteRequest.OpType.CREATE);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {


@ -48,6 +48,7 @@ import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.common.validation.SourceDestValidator;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.MlStatsIndex;
import org.elasticsearch.xpack.core.ml.MlTasks;
@ -640,7 +641,7 @@ public class TransportStartDataFrameAnalyticsAction
List<String> unavailableIndices =
verifyIndicesPrimaryShardsAreActive(clusterState,
resolver,
AnomalyDetectorsIndex.configIndexName(),
MlConfigIndex.indexName(),
MlStatsIndex.indexPattern(),
AnomalyDetectorsIndex.jobStateIndexPattern());
if (unavailableIndices.size() != 0) {


@ -101,7 +101,7 @@ public class TransportUpdateFilterAction extends HandledTransportAction<UpdateFi
private void indexUpdatedFilter(MlFilter filter, final long seqNo, final long primaryTerm,
UpdateFilterAction.Request request,
ActionListener<PutFilterAction.Response> listener) {
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME).id(filter.documentId());
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.indexName()).id(filter.documentId());
indexRequest.setIfSeqNo(seqNo);
indexRequest.setIfPrimaryTerm(primaryTerm);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
@ -137,7 +137,7 @@ public class TransportUpdateFilterAction extends HandledTransportAction<UpdateFi
}
private void getFilterWithVersion(String filterId, ActionListener<FilterWithSeqNo> listener) {
GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlFilter.documentId(filterId));
GetRequest getRequest = new GetRequest(MlMetaIndex.indexName(), MlFilter.documentId(filterId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override
public void onResponse(GetResponse getDocResponse) {


@ -46,6 +46,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.xpack.core.action.util.ExpandedIdsMatcher;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate;
@ -117,7 +118,7 @@ public class DatafeedConfigProvider {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
XContentBuilder source = config.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS));
IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName())
IndexRequest indexRequest = new IndexRequest(MlConfigIndex.indexName())
.id(DatafeedConfig.documentId(datafeedId))
.source(source)
.opType(DocWriteRequest.OpType.CREATE)
@ -152,7 +153,7 @@ public class DatafeedConfigProvider {
* @param datafeedConfigListener The config listener
*/
public void getDatafeedConfig(String datafeedId, ActionListener<DatafeedConfig.Builder> datafeedConfigListener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), DatafeedConfig.documentId(datafeedId));
GetRequest getRequest = new GetRequest(MlConfigIndex.indexName(), DatafeedConfig.documentId(datafeedId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override
public void onResponse(GetResponse getResponse) {
@ -189,7 +190,7 @@ public class DatafeedConfigProvider {
sourceBuilder.fetchSource(false);
sourceBuilder.docValueField(DatafeedConfig.ID.getPreferredName(), null);
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSize(jobIds.size())
.setSource(sourceBuilder).request();
@ -219,7 +220,7 @@ public class DatafeedConfigProvider {
* @param actionListener Deleted datafeed listener
*/
public void deleteDatafeedConfig(String datafeedId, ActionListener<DeleteResponse> actionListener) {
DeleteRequest request = new DeleteRequest(AnomalyDetectorsIndex.configIndexName(), DatafeedConfig.documentId(datafeedId));
DeleteRequest request = new DeleteRequest(MlConfigIndex.indexName(), DatafeedConfig.documentId(datafeedId));
request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, request, new ActionListener<DeleteResponse>() {
@Override
@ -256,7 +257,7 @@ public class DatafeedConfigProvider {
public void updateDatefeedConfig(String datafeedId, DatafeedUpdate update, Map<String, String> headers,
BiConsumer<DatafeedConfig, ActionListener<Boolean>> validator,
ActionListener<DatafeedConfig> updatedConfigListener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), DatafeedConfig.documentId(datafeedId));
GetRequest getRequest = new GetRequest(MlConfigIndex.indexName(), DatafeedConfig.documentId(datafeedId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override
@ -312,7 +313,7 @@ public class DatafeedConfigProvider {
ActionListener<IndexResponse> listener) {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
XContentBuilder updatedSource = updatedConfig.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS));
IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName())
IndexRequest indexRequest = new IndexRequest(MlConfigIndex.indexName())
.id(DatafeedConfig.documentId(updatedConfig.getId()))
.source(updatedSource)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
@ -365,7 +366,7 @@ public class DatafeedConfigProvider {
sourceBuilder.fetchSource(false);
sourceBuilder.docValueField(DatafeedConfig.ID.getPreferredName(), null);
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder)
.setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)
@ -417,7 +418,7 @@ public class DatafeedConfigProvider {
SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(buildDatafeedIdQuery(tokens));
sourceBuilder.sort(DatafeedConfig.ID.getPreferredName());
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder)
.setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)


@ -31,9 +31,9 @@ import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.core.action.util.PageParams;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.action.GetDataFrameAnalyticsAction;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.core.ml.utils.ToXContentParams;
@ -79,7 +79,7 @@ public class DataFrameAnalyticsConfigProvider {
}
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
config.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS));
IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName())
IndexRequest indexRequest = new IndexRequest(MlConfigIndex.indexName())
.id(DataFrameAnalyticsConfig.documentId(config.getId()))
.opType(DocWriteRequest.OpType.CREATE)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
@ -139,7 +139,7 @@ public class DataFrameAnalyticsConfigProvider {
query.filter(QueryBuilders.termQuery(DataFrameAnalyticsConfig.CONFIG_TYPE.getPreferredName(), DataFrameAnalyticsConfig.TYPE));
query.filter(QueryBuilders.termsQuery(DataFrameAnalyticsConfig.ID.getPreferredName(), jobsWithTask));
SearchRequest searchRequest = new SearchRequest(AnomalyDetectorsIndex.configIndexName());
SearchRequest searchRequest = new SearchRequest(MlConfigIndex.indexName());
searchRequest.indicesOptions(IndicesOptions.lenientExpandOpen());
searchRequest.source().size(DataFrameAnalyticsConfigProvider.MAX_CONFIGS_SIZE);
searchRequest.source().query(query);


@ -46,7 +46,6 @@ import org.elasticsearch.xpack.core.ml.job.config.JobState;
import org.elasticsearch.xpack.core.ml.job.config.JobUpdate;
import org.elasticsearch.xpack.core.ml.job.config.MlFilter;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
@ -296,7 +295,7 @@ public class JobManager {
return;
}
ElasticsearchMappings.addDocMappingIfMissing(
AnomalyDetectorsIndex.configIndexName(), MlConfigIndex::mapping, client, state, putJobListener);
MlConfigIndex.indexName(), MlConfigIndex::mapping, client, state, putJobListener);
},
putJobListener::onFailure
);


@ -57,6 +57,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.xpack.core.action.util.ExpandedIdsMatcher;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedJobValidator;
@ -126,7 +127,7 @@ public class JobConfigProvider {
public void putJob(Job job, ActionListener<IndexResponse> listener) {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
XContentBuilder source = job.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS));
IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName())
IndexRequest indexRequest = new IndexRequest(MlConfigIndex.indexName())
.id(Job.documentId(job.getId()))
.source(source)
.opType(DocWriteRequest.OpType.CREATE)
@ -160,7 +161,7 @@ public class JobConfigProvider {
* @param jobListener Job listener
*/
public void getJob(String jobId, ActionListener<Job.Builder> jobListener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId));
GetRequest getRequest = new GetRequest(MlConfigIndex.indexName(), Job.documentId(jobId));
executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, getRequest, new ActionListener<GetResponse>() {
@Override
@ -197,7 +198,7 @@ public class JobConfigProvider {
* @param actionListener Deleted job listener
*/
public void deleteJob(String jobId, boolean errorIfMissing, ActionListener<DeleteResponse> actionListener) {
DeleteRequest request = new DeleteRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId));
DeleteRequest request = new DeleteRequest(MlConfigIndex.indexName(), Job.documentId(jobId));
request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, request, new ActionListener<DeleteResponse>() {
@ -233,7 +234,7 @@ public class JobConfigProvider {
*/
public void updateJob(String jobId, JobUpdate update, ByteSizeValue maxModelMemoryLimit,
ActionListener<Job> updatedJobListener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId));
GetRequest getRequest = new GetRequest(MlConfigIndex.indexName(), Job.documentId(jobId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override
@ -297,7 +298,7 @@ public class JobConfigProvider {
*/
public void updateJobWithValidation(String jobId, JobUpdate update, ByteSizeValue maxModelMemoryLimit,
UpdateValidator validator, ActionListener<Job> updatedJobListener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId));
GetRequest getRequest = new GetRequest(MlConfigIndex.indexName(), Job.documentId(jobId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override
@ -347,7 +348,7 @@ public class JobConfigProvider {
ActionListener<Job> updatedJobListener) {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
XContentBuilder updatedSource = updatedJob.toXContent(builder, ToXContent.EMPTY_PARAMS);
IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName())
IndexRequest indexRequest = new IndexRequest(MlConfigIndex.indexName())
.id(Job.documentId(updatedJob.getId()))
.source(updatedSource)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
@ -383,7 +384,7 @@ public class JobConfigProvider {
* @param listener Exists listener
*/
public void jobExists(String jobId, boolean errorIfMissing, ActionListener<Boolean> listener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId));
GetRequest getRequest = new GetRequest(MlConfigIndex.indexName(), Job.documentId(jobId));
getRequest.fetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE);
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@ -431,7 +432,7 @@ public class JobConfigProvider {
sourceBuilder.fetchSource(false);
sourceBuilder.docValueField(Job.ID.getPreferredName(), null);
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder)
.setSize(ids.size())
@ -457,7 +458,7 @@ public class JobConfigProvider {
* @param listener Responds with true if successful else an error
*/
public void markJobAsDeleting(String jobId, ActionListener<Boolean> listener) {
UpdateRequest updateRequest = new UpdateRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId));
UpdateRequest updateRequest = new UpdateRequest(MlConfigIndex.indexName(), Job.documentId(jobId));
updateRequest.retryOnConflict(3);
updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
updateRequest.doc(Collections.singletonMap(Job.DELETING.getPreferredName(), Boolean.TRUE));
@ -524,7 +525,7 @@ public class JobConfigProvider {
sourceBuilder.docValueField(Job.ID.getPreferredName(), null);
sourceBuilder.docValueField(Job.GROUPS.getPreferredName(), null);
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder)
.setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)
@ -582,7 +583,7 @@ public class JobConfigProvider {
SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(buildJobWildcardQuery(tokens, excludeDeleting));
sourceBuilder.sort(Job.ID.getPreferredName());
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder)
.setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)
@ -640,7 +641,7 @@ public class JobConfigProvider {
sourceBuilder.fetchSource(false);
sourceBuilder.docValueField(Job.ID.getPreferredName(), null);
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder)
.setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)
@ -678,7 +679,7 @@ public class JobConfigProvider {
.query(boolQueryBuilder);
sourceBuilder.fetchSource(false);
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setSize(0)
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder).request();
@ -702,7 +703,7 @@ public class JobConfigProvider {
SearchSourceBuilder sourceBuilder = new SearchSourceBuilder()
.query(QueryBuilders.nestedQuery(customRulesPath, QueryBuilders.existsQuery(customRulesPath), ScoreMode.None));
SearchRequest searchRequest = client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
SearchRequest searchRequest = client.prepareSearch(MlConfigIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(sourceBuilder)
.setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)


@ -609,7 +609,7 @@ public class JobResultsProvider {
.add(createDocIdSearch(stateIndex, Quantiles.documentId(jobId)));
for (String filterId : job.getAnalysisConfig().extractReferencedFilters()) {
msearch.add(createDocIdSearch(MlMetaIndex.INDEX_NAME, MlFilter.documentId(filterId)));
msearch.add(createDocIdSearch(MlMetaIndex.indexName(), MlFilter.documentId(filterId)));
}
executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, msearch.request(),
@ -1310,7 +1310,7 @@ public class JobResultsProvider {
}
public void scheduledEvents(ScheduledEventsQueryBuilder query, ActionListener<QueryPage<ScheduledEvent>> handler) {
SearchRequestBuilder request = client.prepareSearch(MlMetaIndex.INDEX_NAME)
SearchRequestBuilder request = client.prepareSearch(MlMetaIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(query.build())
.setTrackTotalHits(true);
@ -1453,7 +1453,7 @@ public class JobResultsProvider {
currentJobs.removeAll(jobIdsToRemove);
Calendar updatedCalendar = new Calendar(calendar.getId(), new ArrayList<>(currentJobs), calendar.getDescription());
UpdateRequest updateRequest = new UpdateRequest(MlMetaIndex.INDEX_NAME, updatedCalendar.documentId());
UpdateRequest updateRequest = new UpdateRequest(MlMetaIndex.indexName(), updatedCalendar.documentId());
updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
@ -1476,7 +1476,7 @@ public class JobResultsProvider {
}
public void calendars(CalendarQueryBuilder queryBuilder, ActionListener<QueryPage<Calendar>> listener) {
SearchRequest searchRequest = client.prepareSearch(MlMetaIndex.INDEX_NAME)
SearchRequest searchRequest = client.prepareSearch(MlMetaIndex.indexName())
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setTrackTotalHits(true)
.setSource(queryBuilder.build()).request();
@ -1517,7 +1517,7 @@ public class JobResultsProvider {
.filter(jId -> jobId.equals(jId) == false)
.collect(Collectors.toList());
Calendar newCalendar = new Calendar(calendar.getId(), ids, calendar.getDescription());
UpdateRequest updateRequest = new UpdateRequest(MlMetaIndex.INDEX_NAME, newCalendar.documentId());
UpdateRequest updateRequest = new UpdateRequest(MlMetaIndex.indexName(), newCalendar.documentId());
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
updateRequest.doc(newCalendar.toXContent(builder, ToXContent.EMPTY_PARAMS));
} catch (IOException e) {
@ -1541,7 +1541,7 @@ public class JobResultsProvider {
}
public void calendar(String calendarId, ActionListener<Calendar> listener) {
GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, Calendar.documentId(calendarId));
GetRequest getRequest = new GetRequest(MlMetaIndex.indexName(), Calendar.documentId(calendarId));
executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, getRequest, new ActionListener<GetResponse>() {
@Override
public void onResponse(GetResponse getDocResponse) {


@ -17,8 +17,8 @@ import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.ml.utils.persistence.SearchAfterDocumentsIterator;
import java.io.IOException;
@ -29,7 +29,7 @@ public class SearchAfterJobsIterator extends SearchAfterDocumentsIterator<Job.Bu
private String lastJobId;
public SearchAfterJobsIterator(OriginSettingClient client) {
super(client, AnomalyDetectorsIndex.configIndexName());
super(client, MlConfigIndex.indexName());
}
@Override


@ -15,6 +15,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.DeleteByQueryAction;
import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.xpack.core.ml.dataframe.analyses.Classification;
@ -109,7 +110,7 @@ public class UnusedStateRemover implements MlDataRemover {
// and remove cluster service as a member altogether.
jobIds.addAll(MlMetadata.getMlMetadata(clusterService.state()).getJobs().keySet());
DocIdBatchedDocumentIterator iterator = new DocIdBatchedDocumentIterator(client, AnomalyDetectorsIndex.configIndexName(),
DocIdBatchedDocumentIterator iterator = new DocIdBatchedDocumentIterator(client, MlConfigIndex.indexName(),
QueryBuilders.termQuery(Job.JOB_TYPE.getPreferredName(), Job.ANOMALY_DETECTOR_JOB_TYPE));
while (iterator.hasNext()) {
Deque<String> docIds = iterator.next();
@ -121,7 +122,7 @@ public class UnusedStateRemover implements MlDataRemover {
private Set<String> getDataFrameAnalyticsJobIds() {
Set<String> jobIds = new HashSet<>();
DocIdBatchedDocumentIterator iterator = new DocIdBatchedDocumentIterator(client, AnomalyDetectorsIndex.configIndexName(),
DocIdBatchedDocumentIterator iterator = new DocIdBatchedDocumentIterator(client, MlConfigIndex.indexName(),
QueryBuilders.termQuery(DataFrameAnalyticsConfig.CONFIG_TYPE.getPreferredName(), DataFrameAnalyticsConfig.TYPE));
while (iterator.hasNext()) {
Deque<String> docIds = iterator.next();
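
The UnusedStateRemover hunks swap the config-index constant for MlConfigIndex.indexName() in both id-collection passes, one per job type. A small sketch of the index name and term queries those passes combine (the wrapper class is hypothetical and the batched-iterator wiring is elided):

    import org.elasticsearch.index.query.QueryBuilder;
    import org.elasticsearch.index.query.QueryBuilders;
    import org.elasticsearch.xpack.core.ml.MlConfigIndex;
    import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig;
    import org.elasticsearch.xpack.core.ml.job.config.Job;

    final class ConfigIndexQueriesSketch {

        private ConfigIndexQueriesSketch() {}

        // The single place the ".ml-config" name now comes from.
        static String configIndex() {
            return MlConfigIndex.indexName();
        }

        // Matches anomaly detection job configuration documents.
        static QueryBuilder anomalyDetectorJobsQuery() {
            return QueryBuilders.termQuery(Job.JOB_TYPE.getPreferredName(), Job.ANOMALY_DETECTOR_JOB_TYPE);
        }

        // Matches data frame analytics configuration documents.
        static QueryBuilder dataFrameAnalyticsQuery() {
            return QueryBuilders.termQuery(
                DataFrameAnalyticsConfig.CONFIG_TYPE.getPreferredName(), DataFrameAnalyticsConfig.TYPE);
        }
    }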


@ -28,6 +28,7 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.action.OpenJobAction;
@ -35,7 +36,6 @@ import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.config.JobTests;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.junit.Before;
import java.net.InetAddress;
@ -142,14 +142,14 @@ public class MlConfigMigrationEligibilityCheckTests extends ESTestCase {
}
private void addMlConfigIndex(Metadata.Builder metadata, RoutingTable.Builder routingTable) {
IndexMetadata.Builder indexMetadata = IndexMetadata.builder(AnomalyDetectorsIndex.configIndexName());
IndexMetadata.Builder indexMetadata = IndexMetadata.builder(MlConfigIndex.indexName());
indexMetadata.settings(Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
);
metadata.put(indexMetadata);
Index index = new Index(AnomalyDetectorsIndex.configIndexName(), "_uuid");
Index index = new Index(MlConfigIndex.indexName(), "_uuid");
ShardId shardId = new ShardId(index, 0);
ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, true, RecoverySource.EmptyStoreRecoverySource.INSTANCE,
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""));


@ -33,6 +33,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetadata.Assignment;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetaIndex;
import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.action.OpenJobAction;
@ -237,9 +238,9 @@ public class TransportOpenJobActionTests extends ESTestCase {
private void addIndices(Metadata.Builder metadata, RoutingTable.Builder routingTable) {
List<String> indices = new ArrayList<>();
indices.add(AnomalyDetectorsIndex.configIndexName());
indices.add(MlConfigIndex.indexName());
indices.add(AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX);
indices.add(MlMetaIndex.INDEX_NAME);
indices.add(MlMetaIndex.indexName());
indices.add(NotificationsIndex.NOTIFICATIONS_INDEX);
indices.add(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT);
for (String indexName : indices) {


@ -21,7 +21,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import java.util.List;
@ -33,7 +33,7 @@ public class TransportStartDataFrameAnalyticsActionTests extends ESTestCase {
public void testVerifyIndicesPrimaryShardsAreActive() {
// At present the only critical index is the config index
String indexName = AnomalyDetectorsIndex.configIndexName();
String indexName = MlConfigIndex.indexName();
Metadata.Builder metadata = Metadata.builder();
RoutingTable.Builder routingTable = RoutingTable.builder();


@ -42,6 +42,7 @@ import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ml.MachineLearningField;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.action.PutJobAction;
import org.elasticsearch.xpack.core.ml.action.UpdateJobAction;
@ -54,7 +55,6 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.config.JobState;
import org.elasticsearch.xpack.core.ml.job.config.MlFilter;
import org.elasticsearch.xpack.core.ml.job.config.RuleScope;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.ml.MachineLearning;
import org.elasticsearch.xpack.ml.MlConfigMigrationEligibilityCheck;
import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzerTests;
@ -219,7 +219,7 @@ public class JobManagerTests extends ESTestCase {
docsAsBytes.add(toBytesReference(indexJobFoo.build()));
MockClientBuilder mockClientBuilder = new MockClientBuilder("cluster-test");
mockClientBuilder.prepareSearch(AnomalyDetectorsIndex.configIndexName(), docsAsBytes);
mockClientBuilder.prepareSearch(MlConfigIndex.indexName(), docsAsBytes);
JobManager jobManager = createJobManager(mockClientBuilder.build());
@ -366,7 +366,7 @@ public class JobManagerTests extends ESTestCase {
}).when(updateJobProcessNotifier).submitJobUpdate(any(), any());
MockClientBuilder mockClientBuilder = new MockClientBuilder("cluster-test");
mockClientBuilder.prepareSearch(AnomalyDetectorsIndex.configIndexName(), docsAsBytes);
mockClientBuilder.prepareSearch(MlConfigIndex.indexName(), docsAsBytes);
JobManager jobManager = createJobManager(mockClientBuilder.build());
MlFilter filter = MlFilter.builder("foo_filter").setItems("a", "b").build();
@ -417,7 +417,7 @@ public class JobManagerTests extends ESTestCase {
when(clusterService.state()).thenReturn(clusterState);
MockClientBuilder mockClientBuilder = new MockClientBuilder("cluster-test");
mockClientBuilder.prepareSearch(AnomalyDetectorsIndex.configIndexName(), docsAsBytes);
mockClientBuilder.prepareSearch(MlConfigIndex.indexName(), docsAsBytes);
JobManager jobManager = createJobManager(mockClientBuilder.build());
MlFilter filter = MlFilter.builder("foo_filter").build();
@ -453,7 +453,7 @@ public class JobManagerTests extends ESTestCase {
when(clusterService.state()).thenReturn(clusterState);
MockClientBuilder mockClientBuilder = new MockClientBuilder("cluster-test");
mockClientBuilder.prepareSearch(AnomalyDetectorsIndex.configIndexName(), docsAsBytes);
mockClientBuilder.prepareSearch(MlConfigIndex.indexName(), docsAsBytes);
JobManager jobManager = createJobManager(mockClientBuilder.build());
MlFilter filter = MlFilter.builder("foo_filter").build();
@ -474,14 +474,14 @@ public class JobManagerTests extends ESTestCase {
Metadata.Builder metadata = Metadata.builder();
RoutingTable.Builder routingTable = RoutingTable.builder();
IndexMetadata.Builder indexMetadata = IndexMetadata.builder(AnomalyDetectorsIndex.configIndexName());
IndexMetadata.Builder indexMetadata = IndexMetadata.builder(MlConfigIndex.indexName());
indexMetadata.settings(Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
);
metadata.put(indexMetadata);
Index index = new Index(AnomalyDetectorsIndex.configIndexName(), "_uuid");
Index index = new Index(MlConfigIndex.indexName(), "_uuid");
ShardId shardId = new ShardId(index, 0);
ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, true, RecoverySource.EmptyStoreRecoverySource.INSTANCE,
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""));
@ -521,7 +521,7 @@ public class JobManagerTests extends ESTestCase {
MockClientBuilder mockClientBuilder = new MockClientBuilder("cluster-test");
// For the JobConfigProvider expand groups search.
// The search will not return any results
mockClientBuilder.prepareSearchFields(AnomalyDetectorsIndex.configIndexName(), Collections.emptyList());
mockClientBuilder.prepareSearchFields(MlConfigIndex.indexName(), Collections.emptyList());
JobManager jobManager = createJobManager(mockClientBuilder.build());
@ -564,7 +564,7 @@ public class JobManagerTests extends ESTestCase {
new DocumentField(Job.ID.getPreferredName(), Collections.singletonList("job-2"))));
mockClientBuilder.prepareSearchFields(AnomalyDetectorsIndex.configIndexName(), fieldHits);
mockClientBuilder.prepareSearchFields(MlConfigIndex.indexName(), fieldHits);
JobManager jobManager = createJobManager(mockClientBuilder.build());
jobManager.updateProcessOnCalendarChanged(Collections.singletonList("group-1"),


@ -19,6 +19,7 @@ import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.xpack.core.ml.MlConfigIndex;
import org.elasticsearch.xpack.core.ml.MlMetaIndex;
import org.elasticsearch.xpack.core.ml.integration.MlRestTestStateCleaner;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
@ -86,10 +87,10 @@ public class XPackRestIT extends ESClientYamlSuiteTestCase {
templates.addAll(
Arrays.asList(
NotificationsIndex.NOTIFICATIONS_INDEX,
MlMetaIndex.INDEX_NAME,
MlMetaIndex.indexName(),
AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX,
AnomalyDetectorsIndex.jobResultsIndexPrefix(),
AnomalyDetectorsIndex.configIndexName(),
MlConfigIndex.indexName(),
TransformInternalIndexConstants.AUDIT_INDEX,
TransformInternalIndexConstants.LATEST_INDEX_NAME
));