[ML] Transition to typeless (mapping) APIs (#39573)

ML has historically used doc as the single mapping type but reindex in 7.x
will change the mapping to _doc. Switching to the typeless APIs handles the
case where the mapping type is either doc or _doc. This change also removes
deprecated typed usages.
This commit is contained in:
David Kyle 2019-03-04 13:52:05 +00:00 committed by GitHub
parent 9ddaabba88
commit a58145f9e6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
52 changed files with 225 additions and 371 deletions

View File

@ -13,6 +13,7 @@ import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import java.io.IOException; import java.io.IOException;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
public final class MlMetaIndex { public final class MlMetaIndex {
/** /**
@ -21,14 +22,12 @@ public final class MlMetaIndex {
*/ */
public static final String INDEX_NAME = ".ml-meta"; public static final String INDEX_NAME = ".ml-meta";
public static final String TYPE = "doc";
private MlMetaIndex() {} private MlMetaIndex() {}
public static XContentBuilder docMapping() throws IOException { public static XContentBuilder docMapping() throws IOException {
XContentBuilder builder = jsonBuilder(); XContentBuilder builder = jsonBuilder();
builder.startObject(); builder.startObject();
builder.startObject(TYPE); builder.startObject(SINGLE_MAPPING_NAME);
ElasticsearchMappings.addMetaInformation(builder); ElasticsearchMappings.addMetaInformation(builder);
ElasticsearchMappings.addDefaultMapping(builder); ElasticsearchMappings.addDefaultMapping(builder);
builder.startObject(ElasticsearchMappings.PROPERTIES) builder.startObject(ElasticsearchMappings.PROPERTIES)

View File

@ -27,6 +27,7 @@ import java.io.IOException;
import java.util.SortedMap; import java.util.SortedMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
@ -71,7 +72,7 @@ public class AnnotationIndex {
CreateIndexRequest createIndexRequest = new CreateIndexRequest(INDEX_NAME); CreateIndexRequest createIndexRequest = new CreateIndexRequest(INDEX_NAME);
try (XContentBuilder annotationsMapping = AnnotationIndex.annotationsMapping()) { try (XContentBuilder annotationsMapping = AnnotationIndex.annotationsMapping()) {
createIndexRequest.mapping(ElasticsearchMappings.DOC_TYPE, annotationsMapping); createIndexRequest.mapping(SINGLE_MAPPING_NAME, annotationsMapping);
createIndexRequest.settings(Settings.builder() createIndexRequest.settings(Settings.builder()
.put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1")
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1")
@ -111,7 +112,7 @@ public class AnnotationIndex {
public static XContentBuilder annotationsMapping() throws IOException { public static XContentBuilder annotationsMapping() throws IOException {
XContentBuilder builder = jsonBuilder() XContentBuilder builder = jsonBuilder()
.startObject() .startObject()
.startObject(ElasticsearchMappings.DOC_TYPE); .startObject(SINGLE_MAPPING_NAME);
ElasticsearchMappings.addMetaInformation(builder); ElasticsearchMappings.addMetaInformation(builder);
builder.startObject(ElasticsearchMappings.PROPERTIES) builder.startObject(ElasticsearchMappings.PROPERTIES)
.startObject(Annotation.ANNOTATION.getPreferredName()) .startObject(Annotation.ANNOTATION.getPreferredName())

View File

@ -18,7 +18,7 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.AliasOrIndex;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.CheckedBiFunction;
import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.Index; import org.elasticsearch.index.Index;
@ -62,6 +62,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
@ -86,8 +87,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
*/ */
public class ElasticsearchMappings { public class ElasticsearchMappings {
public static final String DOC_TYPE = "doc";
/** /**
* String constants used in mappings * String constants used in mappings
*/ */
@ -137,7 +136,7 @@ public class ElasticsearchMappings {
public static XContentBuilder configMapping() throws IOException { public static XContentBuilder configMapping() throws IOException {
XContentBuilder builder = jsonBuilder(); XContentBuilder builder = jsonBuilder();
builder.startObject(); builder.startObject();
builder.startObject(DOC_TYPE); builder.startObject(SINGLE_MAPPING_NAME);
addMetaInformation(builder); addMetaInformation(builder);
addDefaultMapping(builder); addDefaultMapping(builder);
builder.startObject(PROPERTIES); builder.startObject(PROPERTIES);
@ -420,14 +419,14 @@ public class ElasticsearchMappings {
.endObject(); .endObject();
} }
public static XContentBuilder resultsMapping() throws IOException { public static XContentBuilder resultsMapping(String mappingType) throws IOException {
return resultsMapping(Collections.emptyList()); return resultsMapping(mappingType, Collections.emptyList());
} }
public static XContentBuilder resultsMapping(Collection<String> extraTermFields) throws IOException { public static XContentBuilder resultsMapping(String mappingType, Collection<String> extraTermFields) throws IOException {
XContentBuilder builder = jsonBuilder(); XContentBuilder builder = jsonBuilder();
builder.startObject(); builder.startObject();
builder.startObject(DOC_TYPE); builder.startObject(mappingType);
addMetaInformation(builder); addMetaInformation(builder);
addDefaultMapping(builder); addDefaultMapping(builder);
builder.startObject(PROPERTIES); builder.startObject(PROPERTIES);
@ -456,10 +455,11 @@ public class ElasticsearchMappings {
// end properties // end properties
builder.endObject(); builder.endObject();
// end type
builder.endObject();
// end mapping // end mapping
builder.endObject(); builder.endObject();
// end doc
builder.endObject();
return builder; return builder;
} }
@ -575,18 +575,25 @@ public class ElasticsearchMappings {
addModelSizeStatsFieldsToMapping(builder); addModelSizeStatsFieldsToMapping(builder);
} }
public static XContentBuilder termFieldsMapping(String type, Collection<String> termFields) { /**
* Generate a keyword mapping for {@code termFields} for the default type
* {@link org.elasticsearch.index.mapper.MapperService#SINGLE_MAPPING_NAME}
*
* If the returned mapping is used in index creation and the new index has a matching template
* then the mapping type ({@link org.elasticsearch.index.mapper.MapperService#SINGLE_MAPPING_NAME})
* must match the mapping type of the template otherwise the mappings will not be merged correctly.
*
* @param termFields Fields to generate mapping for
* @return The mapping
*/
public static XContentBuilder termFieldsMapping(Collection<String> termFields) {
try { try {
XContentBuilder builder = jsonBuilder().startObject(); XContentBuilder builder = jsonBuilder().startObject();
if (type != null) { builder.startObject(SINGLE_MAPPING_NAME);
builder.startObject(type);
}
builder.startObject(PROPERTIES); builder.startObject(PROPERTIES);
addTermFields(builder, termFields); addTermFields(builder, termFields);
builder.endObject(); builder.endObject();
if (type != null) {
builder.endObject(); builder.endObject();
}
return builder.endObject(); return builder.endObject();
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
@ -872,7 +879,7 @@ public class ElasticsearchMappings {
public static XContentBuilder stateMapping() throws IOException { public static XContentBuilder stateMapping() throws IOException {
XContentBuilder builder = jsonBuilder(); XContentBuilder builder = jsonBuilder();
builder.startObject(); builder.startObject();
builder.startObject(DOC_TYPE); builder.startObject(SINGLE_MAPPING_NAME);
addMetaInformation(builder); addMetaInformation(builder);
builder.field(ENABLED, false); builder.field(ENABLED, false);
builder.endObject(); builder.endObject();
@ -960,8 +967,8 @@ public class ElasticsearchMappings {
} }
public static XContentBuilder auditMessageMapping() throws IOException { public static XContentBuilder auditMessageMapping() throws IOException {
XContentBuilder builder = jsonBuilder().startObject() XContentBuilder builder = jsonBuilder().startObject();
.startObject(AuditMessage.TYPE.getPreferredName()); builder.startObject(SINGLE_MAPPING_NAME);
addMetaInformation(builder); addMetaInformation(builder);
builder.startObject(PROPERTIES) builder.startObject(PROPERTIES)
.startObject(Job.ID.getPreferredName()) .startObject(Job.ID.getPreferredName())
@ -987,6 +994,7 @@ public class ElasticsearchMappings {
.endObject() .endObject()
.endObject() .endObject()
.endObject(); .endObject();
return builder; return builder;
} }
@ -994,12 +1002,12 @@ public class ElasticsearchMappings {
List<String> indicesToUpdate = new ArrayList<>(); List<String> indicesToUpdate = new ArrayList<>();
ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> currentMapping = state.metaData().findMappings(concreteIndices, ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> currentMapping = state.metaData().findMappings(concreteIndices,
new String[] {DOC_TYPE}, MapperPlugin.NOOP_FIELD_FILTER); new String[0], MapperPlugin.NOOP_FIELD_FILTER);
for (String index : concreteIndices) { for (String index : concreteIndices) {
ImmutableOpenMap<String, MappingMetaData> innerMap = currentMapping.get(index); ImmutableOpenMap<String, MappingMetaData> innerMap = currentMapping.get(index);
if (innerMap != null) { if (innerMap != null) {
MappingMetaData metaData = innerMap.get(DOC_TYPE); MappingMetaData metaData = innerMap.valuesIt().next();
try { try {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
Map<String, Object> meta = (Map<String, Object>) metaData.sourceAsMap().get("_meta"); Map<String, Object> meta = (Map<String, Object>) metaData.sourceAsMap().get("_meta");
@ -1038,7 +1046,8 @@ public class ElasticsearchMappings {
return indicesToUpdate.toArray(new String[indicesToUpdate.size()]); return indicesToUpdate.toArray(new String[indicesToUpdate.size()]);
} }
public static void addDocMappingIfMissing(String alias, CheckedSupplier<XContentBuilder, IOException> mappingSupplier, public static void addDocMappingIfMissing(String alias,
CheckedBiFunction<String, Collection<String>, XContentBuilder, IOException> mappingSupplier,
Client client, ClusterState state, ActionListener<Boolean> listener) { Client client, ClusterState state, ActionListener<Boolean> listener) {
AliasOrIndex aliasOrIndex = state.metaData().getAliasAndIndexLookup().get(alias); AliasOrIndex aliasOrIndex = state.metaData().getAliasAndIndexLookup().get(alias);
if (aliasOrIndex == null) { if (aliasOrIndex == null) {
@ -1058,9 +1067,13 @@ public class ElasticsearchMappings {
} }
if (indicesThatRequireAnUpdate.length > 0) { if (indicesThatRequireAnUpdate.length > 0) {
try (XContentBuilder mapping = mappingSupplier.get()) { // Use the mapping type of the first index in the update
IndexMetaData indexMetaData = state.metaData().index(indicesThatRequireAnUpdate[0]);
String mappingType = indexMetaData.mapping().type();
try (XContentBuilder mapping = mappingSupplier.apply(mappingType, Collections.emptyList())) {
PutMappingRequest putMappingRequest = new PutMappingRequest(indicesThatRequireAnUpdate); PutMappingRequest putMappingRequest = new PutMappingRequest(indicesThatRequireAnUpdate);
putMappingRequest.type(DOC_TYPE); putMappingRequest.type(mappingType);
putMappingRequest.source(mapping); putMappingRequest.source(mapping);
executeAsyncWithOrigin(client, ML_ORIGIN, PutMappingAction.INSTANCE, putMappingRequest, executeAsyncWithOrigin(client, ML_ORIGIN, PutMappingAction.INSTANCE, putMappingRequest,
ActionListener.wrap(response -> { ActionListener.wrap(response -> {

View File

@ -22,7 +22,7 @@ import java.util.Date;
import java.util.Objects; import java.util.Objects;
public class AuditMessage implements ToXContentObject, Writeable { public class AuditMessage implements ToXContentObject, Writeable {
public static final ParseField TYPE = new ParseField("audit_message"); private static final ParseField TYPE = new ParseField("audit_message");
public static final ParseField MESSAGE = new ParseField("message"); public static final ParseField MESSAGE = new ParseField("message");
public static final ParseField LEVEL = new ParseField("level"); public static final ParseField LEVEL = new ParseField("level");

View File

@ -45,6 +45,8 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
public class ElasticsearchMappingsTests extends ESTestCase { public class ElasticsearchMappingsTests extends ESTestCase {
@ -117,11 +119,12 @@ public class ElasticsearchMappingsTests extends ESTestCase {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public void testTermFieldMapping() throws IOException { public void testTermFieldMapping() throws IOException {
XContentBuilder builder = ElasticsearchMappings.termFieldsMapping(null, Arrays.asList("apple", "strawberry", XContentBuilder builder = ElasticsearchMappings.termFieldsMapping(Arrays.asList("apple", "strawberry",
AnomalyRecord.BUCKET_SPAN.getPreferredName())); AnomalyRecord.BUCKET_SPAN.getPreferredName()));
XContentParser parser = createParser(builder); XContentParser parser = createParser(builder);
Map<String, Object> properties = (Map<String, Object>) parser.map().get(ElasticsearchMappings.PROPERTIES); Map<String, Object> mapping = (Map<String, Object>) parser.map().get(SINGLE_MAPPING_NAME);
Map<String, Object> properties = (Map<String, Object>) mapping.get(ElasticsearchMappings.PROPERTIES);
Map<String, Object> instanceMapping = (Map<String, Object>) properties.get("apple"); Map<String, Object> instanceMapping = (Map<String, Object>) properties.get("apple");
assertNotNull(instanceMapping); assertNotNull(instanceMapping);
@ -217,7 +220,7 @@ public class ElasticsearchMappingsTests extends ESTestCase {
} }
mapping.put("_meta", meta); mapping.put("_meta", meta);
indexMetaData.putMapping(new MappingMetaData(ElasticsearchMappings.DOC_TYPE, mapping)); indexMetaData.putMapping(new MappingMetaData("_doc", mapping));
metaDataBuilder.put(indexMetaData); metaDataBuilder.put(indexMetaData);
} }
@ -230,7 +233,7 @@ public class ElasticsearchMappingsTests extends ESTestCase {
private Set<String> collectResultsDocFieldNames() throws IOException { private Set<String> collectResultsDocFieldNames() throws IOException {
// Only the mappings for the results index should be added below. Do NOT add mappings for other indexes here. // Only the mappings for the results index should be added below. Do NOT add mappings for other indexes here.
return collectFieldNames(ElasticsearchMappings.resultsMapping()); return collectFieldNames(ElasticsearchMappings.resultsMapping("_doc"));
} }
private Set<String> collectConfigDocFieldNames() throws IOException { private Set<String> collectConfigDocFieldNames() throws IOException {

View File

@ -26,6 +26,7 @@ import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Locale; import java.util.Locale;
import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
@ -35,38 +36,37 @@ import static org.hamcrest.Matchers.is;
public class CategorizationIT extends MlNativeAutodetectIntegTestCase { public class CategorizationIT extends MlNativeAutodetectIntegTestCase {
private static final String DATA_INDEX = "log-data"; private static final String DATA_INDEX = "log-data";
private static final String DATA_TYPE = "log";
private long nowMillis; private long nowMillis;
@Before @Before
public void setUpData() { public void setUpData() {
client().admin().indices().prepareCreate(DATA_INDEX) client().admin().indices().prepareCreate(DATA_INDEX)
.addMapping(DATA_TYPE, "time", "type=date,format=epoch_millis", .addMapping(SINGLE_MAPPING_NAME, "time", "type=date,format=epoch_millis",
"msg", "type=text") "msg", "type=text")
.get(); .get();
nowMillis = System.currentTimeMillis(); nowMillis = System.currentTimeMillis();
BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk();
IndexRequest indexRequest = new IndexRequest(DATA_INDEX, DATA_TYPE); IndexRequest indexRequest = new IndexRequest(DATA_INDEX);
indexRequest.source("time", nowMillis - TimeValue.timeValueHours(2).millis(), indexRequest.source("time", nowMillis - TimeValue.timeValueHours(2).millis(),
"msg", "Node 1 started"); "msg", "Node 1 started");
bulkRequestBuilder.add(indexRequest); bulkRequestBuilder.add(indexRequest);
indexRequest = new IndexRequest(DATA_INDEX, DATA_TYPE); indexRequest = new IndexRequest(DATA_INDEX);
indexRequest.source("time", nowMillis - TimeValue.timeValueHours(2).millis() + 1, indexRequest.source("time", nowMillis - TimeValue.timeValueHours(2).millis() + 1,
"msg", "Failed to shutdown [error org.aaaa.bbbb.Cccc line 54 caused " + "msg", "Failed to shutdown [error org.aaaa.bbbb.Cccc line 54 caused " +
"by foo exception]"); "by foo exception]");
bulkRequestBuilder.add(indexRequest); bulkRequestBuilder.add(indexRequest);
indexRequest = new IndexRequest(DATA_INDEX, DATA_TYPE); indexRequest = new IndexRequest(DATA_INDEX);
indexRequest.source("time", nowMillis - TimeValue.timeValueHours(1).millis(), indexRequest.source("time", nowMillis - TimeValue.timeValueHours(1).millis(),
"msg", "Node 2 started"); "msg", "Node 2 started");
bulkRequestBuilder.add(indexRequest); bulkRequestBuilder.add(indexRequest);
indexRequest = new IndexRequest(DATA_INDEX, DATA_TYPE); indexRequest = new IndexRequest(DATA_INDEX);
indexRequest.source("time", nowMillis - TimeValue.timeValueHours(1).millis() + 1, indexRequest.source("time", nowMillis - TimeValue.timeValueHours(1).millis() + 1,
"msg", "Failed to shutdown [error but this time completely different]"); "msg", "Failed to shutdown [error but this time completely different]");
bulkRequestBuilder.add(indexRequest); bulkRequestBuilder.add(indexRequest);
indexRequest = new IndexRequest(DATA_INDEX, DATA_TYPE); indexRequest = new IndexRequest(DATA_INDEX);
indexRequest.source("time", nowMillis, "msg", "Node 3 started"); indexRequest.source("time", nowMillis, "msg", "Node 3 started");
bulkRequestBuilder.add(indexRequest); bulkRequestBuilder.add(indexRequest);

View File

@ -308,7 +308,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
client().performRequest(createJobRequest); client().performRequest(createJobRequest);
String datafeedId = jobId + "-datafeed"; String datafeedId = jobId + "-datafeed";
new DatafeedBuilder(datafeedId, jobId, "nested-data", "response").build(); new DatafeedBuilder(datafeedId, jobId, "nested-data").build();
openJob(client(), jobId); openJob(client(), jobId);
startDatafeedAndWaitUntilStopped(datafeedId); startDatafeedAndWaitUntilStopped(datafeedId);
@ -351,7 +351,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
// create a datafeed they DON'T have permission to search the index the datafeed is // create a datafeed they DON'T have permission to search the index the datafeed is
// configured to read // configured to read
ResponseException e = expectThrows(ResponseException.class, () -> ResponseException e = expectThrows(ResponseException.class, () ->
new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs", "response") new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs")
.setAuthHeader(BASIC_AUTH_VALUE_ML_ADMIN) .setAuthHeader(BASIC_AUTH_VALUE_ML_ADMIN)
.build()); .build());
@ -419,7 +419,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
ResponseException e = expectThrows(ResponseException.class, () -> ResponseException e = expectThrows(ResponseException.class, () ->
new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs-rollup", "doc") new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs-rollup")
.setAggregations(aggregations) .setAggregations(aggregations)
.setAuthHeader(BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS) //want to search, but no admin access .setAuthHeader(BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS) //want to search, but no admin access
.build()); .build());
@ -449,7 +449,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
client().performRequest(createJobRequest); client().performRequest(createJobRequest);
String datafeedId = "datafeed-" + jobId; String datafeedId = "datafeed-" + jobId;
new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs", "response").build(); new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs").build();
// This should be disallowed, because ml_admin is trying to preview a datafeed created by // This should be disallowed, because ml_admin is trying to preview a datafeed created by
// by another user (x_pack_rest_user in this case) that will reveal the content of an index they // by another user (x_pack_rest_user in this case) that will reveal the content of an index they
@ -490,7 +490,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}}," + "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}},"
+ "\"airline\":{\"terms\":{\"field\":\"airline\",\"size\":10}," + "\"airline\":{\"terms\":{\"field\":\"airline\",\"size\":10},"
+ " \"aggregations\":{\"responsetime\":{\"avg\":{\"field\":\"responsetime\"}}}}}}}"; + " \"aggregations\":{\"responsetime\":{\"avg\":{\"field\":\"responsetime\"}}}}}}}";
new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs", "response").setAggregations(aggregations).build(); new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs").setAggregations(aggregations).build();
openJob(client(), jobId); openJob(client(), jobId);
startDatafeedAndWaitUntilStopped(datafeedId); startDatafeedAndWaitUntilStopped(datafeedId);
@ -529,7 +529,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}}," + "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}},"
+ "\"airline\":{\"terms\":{\"field\":\"airline\",\"size\":10}," + "\"airline\":{\"terms\":{\"field\":\"airline\",\"size\":10},"
+ " \"aggregations\":{\"responsetime\":{\"avg\":{\"field\":\"responsetime\"}}}}}}}"; + " \"aggregations\":{\"responsetime\":{\"avg\":{\"field\":\"responsetime\"}}}}}}}";
new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs", "response").setAggregations(aggregations).build(); new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs").setAggregations(aggregations).build();
openJob(client(), jobId); openJob(client(), jobId);
startDatafeedAndWaitUntilStopped(datafeedId); startDatafeedAndWaitUntilStopped(datafeedId);
@ -568,7 +568,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ "\"aggs\": {\"timestamp\":{\"max\":{\"field\":\"timestamp\"}}," + "\"aggs\": {\"timestamp\":{\"max\":{\"field\":\"timestamp\"}},"
+ "\"bytes-delta\":{\"derivative\":{\"buckets_path\":\"avg_bytes_out\"}}," + "\"bytes-delta\":{\"derivative\":{\"buckets_path\":\"avg_bytes_out\"}},"
+ "\"avg_bytes_out\":{\"avg\":{\"field\":\"network_bytes_out\"}} }}}}}"; + "\"avg_bytes_out\":{\"avg\":{\"field\":\"network_bytes_out\"}} }}}}}";
new DatafeedBuilder(datafeedId, jobId, "network-data", "doc") new DatafeedBuilder(datafeedId, jobId, "network-data")
.setAggregations(aggregations) .setAggregations(aggregations)
.setChunkingTimespan("300s") .setChunkingTimespan("300s")
.build(); .build();
@ -614,7 +614,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ "\"aggs\": {\"timestamp\":{\"max\":{\"field\":\"timestamp\"}}," + "\"aggs\": {\"timestamp\":{\"max\":{\"field\":\"timestamp\"}},"
+ "\"bytes-delta\":{\"derivative\":{\"buckets_path\":\"avg_bytes_out\"}}," + "\"bytes-delta\":{\"derivative\":{\"buckets_path\":\"avg_bytes_out\"}},"
+ "\"avg_bytes_out\":{\"avg\":{\"field\":\"network_bytes_out\"}} }}}}}"; + "\"avg_bytes_out\":{\"avg\":{\"field\":\"network_bytes_out\"}} }}}}}";
new DatafeedBuilder(datafeedId, jobId, "network-data", "doc") new DatafeedBuilder(datafeedId, jobId, "network-data")
.setAggregations(aggregations) .setAggregations(aggregations)
.setChunkingTimespan("300s") .setChunkingTimespan("300s")
.build(); .build();
@ -658,7 +658,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ "\"avg_bytes_out\":{\"avg\":{\"field\":\"network_bytes_out\"}} }}}}}"; + "\"avg_bytes_out\":{\"avg\":{\"field\":\"network_bytes_out\"}} }}}}}";
// At the time we create the datafeed the user can access the network-data index that we have access to // At the time we create the datafeed the user can access the network-data index that we have access to
new DatafeedBuilder(datafeedId, jobId, "network-data", "doc") new DatafeedBuilder(datafeedId, jobId, "network-data")
.setAggregations(aggregations) .setAggregations(aggregations)
.setChunkingTimespan("300s") .setChunkingTimespan("300s")
.setAuthHeader(BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS) .setAuthHeader(BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS)
@ -712,7 +712,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ "\"airlines\":{\"terms\":{\"field\":\"airline.keyword\",\"size\":10}}," + "\"airlines\":{\"terms\":{\"field\":\"airline.keyword\",\"size\":10}},"
+ "\"percentile95_airlines_count\":{\"percentiles_bucket\":" + + "\"percentile95_airlines_count\":{\"percentiles_bucket\":" +
"{\"buckets_path\":\"airlines._count\", \"percents\": [95]}}}}}"; "{\"buckets_path\":\"airlines._count\", \"percents\": [95]}}}}}";
new DatafeedBuilder(datafeedId, jobId, "airline-data", "response").setAggregations(aggregations).build(); new DatafeedBuilder(datafeedId, jobId, "airline-data").setAggregations(aggregations).build();
openJob(client(), jobId); openJob(client(), jobId);
@ -801,7 +801,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ "\"aggregations\":{" + "\"aggregations\":{"
+ "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}}," + "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}},"
+ "\"responsetime\":{\"avg\":{\"field\":\"responsetime\"}}}}}"; + "\"responsetime\":{\"avg\":{\"field\":\"responsetime\"}}}}}";
new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs-rollup", "response").setAggregations(aggregations).build(); new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs-rollup").setAggregations(aggregations).build();
openJob(client(), jobId); openJob(client(), jobId);
startDatafeedAndWaitUntilStopped(datafeedId); startDatafeedAndWaitUntilStopped(datafeedId);
@ -872,7 +872,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
// At the time we create the datafeed the user can access the network-data index that we have access to // At the time we create the datafeed the user can access the network-data index that we have access to
new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs-rollup", "doc") new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs-rollup")
.setAggregations(aggregations) .setAggregations(aggregations)
.setChunkingTimespan("300s") .setChunkingTimespan("300s")
.setAuthHeader(BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS) .setAuthHeader(BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS)
@ -919,7 +919,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}}," + "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}},"
+ "\"airlineFilter\":{\"filter\":{\"term\": {\"airline\":\"AAA\"}}," + "\"airlineFilter\":{\"filter\":{\"term\": {\"airline\":\"AAA\"}},"
+ " \"aggregations\":{\"responsetime\":{\"avg\":{\"field\":\"responsetime\"}}}}}}}"; + " \"aggregations\":{\"responsetime\":{\"avg\":{\"field\":\"responsetime\"}}}}}}}";
new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs", "response").setAggregations(aggregations).build(); new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs").setAggregations(aggregations).build();
openJob(client(), jobId); openJob(client(), jobId);
startDatafeedAndWaitUntilStopped(datafeedId); startDatafeedAndWaitUntilStopped(datafeedId);
@ -936,7 +936,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
String jobId = "job-realtime-1"; String jobId = "job-realtime-1";
createJob(jobId, "airline"); createJob(jobId, "airline");
String datafeedId = jobId + "-datafeed"; String datafeedId = jobId + "-datafeed";
new DatafeedBuilder(datafeedId, jobId, "airline-data", "response").build(); new DatafeedBuilder(datafeedId, jobId, "airline-data").build();
openJob(client(), jobId); openJob(client(), jobId);
Request startRequest = new Request("POST", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start"); Request startRequest = new Request("POST", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start");
@ -994,7 +994,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
String jobId = "job-realtime-2"; String jobId = "job-realtime-2";
createJob(jobId, "airline"); createJob(jobId, "airline");
String datafeedId = jobId + "-datafeed"; String datafeedId = jobId + "-datafeed";
new DatafeedBuilder(datafeedId, jobId, "airline-data", "response").build(); new DatafeedBuilder(datafeedId, jobId, "airline-data").build();
openJob(client(), jobId); openJob(client(), jobId);
Request startRequest = new Request("POST", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start"); Request startRequest = new Request("POST", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start");
@ -1059,7 +1059,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
public void execute() throws Exception { public void execute() throws Exception {
createJob(jobId, airlineVariant); createJob(jobId, airlineVariant);
String datafeedId = "datafeed-" + jobId; String datafeedId = "datafeed-" + jobId;
new DatafeedBuilder(datafeedId, jobId, dataIndex, "response") new DatafeedBuilder(datafeedId, jobId, dataIndex)
.setScriptedFields(addScriptedFields ? .setScriptedFields(addScriptedFields ?
"{\"airline\":{\"script\":{\"lang\":\"painless\",\"inline\":\"doc['airline'].value\"}}}" : null) "{\"airline\":{\"script\":{\"lang\":\"painless\",\"inline\":\"doc['airline'].value\"}}}" : null)
.build(); .build();
@ -1159,18 +1159,16 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
String datafeedId; String datafeedId;
String jobId; String jobId;
String index; String index;
String type;
boolean source; boolean source;
String scriptedFields; String scriptedFields;
String aggregations; String aggregations;
String authHeader = BASIC_AUTH_VALUE_SUPER_USER; String authHeader = BASIC_AUTH_VALUE_SUPER_USER;
String chunkingTimespan; String chunkingTimespan;
DatafeedBuilder(String datafeedId, String jobId, String index, String type) { DatafeedBuilder(String datafeedId, String jobId, String index) {
this.datafeedId = datafeedId; this.datafeedId = datafeedId;
this.jobId = jobId; this.jobId = jobId;
this.index = index; this.index = index;
this.type = type;
} }
DatafeedBuilder setSource(boolean enableSource) { DatafeedBuilder setSource(boolean enableSource) {

View File

@ -40,6 +40,7 @@ import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@ -49,12 +50,11 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo;
public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase { public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase {
private static final String DATA_INDEX = "delete-expired-data-test-data"; private static final String DATA_INDEX = "delete-expired-data-test-data";
private static final String DATA_TYPE = "doc";
@Before @Before
public void setUpData() throws IOException { public void setUpData() throws IOException {
client().admin().indices().prepareCreate(DATA_INDEX) client().admin().indices().prepareCreate(DATA_INDEX)
.addMapping(DATA_TYPE, "time", "type=date,format=epoch_millis") .addMapping(SINGLE_MAPPING_NAME, "time", "type=date,format=epoch_millis")
.get(); .get();
// We are going to create data for last 2 days // We are going to create data for last 2 days
@ -68,7 +68,7 @@ public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase {
long timestamp = nowMillis - TimeValue.timeValueHours(totalBuckets - bucket).getMillis(); long timestamp = nowMillis - TimeValue.timeValueHours(totalBuckets - bucket).getMillis();
int bucketRate = bucket == anomalousBucket ? anomalousRate : normalRate; int bucketRate = bucket == anomalousBucket ? anomalousRate : normalRate;
for (int point = 0; point < bucketRate; point++) { for (int point = 0; point < bucketRate; point++) {
IndexRequest indexRequest = new IndexRequest(DATA_INDEX, DATA_TYPE); IndexRequest indexRequest = new IndexRequest(DATA_INDEX);
indexRequest.source("time", timestamp); indexRequest.source("time", timestamp);
bulkRequestBuilder.add(indexRequest); bulkRequestBuilder.add(indexRequest);
} }
@ -98,7 +98,7 @@ public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase {
bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (int i = 0; i < 10010; i++) { for (int i = 0; i < 10010; i++) {
String docId = "non_existing_job_" + randomFrom("model_state_1234567#" + i, "quantiles", "categorizer_state#" + i); String docId = "non_existing_job_" + randomFrom("model_state_1234567#" + i, "quantiles", "categorizer_state#" + i);
IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.jobStateIndexWriteAlias(), "doc", docId); IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.jobStateIndexWriteAlias()).id(docId);
indexRequest.source(Collections.emptyMap()); indexRequest.source(Collections.emptyMap());
bulkRequestBuilder.add(indexRequest); bulkRequestBuilder.add(indexRequest);
} }
@ -145,7 +145,7 @@ public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase {
// Update snapshot timestamp to force it out of snapshot retention window // Update snapshot timestamp to force it out of snapshot retention window
String snapshotUpdate = "{ \"timestamp\": " + oneDayAgo + "}"; String snapshotUpdate = "{ \"timestamp\": " + oneDayAgo + "}";
UpdateRequest updateSnapshotRequest = new UpdateRequest(".ml-anomalies-" + job.getId(), "doc", snapshotDocId); UpdateRequest updateSnapshotRequest = new UpdateRequest(".ml-anomalies-" + job.getId(), snapshotDocId);
updateSnapshotRequest.doc(snapshotUpdate.getBytes(StandardCharsets.UTF_8), XContentType.JSON); updateSnapshotRequest.doc(snapshotUpdate.getBytes(StandardCharsets.UTF_8), XContentType.JSON);
client().execute(UpdateAction.INSTANCE, updateSnapshotRequest).get(); client().execute(UpdateAction.INSTANCE, updateSnapshotRequest).get();

View File

@ -28,6 +28,7 @@ import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
@ -36,12 +37,11 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo;
public class ModelPlotsIT extends MlNativeAutodetectIntegTestCase { public class ModelPlotsIT extends MlNativeAutodetectIntegTestCase {
private static final String DATA_INDEX = "model-plots-test-data"; private static final String DATA_INDEX = "model-plots-test-data";
private static final String DATA_TYPE = "doc";
@Before @Before
public void setUpData() { public void setUpData() {
client().admin().indices().prepareCreate(DATA_INDEX) client().admin().indices().prepareCreate(DATA_INDEX)
.addMapping(DATA_TYPE, "time", "type=date,format=epoch_millis", "user", "type=keyword") .addMapping(SINGLE_MAPPING_NAME, "time", "type=date,format=epoch_millis", "user", "type=keyword")
.get(); .get();
List<String> users = Arrays.asList("user_1", "user_2", "user_3"); List<String> users = Arrays.asList("user_1", "user_2", "user_3");
@ -53,7 +53,7 @@ public class ModelPlotsIT extends MlNativeAutodetectIntegTestCase {
for (int bucket = 0; bucket < totalBuckets; bucket++) { for (int bucket = 0; bucket < totalBuckets; bucket++) {
long timestamp = nowMillis - TimeValue.timeValueHours(totalBuckets - bucket).getMillis(); long timestamp = nowMillis - TimeValue.timeValueHours(totalBuckets - bucket).getMillis();
for (String user : users) { for (String user : users) {
IndexRequest indexRequest = new IndexRequest(DATA_INDEX, DATA_TYPE); IndexRequest indexRequest = new IndexRequest(DATA_INDEX);
indexRequest.source("time", timestamp, "user", user); indexRequest.source("time", timestamp, "user", user);
bulkRequestBuilder.add(indexRequest); bulkRequestBuilder.add(indexRequest);
} }

View File

@ -111,7 +111,6 @@ import org.elasticsearch.xpack.core.ml.action.ValidateJobConfigAction;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import org.elasticsearch.xpack.core.ml.notifications.AuditMessage;
import org.elasticsearch.xpack.core.ml.notifications.AuditorField; import org.elasticsearch.xpack.core.ml.notifications.AuditorField;
import org.elasticsearch.xpack.core.template.TemplateUtils; import org.elasticsearch.xpack.core.template.TemplateUtils;
import org.elasticsearch.xpack.ml.action.TransportCloseJobAction; import org.elasticsearch.xpack.ml.action.TransportCloseJobAction;
@ -250,6 +249,7 @@ import java.util.function.Supplier;
import java.util.function.UnaryOperator; import java.util.function.UnaryOperator;
import static java.util.Collections.emptyList; import static java.util.Collections.emptyList;
import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlugin, PersistentTaskPlugin { public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlugin, PersistentTaskPlugin {
public static final String NAME = "ml"; public static final String NAME = "ml";
@ -650,7 +650,7 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu
try (XContentBuilder auditMapping = ElasticsearchMappings.auditMessageMapping()) { try (XContentBuilder auditMapping = ElasticsearchMappings.auditMessageMapping()) {
IndexTemplateMetaData notificationMessageTemplate = IndexTemplateMetaData.builder(AuditorField.NOTIFICATIONS_INDEX) IndexTemplateMetaData notificationMessageTemplate = IndexTemplateMetaData.builder(AuditorField.NOTIFICATIONS_INDEX)
.putMapping(AuditMessage.TYPE.getPreferredName(), Strings.toString(auditMapping)) .putMapping(SINGLE_MAPPING_NAME, Strings.toString(auditMapping))
.patterns(Collections.singletonList(AuditorField.NOTIFICATIONS_INDEX)) .patterns(Collections.singletonList(AuditorField.NOTIFICATIONS_INDEX))
.version(Version.CURRENT.id) .version(Version.CURRENT.id)
.settings(Settings.builder() .settings(Settings.builder()
@ -675,7 +675,7 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu
.put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1")
.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting)) .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting))
.version(Version.CURRENT.id) .version(Version.CURRENT.id)
.putMapping(MlMetaIndex.TYPE, Strings.toString(docMapping)) .putMapping(SINGLE_MAPPING_NAME, Strings.toString(docMapping))
.build(); .build();
templates.put(MlMetaIndex.INDEX_NAME, metaTemplate); templates.put(MlMetaIndex.INDEX_NAME, metaTemplate);
} catch (IOException e) { } catch (IOException e) {
@ -694,7 +694,7 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu
.put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), .put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(),
AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)) AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW))
.version(Version.CURRENT.id) .version(Version.CURRENT.id)
.putMapping(ElasticsearchMappings.DOC_TYPE, Strings.toString(configMapping)) .putMapping(SINGLE_MAPPING_NAME, Strings.toString(configMapping))
.build(); .build();
templates.put(AnomalyDetectorsIndex.configIndexName(), configTemplate); templates.put(AnomalyDetectorsIndex.configIndexName(), configTemplate);
} catch (IOException e) { } catch (IOException e) {
@ -708,15 +708,16 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu
.settings(Settings.builder() .settings(Settings.builder()
.put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1")
.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting)) .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting))
.putMapping(ElasticsearchMappings.DOC_TYPE, Strings.toString(stateMapping)) .putMapping(SINGLE_MAPPING_NAME, Strings.toString(stateMapping))
.version(Version.CURRENT.id) .version(Version.CURRENT.id)
.build(); .build();
templates.put(AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX, stateTemplate); templates.put(AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX, stateTemplate);
} catch (IOException e) { } catch (IOException e) {
logger.error("Error loading the template for the " + AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX + " index", e); logger.error("Error loading the template for the " + AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX + " index", e);
} }
try (XContentBuilder docMapping = ElasticsearchMappings.resultsMapping()) { try (XContentBuilder docMapping = ElasticsearchMappings.resultsMapping(SINGLE_MAPPING_NAME)) {
IndexTemplateMetaData jobResultsTemplate = IndexTemplateMetaData.builder(AnomalyDetectorsIndex.jobResultsIndexPrefix()) IndexTemplateMetaData jobResultsTemplate = IndexTemplateMetaData.builder(AnomalyDetectorsIndex.jobResultsIndexPrefix())
.patterns(Collections.singletonList(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*")) .patterns(Collections.singletonList(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*"))
.settings(Settings.builder() .settings(Settings.builder()
@ -728,7 +729,7 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu
.put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), "async") .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), "async")
// set the default all search field // set the default all search field
.put(IndexSettings.DEFAULT_FIELD_SETTING.getKey(), ElasticsearchMappings.ALL_FIELD_VALUES)) .put(IndexSettings.DEFAULT_FIELD_SETTING.getKey(), ElasticsearchMappings.ALL_FIELD_VALUES))
.putMapping(ElasticsearchMappings.DOC_TYPE, Strings.toString(docMapping)) .putMapping(SINGLE_MAPPING_NAME, Strings.toString(docMapping))
.version(Version.CURRENT.id) .version(Version.CURRENT.id)
.build(); .build();
templates.put(AnomalyDetectorsIndex.jobResultsIndexPrefix(), jobResultsTemplate); templates.put(AnomalyDetectorsIndex.jobResultsIndexPrefix(), jobResultsTemplate);

View File

@ -17,7 +17,6 @@ import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.Client; import org.elasticsearch.client.Client;
@ -66,6 +65,7 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
@ -414,7 +414,7 @@ public class MlConfigMigrator {
} }
private IndexRequest indexRequest(ToXContentObject source, String documentId, ToXContent.Params params) { private IndexRequest indexRequest(ToXContentObject source, String documentId, ToXContent.Params params) {
IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName(), ElasticsearchMappings.DOC_TYPE, documentId); IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName()).id(documentId);
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
indexRequest.source(source.toXContent(builder, params)); indexRequest.source(source.toXContent(builder, params));
@ -439,9 +439,9 @@ public class MlConfigMigrator {
logger.debug("taking a snapshot of ml_metadata"); logger.debug("taking a snapshot of ml_metadata");
String documentId = "ml-config"; String documentId = "ml-config";
IndexRequestBuilder indexRequest = client.prepareIndex(AnomalyDetectorsIndex.jobStateIndexWriteAlias(), IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.jobStateIndexWriteAlias())
ElasticsearchMappings.DOC_TYPE, documentId) .id(documentId)
.setOpType(DocWriteRequest.OpType.CREATE); .opType(DocWriteRequest.OpType.CREATE);
ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.FOR_INTERNAL_STORAGE, "true")); ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.FOR_INTERNAL_STORAGE, "true"));
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
@ -449,7 +449,7 @@ public class MlConfigMigrator {
mlMetadata.toXContent(builder, params); mlMetadata.toXContent(builder, params);
builder.endObject(); builder.endObject();
indexRequest.setSource(builder); indexRequest.source(builder);
} catch (IOException e) { } catch (IOException e) {
logger.error("failed to serialise ml_metadata", e); logger.error("failed to serialise ml_metadata", e);
listener.onFailure(e); listener.onFailure(e);
@ -458,7 +458,7 @@ public class MlConfigMigrator {
AnomalyDetectorsIndex.createStateIndexAndAliasIfNecessary(client, clusterService.state(), ActionListener.wrap( AnomalyDetectorsIndex.createStateIndexAndAliasIfNecessary(client, clusterService.state(), ActionListener.wrap(
r -> { r -> {
executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, indexRequest.request(), executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, indexRequest,
ActionListener.<IndexResponse>wrap( ActionListener.<IndexResponse>wrap(
indexResponse -> { indexResponse -> {
listener.onResponse(indexResponse.getResult() == DocWriteResponse.Result.CREATED); listener.onResponse(indexResponse.getResult() == DocWriteResponse.Result.CREATED);
@ -489,7 +489,7 @@ public class MlConfigMigrator {
.put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1")
.put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW) .put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)
); );
createIndexRequest.mapping(ElasticsearchMappings.DOC_TYPE, ElasticsearchMappings.configMapping()); createIndexRequest.mapping(SINGLE_MAPPING_NAME, ElasticsearchMappings.configMapping());
} catch (Exception e) { } catch (Exception e) {
logger.error("error writing the .ml-config mappings", e); logger.error("error writing the .ml-config mappings", e);
listener.onFailure(e); listener.onFailure(e);

View File

@ -55,7 +55,7 @@ public class TransportDeleteCalendarEventAction extends HandledTransportAction<D
ActionListener<Calendar> calendarListener = ActionListener.wrap( ActionListener<Calendar> calendarListener = ActionListener.wrap(
calendar -> { calendar -> {
GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, eventId); GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, eventId);
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, ActionListener.wrap( executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, ActionListener.wrap(
getResponse -> { getResponse -> {
if (getResponse.isExists() == false) { if (getResponse.isExists() == false) {
@ -89,7 +89,7 @@ public class TransportDeleteCalendarEventAction extends HandledTransportAction<D
} }
private void deleteEvent(String eventId, Calendar calendar, ActionListener<AcknowledgedResponse> listener) { private void deleteEvent(String eventId, Calendar calendar, ActionListener<AcknowledgedResponse> listener) {
DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, eventId); DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.INDEX_NAME, eventId);
deleteRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); deleteRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, deleteRequest, executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, deleteRequest,

View File

@ -84,8 +84,7 @@ public class TransportDeleteFilterAction extends HandledTransportAction<DeleteFi
} }
private void deleteFilter(String filterId, ActionListener<AcknowledgedResponse> listener) { private void deleteFilter(String filterId, ActionListener<AcknowledgedResponse> listener) {
DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.INDEX_NAME, MlFilter.documentId(filterId));
MlFilter.documentId(filterId));
BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
bulkRequestBuilder.add(deleteRequest); bulkRequestBuilder.add(deleteRequest);
bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);

View File

@ -24,7 +24,6 @@ import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.FinalizeJobExecutionAction; import org.elasticsearch.xpack.core.ml.action.FinalizeJobExecutionAction;
import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MachineLearning;
import org.elasticsearch.xpack.ml.utils.VoidChainTaskExecutor; import org.elasticsearch.xpack.ml.utils.VoidChainTaskExecutor;
@ -71,8 +70,7 @@ public class TransportFinalizeJobExecutionAction extends TransportMasterNodeActi
Map<String, Object> update = Collections.singletonMap(Job.FINISHED_TIME.getPreferredName(), new Date()); Map<String, Object> update = Collections.singletonMap(Job.FINISHED_TIME.getPreferredName(), new Date());
for (String jobId: request.getJobIds()) { for (String jobId: request.getJobIds()) {
UpdateRequest updateRequest = new UpdateRequest(AnomalyDetectorsIndex.configIndexName(), UpdateRequest updateRequest = new UpdateRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId));
ElasticsearchMappings.DOC_TYPE, Job.documentId(jobId));
updateRequest.retryOnConflict(3); updateRequest.retryOnConflict(3);
updateRequest.doc(update); updateRequest.doc(update);
updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);

View File

@ -68,7 +68,7 @@ public class TransportGetFiltersAction extends HandledTransportAction<GetFilters
} }
private void getFilter(String filterId, ActionListener<GetFiltersAction.Response> listener) { private void getFilter(String filterId, ActionListener<GetFiltersAction.Response> listener) {
GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, MlFilter.documentId(filterId)); GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlFilter.documentId(filterId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override @Override
public void onResponse(GetResponse getDocResponse) { public void onResponse(GetResponse getDocResponse) {

View File

@ -62,7 +62,7 @@ public class TransportPostCalendarEventsAction extends HandledTransportAction<Po
BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
for (ScheduledEvent event: events) { for (ScheduledEvent event: events) {
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE); IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME);
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
indexRequest.source(event.toXContent(builder, indexRequest.source(event.toXContent(builder,
new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.INCLUDE_TYPE, new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.INCLUDE_TYPE,

View File

@ -49,7 +49,7 @@ public class TransportPutCalendarAction extends HandledTransportAction<PutCalend
protected void doExecute(Task task, PutCalendarAction.Request request, ActionListener<PutCalendarAction.Response> listener) { protected void doExecute(Task task, PutCalendarAction.Request request, ActionListener<PutCalendarAction.Response> listener) {
Calendar calendar = request.getCalendar(); Calendar calendar = request.getCalendar();
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, calendar.documentId()); IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME).id(calendar.documentId());
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
indexRequest.source(calendar.toXContent(builder, indexRequest.source(calendar.toXContent(builder,
new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.INCLUDE_TYPE, "true")))); new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.INCLUDE_TYPE, "true"))));

View File

@ -48,7 +48,7 @@ public class TransportPutFilterAction extends HandledTransportAction<PutFilterAc
@Override @Override
protected void doExecute(Task task, PutFilterAction.Request request, ActionListener<PutFilterAction.Response> listener) { protected void doExecute(Task task, PutFilterAction.Request request, ActionListener<PutFilterAction.Response> listener) {
MlFilter filter = request.getFilter(); MlFilter filter = request.getFilter();
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filter.documentId()); IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME).id(filter.documentId());
indexRequest.opType(DocWriteRequest.OpType.CREATE); indexRequest.opType(DocWriteRequest.OpType.CREATE);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {

View File

@ -103,7 +103,7 @@ public class TransportUpdateFilterAction extends HandledTransportAction<UpdateFi
private void indexUpdatedFilter(MlFilter filter, final long seqNo, final long primaryTerm, private void indexUpdatedFilter(MlFilter filter, final long seqNo, final long primaryTerm,
UpdateFilterAction.Request request, UpdateFilterAction.Request request,
ActionListener<PutFilterAction.Response> listener) { ActionListener<PutFilterAction.Response> listener) {
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filter.documentId()); IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME).id(filter.documentId());
indexRequest.setIfSeqNo(seqNo); indexRequest.setIfSeqNo(seqNo);
indexRequest.setIfPrimaryTerm(primaryTerm); indexRequest.setIfPrimaryTerm(primaryTerm);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
@ -139,7 +139,7 @@ public class TransportUpdateFilterAction extends HandledTransportAction<UpdateFi
} }
private void getFilterWithVersion(String filterId, ActionListener<FilterWithSeqNo> listener) { private void getFilterWithVersion(String filterId, ActionListener<FilterWithSeqNo> listener) {
GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, MlFilter.documentId(filterId)); GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlFilter.documentId(filterId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override @Override
public void onResponse(GetResponse getDocResponse) { public void onResponse(GetResponse getDocResponse) {

View File

@ -23,7 +23,6 @@ import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.UpdateModelSnapshotAction; import org.elasticsearch.xpack.core.ml.action.UpdateModelSnapshotAction;
import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
import org.elasticsearch.xpack.core.ml.job.results.Result; import org.elasticsearch.xpack.core.ml.job.results.Result;
import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider; import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider;
@ -80,8 +79,7 @@ public class TransportUpdateModelSnapshotAction extends HandledTransportAction<U
} }
private void indexModelSnapshot(Result<ModelSnapshot> modelSnapshot, Consumer<Boolean> handler, Consumer<Exception> errorHandler) { private void indexModelSnapshot(Result<ModelSnapshot> modelSnapshot, Consumer<Boolean> handler, Consumer<Exception> errorHandler) {
IndexRequest indexRequest = new IndexRequest(modelSnapshot.index, ElasticsearchMappings.DOC_TYPE, IndexRequest indexRequest = new IndexRequest(modelSnapshot.index).id(ModelSnapshot.documentId(modelSnapshot.result));
ModelSnapshot.documentId(modelSnapshot.result));
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
modelSnapshot.result.toXContent(builder, ToXContent.EMPTY_PARAMS); modelSnapshot.result.toXContent(builder, ToXContent.EMPTY_PARAMS);
indexRequest.source(builder); indexRequest.source(builder);

View File

@ -19,7 +19,6 @@ import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexAction;
import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse;
@ -50,7 +49,6 @@ import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate;
import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.core.ml.utils.ToXContentParams; import org.elasticsearch.xpack.core.ml.utils.ToXContentParams;
import org.elasticsearch.xpack.ml.job.persistence.ExpandedIdsMatcher; import org.elasticsearch.xpack.ml.job.persistence.ExpandedIdsMatcher;
@ -126,12 +124,11 @@ public class DatafeedConfigProvider {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
XContentBuilder source = config.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); XContentBuilder source = config.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS));
IndexRequest indexRequest = client.prepareIndex(AnomalyDetectorsIndex.configIndexName(), IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName())
ElasticsearchMappings.DOC_TYPE, DatafeedConfig.documentId(datafeedId)) .id(DatafeedConfig.documentId(datafeedId))
.setSource(source) .source(source)
.setOpType(DocWriteRequest.OpType.CREATE) .opType(DocWriteRequest.OpType.CREATE)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
.request();
executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, ActionListener.wrap( executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, ActionListener.wrap(
listener::onResponse, listener::onResponse,
@ -162,8 +159,7 @@ public class DatafeedConfigProvider {
* @param datafeedConfigListener The config listener * @param datafeedConfigListener The config listener
*/ */
public void getDatafeedConfig(String datafeedId, ActionListener<DatafeedConfig.Builder> datafeedConfigListener) { public void getDatafeedConfig(String datafeedId, ActionListener<DatafeedConfig.Builder> datafeedConfigListener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), DatafeedConfig.documentId(datafeedId));
ElasticsearchMappings.DOC_TYPE, DatafeedConfig.documentId(datafeedId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override @Override
public void onResponse(GetResponse getResponse) { public void onResponse(GetResponse getResponse) {
@ -230,8 +226,7 @@ public class DatafeedConfigProvider {
* @param actionListener Deleted datafeed listener * @param actionListener Deleted datafeed listener
*/ */
public void deleteDatafeedConfig(String datafeedId, ActionListener<DeleteResponse> actionListener) { public void deleteDatafeedConfig(String datafeedId, ActionListener<DeleteResponse> actionListener) {
DeleteRequest request = new DeleteRequest(AnomalyDetectorsIndex.configIndexName(), DeleteRequest request = new DeleteRequest(AnomalyDetectorsIndex.configIndexName(), DatafeedConfig.documentId(datafeedId));
ElasticsearchMappings.DOC_TYPE, DatafeedConfig.documentId(datafeedId));
request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, request, new ActionListener<DeleteResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, request, new ActionListener<DeleteResponse>() {
@Override @Override
@ -268,8 +263,7 @@ public class DatafeedConfigProvider {
public void updateDatefeedConfig(String datafeedId, DatafeedUpdate update, Map<String, String> headers, public void updateDatefeedConfig(String datafeedId, DatafeedUpdate update, Map<String, String> headers,
BiConsumer<DatafeedConfig, ActionListener<Boolean>> validator, BiConsumer<DatafeedConfig, ActionListener<Boolean>> validator,
ActionListener<DatafeedConfig> updatedConfigListener) { ActionListener<DatafeedConfig> updatedConfigListener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), DatafeedConfig.documentId(datafeedId));
ElasticsearchMappings.DOC_TYPE, DatafeedConfig.documentId(datafeedId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override @Override
@ -325,15 +319,15 @@ public class DatafeedConfigProvider {
ActionListener<IndexResponse> listener) { ActionListener<IndexResponse> listener) {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
XContentBuilder updatedSource = updatedConfig.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); XContentBuilder updatedSource = updatedConfig.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS));
IndexRequestBuilder indexRequest = client.prepareIndex(AnomalyDetectorsIndex.configIndexName(), IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName())
ElasticsearchMappings.DOC_TYPE, DatafeedConfig.documentId(updatedConfig.getId())) .id(DatafeedConfig.documentId(updatedConfig.getId()))
.setSource(updatedSource) .source(updatedSource)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.setIfSeqNo(seqNo); indexRequest.setIfSeqNo(seqNo);
indexRequest.setIfPrimaryTerm(primaryTerm); indexRequest.setIfPrimaryTerm(primaryTerm);
executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest.request(), listener); executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, listener);
} catch (IOException e) { } catch (IOException e) {
listener.onFailure( listener.onFailure(

View File

@ -21,7 +21,6 @@ import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexAction;
import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse;
@ -62,7 +61,6 @@ import org.elasticsearch.xpack.core.ml.job.config.Detector;
import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; import org.elasticsearch.xpack.core.ml.job.config.JobUpdate;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.core.ml.utils.ToXContentParams; import org.elasticsearch.xpack.core.ml.utils.ToXContentParams;
@ -120,12 +118,11 @@ public class JobConfigProvider {
public void putJob(Job job, ActionListener<IndexResponse> listener) { public void putJob(Job job, ActionListener<IndexResponse> listener) {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
XContentBuilder source = job.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); XContentBuilder source = job.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS));
IndexRequest indexRequest = client.prepareIndex(AnomalyDetectorsIndex.configIndexName(), IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName())
ElasticsearchMappings.DOC_TYPE, Job.documentId(job.getId())) .id(Job.documentId(job.getId()))
.setSource(source) .source(source)
.setOpType(DocWriteRequest.OpType.CREATE) .opType(DocWriteRequest.OpType.CREATE)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
.request();
executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, ActionListener.wrap( executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, ActionListener.wrap(
listener::onResponse, listener::onResponse,
@ -155,8 +152,7 @@ public class JobConfigProvider {
* @param jobListener Job listener * @param jobListener Job listener
*/ */
public void getJob(String jobId, ActionListener<Job.Builder> jobListener) { public void getJob(String jobId, ActionListener<Job.Builder> jobListener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId));
ElasticsearchMappings.DOC_TYPE, Job.documentId(jobId));
executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, getRequest, new ActionListener<GetResponse>() {
@Override @Override
@ -193,8 +189,7 @@ public class JobConfigProvider {
* @param actionListener Deleted job listener * @param actionListener Deleted job listener
*/ */
public void deleteJob(String jobId, boolean errorIfMissing, ActionListener<DeleteResponse> actionListener) { public void deleteJob(String jobId, boolean errorIfMissing, ActionListener<DeleteResponse> actionListener) {
DeleteRequest request = new DeleteRequest(AnomalyDetectorsIndex.configIndexName(), DeleteRequest request = new DeleteRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId));
ElasticsearchMappings.DOC_TYPE, Job.documentId(jobId));
request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, request, new ActionListener<DeleteResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, DeleteAction.INSTANCE, request, new ActionListener<DeleteResponse>() {
@ -230,8 +225,7 @@ public class JobConfigProvider {
*/ */
public void updateJob(String jobId, JobUpdate update, ByteSizeValue maxModelMemoryLimit, public void updateJob(String jobId, JobUpdate update, ByteSizeValue maxModelMemoryLimit,
ActionListener<Job> updatedJobListener) { ActionListener<Job> updatedJobListener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId));
ElasticsearchMappings.DOC_TYPE, Job.documentId(jobId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override @Override
@ -295,8 +289,7 @@ public class JobConfigProvider {
*/ */
public void updateJobWithValidation(String jobId, JobUpdate update, ByteSizeValue maxModelMemoryLimit, public void updateJobWithValidation(String jobId, JobUpdate update, ByteSizeValue maxModelMemoryLimit,
UpdateValidator validator, ActionListener<Job> updatedJobListener) { UpdateValidator validator, ActionListener<Job> updatedJobListener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId));
ElasticsearchMappings.DOC_TYPE, Job.documentId(jobId));
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@Override @Override
@ -347,14 +340,14 @@ public class JobConfigProvider {
ActionListener<Job> updatedJobListener) { ActionListener<Job> updatedJobListener) {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
XContentBuilder updatedSource = updatedJob.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder updatedSource = updatedJob.toXContent(builder, ToXContent.EMPTY_PARAMS);
IndexRequestBuilder indexRequest = client.prepareIndex(AnomalyDetectorsIndex.configIndexName(), IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.configIndexName())
ElasticsearchMappings.DOC_TYPE, Job.documentId(updatedJob.getId())) .id(Job.documentId(updatedJob.getId()))
.setSource(updatedSource) .source(updatedSource)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.setIfSeqNo(seqNo); indexRequest.setIfSeqNo(seqNo);
indexRequest.setIfPrimaryTerm(primaryTerm); indexRequest.setIfPrimaryTerm(primaryTerm);
executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest.request(), ActionListener.wrap( executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, ActionListener.wrap(
indexResponse -> { indexResponse -> {
assert indexResponse.getResult() == DocWriteResponse.Result.UPDATED; assert indexResponse.getResult() == DocWriteResponse.Result.UPDATED;
updatedJobListener.onResponse(updatedJob); updatedJobListener.onResponse(updatedJob);
@ -383,8 +376,7 @@ public class JobConfigProvider {
* @param listener Exists listener * @param listener Exists listener
*/ */
public void jobExists(String jobId, boolean errorIfMissing, ActionListener<Boolean> listener) { public void jobExists(String jobId, boolean errorIfMissing, ActionListener<Boolean> listener) {
GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId));
ElasticsearchMappings.DOC_TYPE, Job.documentId(jobId));
getRequest.fetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE); getRequest.fetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE);
executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener<GetResponse>() {
@ -458,8 +450,7 @@ public class JobConfigProvider {
* @param listener Responds with true if successful else an error * @param listener Responds with true if successful else an error
*/ */
public void markJobAsDeleting(String jobId, ActionListener<Boolean> listener) { public void markJobAsDeleting(String jobId, ActionListener<Boolean> listener) {
UpdateRequest updateRequest = new UpdateRequest(AnomalyDetectorsIndex.configIndexName(), UpdateRequest updateRequest = new UpdateRequest(AnomalyDetectorsIndex.configIndexName(), Job.documentId(jobId));
ElasticsearchMappings.DOC_TYPE, Job.documentId(jobId));
updateRequest.retryOnConflict(3); updateRequest.retryOnConflict(3);
updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
updateRequest.doc(Collections.singletonMap(Job.DELETING.getPreferredName(), Boolean.TRUE)); updateRequest.doc(Collections.singletonMap(Job.DELETING.getPreferredName(), Boolean.TRUE));

View File

@ -17,7 +17,6 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts;
import java.io.IOException; import java.io.IOException;
@ -54,10 +53,9 @@ public class JobDataCountsPersister {
*/ */
public void persistDataCounts(String jobId, DataCounts counts, ActionListener<Boolean> listener) { public void persistDataCounts(String jobId, DataCounts counts, ActionListener<Boolean> listener) {
try (XContentBuilder content = serialiseCounts(counts)) { try (XContentBuilder content = serialiseCounts(counts)) {
final IndexRequest request = client.prepareIndex(AnomalyDetectorsIndex.resultsWriteAlias(jobId), ElasticsearchMappings.DOC_TYPE, final IndexRequest request = new IndexRequest(AnomalyDetectorsIndex.resultsWriteAlias(jobId))
DataCounts.documentId(jobId)) .id(DataCounts.documentId(jobId))
.setSource(content) .source(content);
.request();
executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, request, new ActionListener<IndexResponse>() { executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, request, new ActionListener<IndexResponse>() {
@Override @Override
public void onResponse(IndexResponse indexResponse) { public void onResponse(IndexResponse indexResponse) {

View File

@ -26,7 +26,6 @@ import java.util.List;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin;
import static org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings.DOC_TYPE;
/** /**
@ -78,7 +77,7 @@ public class JobRenormalizedResultsPersister {
public void updateResult(String id, String index, ToXContent resultDoc) { public void updateResult(String id, String index, ToXContent resultDoc) {
try (XContentBuilder content = toXContentBuilder(resultDoc)) { try (XContentBuilder content = toXContentBuilder(resultDoc)) {
bulkRequest.add(new IndexRequest(index, DOC_TYPE, id).source(content)); bulkRequest.add(new IndexRequest(index).id(id).source(content));
} catch (IOException e) { } catch (IOException e) {
logger.error(new ParameterizedMessage("[{}] Error serialising result", jobId), e); logger.error(new ParameterizedMessage("[{}] Error serialising result", jobId), e);
} }

View File

@ -46,7 +46,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin;
import static org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings.DOC_TYPE;
/** /**
* Persists result types, Quantiles etc to Elasticsearch<br> * Persists result types, Quantiles etc to Elasticsearch<br>
@ -175,7 +174,7 @@ public class JobResultsPersister {
private void indexResult(String id, ToXContent resultDoc, String resultType) { private void indexResult(String id, ToXContent resultDoc, String resultType) {
try (XContentBuilder content = toXContentBuilder(resultDoc)) { try (XContentBuilder content = toXContentBuilder(resultDoc)) {
bulkRequest.add(new IndexRequest(indexName, DOC_TYPE, id).source(content)); bulkRequest.add(new IndexRequest(indexName).id(id).source(content));
} catch (IOException e) { } catch (IOException e) {
logger.error(new ParameterizedMessage("[{}] Error serialising {}", jobId, resultType), e); logger.error(new ParameterizedMessage("[{}] Error serialising {}", jobId, resultType), e);
} }
@ -349,7 +348,7 @@ public class JobResultsPersister {
logCall(indexName); logCall(indexName);
try (XContentBuilder content = toXContentBuilder(object)) { try (XContentBuilder content = toXContentBuilder(object)) {
IndexRequest indexRequest = new IndexRequest(indexName, DOC_TYPE, id).source(content).setRefreshPolicy(refreshPolicy); IndexRequest indexRequest = new IndexRequest(indexName).id(id).source(content).setRefreshPolicy(refreshPolicy);
executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, indexRequest, listener, client::index); executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, indexRequest, listener, client::index);
} catch (IOException e) { } catch (IOException e) {
logger.error(new ParameterizedMessage("[{}] Error writing [{}]", jobId, (id == null) ? "auto-generated ID" : id), e); logger.error(new ParameterizedMessage("[{}] Error writing [{}]", jobId, (id == null) ? "auto-generated ID" : id), e);

View File

@ -124,6 +124,7 @@ import java.util.function.Consumer;
import java.util.function.Supplier; import java.util.function.Supplier;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.clientWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.clientWithOrigin;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
@ -289,8 +290,8 @@ public class JobResultsProvider {
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName); CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
// This assumes the requested mapping will be merged with mappings from the template, // This assumes the requested mapping will be merged with mappings from the template,
// and may need to be revisited if template merging is ever refactored // and may need to be revisited if template merging is ever refactored
try (XContentBuilder termFieldsMapping = ElasticsearchMappings.termFieldsMapping(ElasticsearchMappings.DOC_TYPE, termFields)) { try (XContentBuilder termFieldsMapping = ElasticsearchMappings.termFieldsMapping(termFields)) {
createIndexRequest.mapping(ElasticsearchMappings.DOC_TYPE, termFieldsMapping); createIndexRequest.mapping(SINGLE_MAPPING_NAME, termFieldsMapping);
} }
executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, createIndexRequest, executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, createIndexRequest,
ActionListener.<CreateIndexResponse>wrap( ActionListener.<CreateIndexResponse>wrap(
@ -309,26 +310,22 @@ public class JobResultsProvider {
), client.admin().indices()::create); ), client.admin().indices()::create);
} else { } else {
long fieldCountLimit = MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.get(settings); long fieldCountLimit = MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.get(settings);
if (violatedFieldCountLimit(indexName, termFields.size(), fieldCountLimit, state)) { IndexMetaData indexMetaData = state.metaData().index(indexName);
if (violatedFieldCountLimit(termFields.size(), fieldCountLimit, indexMetaData)) {
String message = "Cannot create job in index '" + indexName + "' as the " + String message = "Cannot create job in index '" + indexName + "' as the " +
MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey() + " setting will be violated"; MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey() + " setting will be violated";
finalListener.onFailure(new IllegalArgumentException(message)); finalListener.onFailure(new IllegalArgumentException(message));
} else { } else {
updateIndexMappingWithTermFields(indexName, termFields, updateIndexMappingWithTermFields(indexName, indexMetaData.mapping().type(), termFields,
ActionListener.wrap(createAliasListener::onResponse, finalListener::onFailure)); ActionListener.wrap(createAliasListener::onResponse, finalListener::onFailure));
} }
} }
} }
public static boolean violatedFieldCountLimit( public static boolean violatedFieldCountLimit(long additionalFieldCount, long fieldCountLimit, IndexMetaData indexMetaData) {
String indexName, long additionalFieldCount, long fieldCountLimit, ClusterState clusterState) { MappingMetaData mapping = indexMetaData.mapping();
long numFields = 0; long numFields = countFields(mapping.sourceAsMap());
IndexMetaData indexMetaData = clusterState.metaData().index(indexName);
Iterator<MappingMetaData> mappings = indexMetaData.getMappings().valuesIt();
while (mappings.hasNext()) {
MappingMetaData mapping = mappings.next();
numFields += countFields(mapping.sourceAsMap());
}
return numFields + additionalFieldCount > fieldCountLimit; return numFields + additionalFieldCount > fieldCountLimit;
} }
@ -353,10 +350,12 @@ public class JobResultsProvider {
return count; return count;
} }
private void updateIndexMappingWithTermFields(String indexName, Collection<String> termFields, ActionListener<Boolean> listener) { private void updateIndexMappingWithTermFields(String indexName, String mappingType, Collection<String> termFields,
// Put the whole "doc" mapping, not just the term fields, otherwise we'll wipe the _meta section of the mapping ActionListener<Boolean> listener) {
try (XContentBuilder termFieldsMapping = ElasticsearchMappings.resultsMapping(termFields)) { // Put the whole mapping, not just the term fields, otherwise we'll wipe the _meta section of the mapping
final PutMappingRequest request = client.admin().indices().preparePutMapping(indexName).setType(ElasticsearchMappings.DOC_TYPE) try (XContentBuilder termFieldsMapping = ElasticsearchMappings.resultsMapping(mappingType, termFields)) {
final PutMappingRequest request = client.admin().indices().preparePutMapping(indexName)
.setType(mappingType)
.setSource(termFieldsMapping).request(); .setSource(termFieldsMapping).request();
executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, request, new ActionListener<AcknowledgedResponse>() { executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, request, new ActionListener<AcknowledgedResponse>() {
@Override @Override
@ -504,7 +503,7 @@ public class JobResultsProvider {
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
return objectParser.apply(parser, null); return objectParser.apply(parser, null);
} catch (IOException e) { } catch (IOException e) {
errorHandler.accept(new ElasticsearchParseException("failed to parse " + hit.getType(), e)); errorHandler.accept(new ElasticsearchParseException("failed to parse " + hit.getId(), e));
return null; return null;
} }
} }
@ -1184,7 +1183,7 @@ public class JobResultsProvider {
currentJobs.removeAll(jobIdsToRemove); currentJobs.removeAll(jobIdsToRemove);
Calendar updatedCalendar = new Calendar(calendar.getId(), new ArrayList<>(currentJobs), calendar.getDescription()); Calendar updatedCalendar = new Calendar(calendar.getId(), new ArrayList<>(currentJobs), calendar.getDescription());
UpdateRequest updateRequest = new UpdateRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, updatedCalendar.documentId()); UpdateRequest updateRequest = new UpdateRequest(MlMetaIndex.INDEX_NAME, updatedCalendar.documentId());
updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
@ -1252,8 +1251,7 @@ public class JobResultsProvider {
ids.remove(jobId); ids.remove(jobId);
return new Calendar(c.getId(), new ArrayList<>(ids), c.getDescription()); return new Calendar(c.getId(), new ArrayList<>(ids), c.getDescription());
}).forEach(c -> { }).forEach(c -> {
UpdateRequest updateRequest = new UpdateRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, UpdateRequest updateRequest = new UpdateRequest(MlMetaIndex.INDEX_NAME, c.documentId());
c.documentId());
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
updateRequest.doc(c.toXContent(builder, ToXContent.EMPTY_PARAMS)); updateRequest.doc(c.toXContent(builder, ToXContent.EMPTY_PARAMS));
} catch (IOException e) { } catch (IOException e) {
@ -1276,7 +1274,7 @@ public class JobResultsProvider {
} }
public void calendar(String calendarId, ActionListener<Calendar> listener) { public void calendar(String calendarId, ActionListener<Calendar> listener) {
GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, Calendar.documentId(calendarId)); GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, Calendar.documentId(calendarId));
executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, getRequest, new ActionListener<GetResponse>() { executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, getRequest, new ActionListener<GetResponse>() {
@Override @Override
public void onResponse(GetResponse getDocResponse) { public void onResponse(GetResponse getDocResponse) {

View File

@ -15,7 +15,6 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerState; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerState;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
@ -75,7 +74,6 @@ public class StateStreamer {
try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) {
SearchResponse stateResponse = client.prepareSearch(indexName) SearchResponse stateResponse = client.prepareSearch(indexName)
.setTypes(ElasticsearchMappings.DOC_TYPE)
.setSize(1) .setSize(1)
.setQuery(QueryBuilders.idsQuery().addIds(stateDocId)).get(); .setQuery(QueryBuilders.idsQuery().addIds(stateDocId)).get();
if (stateResponse.getHits().getHits().length == 0) { if (stateResponse.getHits().getHits().length == 0) {
@ -102,7 +100,6 @@ public class StateStreamer {
try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) {
SearchResponse stateResponse = client.prepareSearch(indexName) SearchResponse stateResponse = client.prepareSearch(indexName)
.setTypes(ElasticsearchMappings.DOC_TYPE)
.setSize(1) .setSize(1)
.setQuery(QueryBuilders.idsQuery().addIds(docId)).get(); .setQuery(QueryBuilders.idsQuery().addIds(docId)).get();
if (stateResponse.getHits().getHits().length == 0) { if (stateResponse.getHits().getHits().length == 0) {

View File

@ -15,7 +15,6 @@ import org.elasticsearch.common.bytes.CompositeBytesReference;
import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import org.elasticsearch.xpack.ml.process.StateProcessor; import org.elasticsearch.xpack.ml.process.StateProcessor;
import java.io.IOException; import java.io.IOException;
@ -98,7 +97,7 @@ public class AutodetectStateProcessor implements StateProcessor {
void persist(BytesReference bytes) throws IOException { void persist(BytesReference bytes) throws IOException {
BulkRequest bulkRequest = new BulkRequest(); BulkRequest bulkRequest = new BulkRequest();
bulkRequest.add(bytes, AnomalyDetectorsIndex.jobStateIndexWriteAlias(), ElasticsearchMappings.DOC_TYPE, XContentType.JSON); bulkRequest.add(bytes, AnomalyDetectorsIndex.jobStateIndexWriteAlias(), XContentType.JSON);
if (bulkRequest.numberOfActions() > 0) { if (bulkRequest.numberOfActions() > 0) {
LOGGER.trace("[{}] Persisting job state document", jobId); LOGGER.trace("[{}] Persisting job state document", jobId);
try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) {

View File

@ -18,7 +18,6 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerState; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerState;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelState; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelState;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles;
@ -105,7 +104,6 @@ public class UnusedStateRemover implements MlDataRemover {
LOGGER.info("Found [{}] unused state documents; attempting to delete", LOGGER.info("Found [{}] unused state documents; attempting to delete",
unusedDocIds.size()); unusedDocIds.size());
DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexPattern()) DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexPattern())
.types(ElasticsearchMappings.DOC_TYPE)
.setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setQuery(QueryBuilders.idsQuery().addIds(unusedDocIds.toArray(new String[0]))); .setQuery(QueryBuilders.idsQuery().addIds(unusedDocIds.toArray(new String[0])));
client.execute(DeleteByQueryAction.INSTANCE, deleteByQueryRequest, ActionListener.wrap( client.execute(DeleteByQueryAction.INSTANCE, deleteByQueryRequest, ActionListener.wrap(

View File

@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.notifications;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.IndexResponse;
@ -38,30 +37,30 @@ public class Auditor {
} }
public void info(String jobId, String message) { public void info(String jobId, String message) {
indexDoc(AuditMessage.TYPE.getPreferredName(), AuditMessage.newInfo(jobId, message, nodeName)); indexDoc(AuditMessage.newInfo(jobId, message, nodeName));
} }
public void warning(String jobId, String message) { public void warning(String jobId, String message) {
indexDoc(AuditMessage.TYPE.getPreferredName(), AuditMessage.newWarning(jobId, message, nodeName)); indexDoc(AuditMessage.newWarning(jobId, message, nodeName));
} }
public void error(String jobId, String message) { public void error(String jobId, String message) {
indexDoc(AuditMessage.TYPE.getPreferredName(), AuditMessage.newError(jobId, message, nodeName)); indexDoc(AuditMessage.newError(jobId, message, nodeName));
} }
private void indexDoc(String type, ToXContent toXContent) { private void indexDoc(ToXContent toXContent) {
IndexRequest indexRequest = new IndexRequest(AuditorField.NOTIFICATIONS_INDEX, type); IndexRequest indexRequest = new IndexRequest(AuditorField.NOTIFICATIONS_INDEX);
indexRequest.source(toXContentBuilder(toXContent)); indexRequest.source(toXContentBuilder(toXContent));
indexRequest.timeout(TimeValue.timeValueSeconds(5)); indexRequest.timeout(TimeValue.timeValueSeconds(5));
executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, indexRequest, new ActionListener<IndexResponse>() { executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, indexRequest, new ActionListener<IndexResponse>() {
@Override @Override
public void onResponse(IndexResponse indexResponse) { public void onResponse(IndexResponse indexResponse) {
LOGGER.trace("Successfully persisted {}", type); LOGGER.trace("Successfully persisted audit message");
} }
@Override @Override
public void onFailure(Exception e) { public void onFailure(Exception e) {
LOGGER.debug(new ParameterizedMessage("Error writing {}", new Object[]{type}, e)); LOGGER.debug("Error writing audit message", e);
} }
}, client::index); }, client::index);
} }

View File

@ -20,7 +20,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask; import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
@ -34,12 +33,9 @@ import org.elasticsearch.xpack.core.ml.job.config.DataDescription;
import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Detector;
import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.config.JobState;
import org.elasticsearch.xpack.core.ml.notifications.AuditMessage;
import org.elasticsearch.xpack.core.ml.notifications.AuditorField;
import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MachineLearning;
import org.elasticsearch.xpack.ml.action.TransportStartDatafeedAction.DatafeedTask; import org.elasticsearch.xpack.ml.action.TransportStartDatafeedAction.DatafeedTask;
import org.elasticsearch.xpack.ml.action.TransportStartDatafeedActionTests; import org.elasticsearch.xpack.ml.action.TransportStartDatafeedActionTests;
import org.elasticsearch.xpack.ml.job.persistence.MockClientBuilder;
import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager;
import org.elasticsearch.xpack.ml.notifications.Auditor; import org.elasticsearch.xpack.ml.notifications.Auditor;
import org.junit.Before; import org.junit.Before;
@ -98,12 +94,6 @@ public class DatafeedManagerTests extends ESTestCase {
clusterService = mock(ClusterService.class); clusterService = mock(ClusterService.class);
when(clusterService.state()).thenReturn(cs.build()); when(clusterService.state()).thenReturn(cs.build());
ArgumentCaptor<XContentBuilder> argumentCaptor = ArgumentCaptor.forClass(XContentBuilder.class);
Client client = new MockClientBuilder("foo")
.prepareIndex(AuditorField.NOTIFICATIONS_INDEX, AuditMessage.TYPE.getPreferredName(), "responseId", argumentCaptor)
.build();
DiscoveryNode dNode = mock(DiscoveryNode.class); DiscoveryNode dNode = mock(DiscoveryNode.class);
when(dNode.getName()).thenReturn("this_node_has_a_name"); when(dNode.getName()).thenReturn("this_node_has_a_name");
when(clusterService.localNode()).thenReturn(dNode); when(clusterService.localNode()).thenReturn(dNode);
@ -136,8 +126,8 @@ public class DatafeedManagerTests extends ESTestCase {
AutodetectProcessManager autodetectProcessManager = mock(AutodetectProcessManager.class); AutodetectProcessManager autodetectProcessManager = mock(AutodetectProcessManager.class);
doAnswer(invocation -> hasOpenAutodetectCommunicator.get()).when(autodetectProcessManager).hasOpenAutodetectCommunicator(anyLong()); doAnswer(invocation -> hasOpenAutodetectCommunicator.get()).when(autodetectProcessManager).hasOpenAutodetectCommunicator(anyLong());
datafeedManager = new DatafeedManager(threadPool, client, clusterService, datafeedJobBuilder, () -> currentTime, auditor, datafeedManager = new DatafeedManager(threadPool, mock(Client.class), clusterService, datafeedJobBuilder,
autodetectProcessManager); () -> currentTime, auditor, autodetectProcessManager);
verify(clusterService).addListener(capturedClusterStateListener.capture()); verify(clusterService).addListener(capturedClusterStateListener.capture());
} }

View File

@ -504,7 +504,7 @@ public class JobResultsProviderIT extends MlSingleNodeTestCase {
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (ScheduledEvent event : events) { for (ScheduledEvent event : events) {
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE); IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME);
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.INCLUDE_TYPE, "true")); ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.INCLUDE_TYPE, "true"));
indexRequest.source(event.toXContent(builder, params)); indexRequest.source(event.toXContent(builder, params));
@ -547,7 +547,7 @@ public class JobResultsProviderIT extends MlSingleNodeTestCase {
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (MlFilter filter : filters) { for (MlFilter filter : filters) {
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filter.documentId()); IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME).id(filter.documentId());
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.INCLUDE_TYPE, "true")); ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.INCLUDE_TYPE, "true"));
indexRequest.source(filter.toXContent(builder, params)); indexRequest.source(filter.toXContent(builder, params));
@ -577,7 +577,7 @@ public class JobResultsProviderIT extends MlSingleNodeTestCase {
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (Calendar calendar: calendars) { for (Calendar calendar: calendars) {
IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, calendar.documentId()); IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME).id(calendar.documentId());
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.INCLUDE_TYPE, "true")); ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.INCLUDE_TYPE, "true"));
indexRequest.source(calendar.toXContent(builder, params)); indexRequest.source(calendar.toXContent(builder, params));

View File

@ -7,7 +7,7 @@ package org.elasticsearch.xpack.ml.integration;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.PlainActionFuture;
@ -39,7 +39,6 @@ import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
import org.elasticsearch.xpack.ml.MlConfigMigrationEligibilityCheck; import org.elasticsearch.xpack.ml.MlConfigMigrationEligibilityCheck;
import org.elasticsearch.xpack.ml.MlConfigMigrator; import org.elasticsearch.xpack.ml.MlConfigMigrator;
import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase;
@ -213,13 +212,12 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase {
AnomalyDetectorsIndex.createStateIndexAndAliasIfNecessary(client(), clusterService.state(), future); AnomalyDetectorsIndex.createStateIndexAndAliasIfNecessary(client(), clusterService.state(), future);
future.actionGet(); future.actionGet();
IndexRequestBuilder indexRequest = client().prepareIndex(AnomalyDetectorsIndex.jobStateIndexWriteAlias(), IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.jobStateIndexWriteAlias()).id("ml-config")
ElasticsearchMappings.DOC_TYPE, "ml-config") .source(Collections.singletonMap("a_field", "a_value"))
.setSource(Collections.singletonMap("a_field", "a_value")) .opType(DocWriteRequest.OpType.CREATE)
.setOpType(DocWriteRequest.OpType.CREATE)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.execute().actionGet(); client().index(indexRequest).actionGet();
doAnswer(invocation -> { doAnswer(invocation -> {
ClusterStateUpdateTask listener = (ClusterStateUpdateTask) invocation.getArguments()[1]; ClusterStateUpdateTask listener = (ClusterStateUpdateTask) invocation.getArguments()[1];
@ -386,7 +384,6 @@ public class MlConfigMigratorIT extends MlSingleNodeTestCase {
client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.jobStateIndexPattern()).get(); client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.jobStateIndexPattern()).get();
SearchResponse searchResponse = client() SearchResponse searchResponse = client()
.prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) .prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern())
.setTypes(ElasticsearchMappings.DOC_TYPE)
.setSize(1) .setSize(1)
.setQuery(QueryBuilders.idsQuery().addIds("ml-config")) .setQuery(QueryBuilders.idsQuery().addIds("ml-config"))
.get(); .get();

View File

@ -770,39 +770,38 @@ public class JobResultsProviderTests extends ESTestCase {
public void testViolatedFieldCountLimit() throws Exception { public void testViolatedFieldCountLimit() throws Exception {
Map<String, Object> mapping = new HashMap<>(); Map<String, Object> mapping = new HashMap<>();
for (int i = 0; i < 10; i++) {
int i = 0;
for (; i < 10; i++) {
mapping.put("field" + i, Collections.singletonMap("type", "string")); mapping.put("field" + i, Collections.singletonMap("type", "string"));
} }
IndexMetaData.Builder indexMetaData1 = new IndexMetaData.Builder("index1") IndexMetaData indexMetaData1 = new IndexMetaData.Builder("index1")
.settings(Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0))
.putMapping(new MappingMetaData("type1", Collections.singletonMap("properties", mapping)));
MetaData metaData = MetaData.builder()
.put(indexMetaData1)
.build();
boolean result = JobResultsProvider.violatedFieldCountLimit("index1", 0, 10,
ClusterState.builder(new ClusterName("_name")).metaData(metaData).build());
assertFalse(result);
result = JobResultsProvider.violatedFieldCountLimit("index1", 1, 10,
ClusterState.builder(new ClusterName("_name")).metaData(metaData).build());
assertTrue(result);
IndexMetaData.Builder indexMetaData2 = new IndexMetaData.Builder("index1")
.settings(Settings.builder() .settings(Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0))
.putMapping(new MappingMetaData("type1", Collections.singletonMap("properties", mapping))) .putMapping(new MappingMetaData("type1", Collections.singletonMap("properties", mapping)))
.putMapping(new MappingMetaData("type2", Collections.singletonMap("properties", mapping)));
metaData = MetaData.builder()
.put(indexMetaData2)
.build(); .build();
result = JobResultsProvider.violatedFieldCountLimit("index1", 0, 19, boolean result = JobResultsProvider.violatedFieldCountLimit(0, 10, indexMetaData1);
ClusterState.builder(new ClusterName("_name")).metaData(metaData).build()); assertFalse(result);
result = JobResultsProvider.violatedFieldCountLimit(1, 10, indexMetaData1);
assertTrue(result);
for (; i < 20; i++) {
mapping.put("field" + i, Collections.singletonMap("type", "string"));
}
IndexMetaData indexMetaData2 = new IndexMetaData.Builder("index1")
.settings(Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0))
.putMapping(new MappingMetaData("type1", Collections.singletonMap("properties", mapping)))
.build();
result = JobResultsProvider.violatedFieldCountLimit(0, 19, indexMetaData2);
assertTrue(result); assertTrue(result);
} }

View File

@ -28,14 +28,11 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetRequestBuilder;
import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequestBuilder; import org.elasticsearch.action.search.SearchScrollRequestBuilder;
import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.AdminClient; import org.elasticsearch.client.AdminClient;
import org.elasticsearch.client.Client; import org.elasticsearch.client.Client;
@ -355,39 +352,6 @@ public class MockClientBuilder {
return this; return this;
} }
public MockClientBuilder prepareSearchAnySize(String index, String type, SearchResponse response, ArgumentCaptor<QueryBuilder> filter) {
SearchRequestBuilder builder = mock(SearchRequestBuilder.class);
when(builder.setTypes(eq(type))).thenReturn(builder);
when(builder.addSort(any(SortBuilder.class))).thenReturn(builder);
when(builder.setQuery(filter.capture())).thenReturn(builder);
when(builder.setPostFilter(filter.capture())).thenReturn(builder);
when(builder.setFrom(any(Integer.class))).thenReturn(builder);
when(builder.setSize(any(Integer.class))).thenReturn(builder);
when(builder.setFetchSource(eq(true))).thenReturn(builder);
when(builder.addDocValueField(any(String.class))).thenReturn(builder);
when(builder.addDocValueField(any(String.class), any(String.class))).thenReturn(builder);
when(builder.addSort(any(String.class), any(SortOrder.class))).thenReturn(builder);
when(builder.get()).thenReturn(response);
when(client.prepareSearch(eq(index))).thenReturn(builder);
return this;
}
@SuppressWarnings("unchecked")
public MockClientBuilder prepareIndex(String index, String type, String responseId, ArgumentCaptor<XContentBuilder> getSource) {
IndexRequestBuilder builder = mock(IndexRequestBuilder.class);
PlainActionFuture<IndexResponse> actionFuture = mock(PlainActionFuture.class);
IndexResponse response = mock(IndexResponse.class);
when(response.getId()).thenReturn(responseId);
when(client.prepareIndex(eq(index), eq(type))).thenReturn(builder);
when(client.prepareIndex(eq(index), eq(type), any(String.class))).thenReturn(builder);
when(builder.setSource(getSource.capture())).thenReturn(builder);
when(builder.setRefreshPolicy(eq(RefreshPolicy.IMMEDIATE))).thenReturn(builder);
when(builder.execute()).thenReturn(actionFuture);
when(actionFuture.actionGet()).thenReturn(response);
return this;
}
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public MockClientBuilder prepareAlias(String indexName, String alias, QueryBuilder filter) { public MockClientBuilder prepareAlias(String indexName, String alias, QueryBuilder filter) {
when(aliasesRequestBuilder.addAlias(eq(indexName), eq(alias), eq(filter))).thenReturn(aliasesRequestBuilder); when(aliasesRequestBuilder.addAlias(eq(indexName), eq(alias), eq(filter))).thenReturn(aliasesRequestBuilder);

View File

@ -51,7 +51,6 @@ public class AuditorTests extends ESTestCase {
verify(client).index(indexRequestCaptor.capture(), any()); verify(client).index(indexRequestCaptor.capture(), any());
IndexRequest indexRequest = indexRequestCaptor.getValue(); IndexRequest indexRequest = indexRequestCaptor.getValue();
assertArrayEquals(new String[] {".ml-notifications"}, indexRequest.indices()); assertArrayEquals(new String[] {".ml-notifications"}, indexRequest.indices());
assertEquals("audit_message", indexRequest.type());
assertEquals(TimeValue.timeValueSeconds(5), indexRequest.timeout()); assertEquals(TimeValue.timeValueSeconds(5), indexRequest.timeout());
AuditMessage auditMessage = parseAuditMessage(indexRequest.source()); AuditMessage auditMessage = parseAuditMessage(indexRequest.source());
assertEquals("foo", auditMessage.getJobId()); assertEquals("foo", auditMessage.getJobId());
@ -66,7 +65,6 @@ public class AuditorTests extends ESTestCase {
verify(client).index(indexRequestCaptor.capture(), any()); verify(client).index(indexRequestCaptor.capture(), any());
IndexRequest indexRequest = indexRequestCaptor.getValue(); IndexRequest indexRequest = indexRequestCaptor.getValue();
assertArrayEquals(new String[] {".ml-notifications"}, indexRequest.indices()); assertArrayEquals(new String[] {".ml-notifications"}, indexRequest.indices());
assertEquals("audit_message", indexRequest.type());
assertEquals(TimeValue.timeValueSeconds(5), indexRequest.timeout()); assertEquals(TimeValue.timeValueSeconds(5), indexRequest.timeout());
AuditMessage auditMessage = parseAuditMessage(indexRequest.source()); AuditMessage auditMessage = parseAuditMessage(indexRequest.source());
assertEquals("bar", auditMessage.getJobId()); assertEquals("bar", auditMessage.getJobId());
@ -81,7 +79,6 @@ public class AuditorTests extends ESTestCase {
verify(client).index(indexRequestCaptor.capture(), any()); verify(client).index(indexRequestCaptor.capture(), any());
IndexRequest indexRequest = indexRequestCaptor.getValue(); IndexRequest indexRequest = indexRequestCaptor.getValue();
assertArrayEquals(new String[] {".ml-notifications"}, indexRequest.indices()); assertArrayEquals(new String[] {".ml-notifications"}, indexRequest.indices());
assertEquals("audit_message", indexRequest.type());
assertEquals(TimeValue.timeValueSeconds(5), indexRequest.timeout()); assertEquals(TimeValue.timeValueSeconds(5), indexRequest.timeout());
AuditMessage auditMessage = parseAuditMessage(indexRequest.source()); AuditMessage auditMessage = parseAuditMessage(indexRequest.source());
assertEquals("foobar", auditMessage.getJobId()); assertEquals("foobar", auditMessage.getJobId());

View File

@ -35,7 +35,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-custom-all-test-1 index: .ml-anomalies-custom-all-test-1
type: doc
id: custom_all_1464739200000_1_1 id: custom_all_1464739200000_1_1
body: body:
{ {
@ -62,7 +61,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-custom-all-test-2 index: .ml-anomalies-custom-all-test-2
type: doc
id: custom_all_1464739200000_1_2 id: custom_all_1464739200000_1_2
body: body:
{ {

View File

@ -34,7 +34,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: "delete-forecast-job_model_forecast_someforecastid_1486591200000_1800_0_961_0" id: "delete-forecast-job_model_forecast_someforecastid_1486591200000_1800_0_961_0"
body: body:
{ {
@ -56,7 +55,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: "delete-forecast-job_model_forecast_someforecastid_1486591300000_1800_0_961_0" id: "delete-forecast-job_model_forecast_someforecastid_1486591300000_1800_0_961_0"
body: body:
{ {
@ -78,7 +76,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: "delete-forecast-job_model_forecast_request_stats_someforecastid" id: "delete-forecast-job_model_forecast_request_stats_someforecastid"
body: body:
{ {
@ -112,19 +109,16 @@ setup:
get: get:
id: delete-forecast-job_model_forecast_request_stats_someforecastid id: delete-forecast-job_model_forecast_request_stats_someforecastid
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
- do: - do:
catch: missing catch: missing
get: get:
id: delete-forecast-job_model_forecast_someforecastid_1486591300000_1800_0_961_0 id: delete-forecast-job_model_forecast_someforecastid_1486591300000_1800_0_961_0
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
- do: - do:
catch: missing catch: missing
get: get:
id: delete-forecast-job_model_forecast_someforecastid_1486591200000_1800_0_961_0 id: delete-forecast-job_model_forecast_someforecastid_1486591200000_1800_0_961_0
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
--- ---
"Test delete on _all forecasts not allow no forecasts": "Test delete on _all forecasts not allow no forecasts":

View File

@ -39,7 +39,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-delete-model-snapshot index: .ml-anomalies-delete-model-snapshot
type: doc
id: "delete-model-snapshot_model_snapshot_inactive-snapshot" id: "delete-model-snapshot_model_snapshot_inactive-snapshot"
body: > body: >
{ {
@ -57,7 +56,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: "delete-model-snapshot_model_state_inactive-snapshot#1" id: "delete-model-snapshot_model_state_inactive-snapshot#1"
body: > body: >
{ {
@ -69,7 +67,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: "delete-model-snapshot_model_state_inactive-snapshot#2" id: "delete-model-snapshot_model_state_inactive-snapshot#2"
body: > body: >
{ {
@ -82,7 +79,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-delete-model-snapshot index: .ml-anomalies-delete-model-snapshot
type: doc
id: "delete-model-snapshot_model_snapshot_active-snapshot" id: "delete-model-snapshot_model_snapshot_active-snapshot"
body: > body: >
{ {
@ -158,7 +154,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
count: count:
index: .ml-state index: .ml-state
type: doc
- match: { count: 3 } - match: { count: 3 }
@ -191,7 +186,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
count: count:
index: .ml-state index: .ml-state
type: doc
- match: { count: 1 } - match: { count: 1 }

View File

@ -8,7 +8,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-meta index: .ml-meta
type: doc
id: filter_imposter-filter id: filter_imposter-filter
body: > body: >
{ {

View File

@ -23,7 +23,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-get-model-snapshots index: .ml-anomalies-get-model-snapshots
type: doc
id: "get-model-snapshots-1" id: "get-model-snapshots-1"
body: > body: >
{ {
@ -39,7 +38,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-state index: .ml-state
type: doc
id: "get-model-snapshots_model_state_1#1" id: "get-model-snapshots_model_state_1#1"
body: > body: >
{ {
@ -51,7 +49,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-get-model-snapshots index: .ml-anomalies-get-model-snapshots
type: doc
id: "get-model-snapshots-2" id: "get-model-snapshots-2"
body: > body: >
{ {
@ -66,7 +63,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: "get-model-snapshots_model_state_2#1" id: "get-model-snapshots_model_state_2#1"
body: > body: >
{ {
@ -77,7 +73,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: "get-model-snapshots_model_state_2#2" id: "get-model-snapshots_model_state_2#2"
body: > body: >
{ {

View File

@ -186,7 +186,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: index-layout-job2_categorizer_state#1 id: index-layout-job2_categorizer_state#1
body: body:
key: value key: value
@ -196,7 +195,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: index-layout-job2_categorizer_state#2 id: index-layout-job2_categorizer_state#2
body: body:
key: value key: value
@ -299,7 +297,6 @@ setup:
headers: headers:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
count: count:
type: doc
index: .ml-state index: .ml-state
- match: {count: 0} - match: {count: 0}
@ -307,7 +304,6 @@ setup:
headers: headers:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
count: count:
type: doc
index: .ml-state index: .ml-state
- match: {count: 0} - match: {count: 0}
@ -315,7 +311,6 @@ setup:
headers: headers:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
count: count:
type: doc
index: .ml-state index: .ml-state
- match: {count: 0} - match: {count: 0}
@ -387,7 +382,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: foo index: foo
type: doc
body: body:
key: value key: value
@ -396,7 +390,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-anomalies-foo index: .ml-anomalies-foo
type: doc
body: body:
key: value key: value
@ -405,7 +398,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-anomalies-foo index: .ml-anomalies-foo
type: doc
body: body:
key: value key: value
job_id: foo job_id: foo
@ -512,7 +504,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: index-layout-quantiles-job_quantiles id: index-layout-quantiles-job_quantiles
body: body:
state: quantile-state state: quantile-state
@ -563,7 +554,6 @@ setup:
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: "index-layout-state-job_model_snapshot_123" id: "index-layout-state-job_model_snapshot_123"
body: > body: >
{ {
@ -579,7 +569,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: index-layout-state-job_model_state_123#1 id: index-layout-state-job_model_state_123#1
body: body:
state: new-model-state state: new-model-state
@ -589,7 +578,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: index-layout-state-job_model_state_123#2 id: index-layout-state-job_model_state_123#2
body: body:
state: more-new-model-state state: more-new-model-state
@ -599,7 +587,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: index-layout-state-job_categorizer_state#1 id: index-layout-state-job_categorizer_state#1
body: body:
state: new-categorizer-state state: new-categorizer-state
@ -609,7 +596,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: index-layout-state-job_categorizer_state#2 id: index-layout-state-job_categorizer_state#2
body: body:
state: more-new-categorizer-state state: more-new-categorizer-state

View File

@ -23,7 +23,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-jobs-get-result-buckets index: .ml-anomalies-jobs-get-result-buckets
type: doc
id: "jobs-get-result-buckets_1464739200000_1" id: "jobs-get-result-buckets_1464739200000_1"
body: body:
{ {
@ -40,7 +39,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-jobs-get-result-buckets index: .ml-anomalies-jobs-get-result-buckets
type: doc
id: "jobs-get-result-buckets_1470009600000_2" id: "jobs-get-result-buckets_1470009600000_2"
body: body:
{ {
@ -57,7 +55,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-jobs-get-result-buckets index: .ml-anomalies-jobs-get-result-buckets
type: doc
id: "jobs-get-result-buckets_1470096000000_3" id: "jobs-get-result-buckets_1470096000000_3"
body: body:
{ {

View File

@ -23,7 +23,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-jobs-get-result-categories index: .ml-anomalies-jobs-get-result-categories
type: doc
id: jobs-get-result-categories-1 id: jobs-get-result-categories-1
body: { "job_id": "jobs-get-result-categories", "category_id": 1 } body: { "job_id": "jobs-get-result-categories", "category_id": 1 }
- do: - do:
@ -32,7 +31,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-jobs-get-result-categories index: .ml-anomalies-jobs-get-result-categories
type: doc
id: jobs-get-result-categories-2 id: jobs-get-result-categories-2
body: { "job_id": "jobs-get-result-categories", "category_id": 2 } body: { "job_id": "jobs-get-result-categories", "category_id": 2 }
- do: - do:
@ -41,7 +39,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-unrelated index: .ml-anomalies-unrelated
type: doc
id: jobs-get-result-categories-3 id: jobs-get-result-categories-3
body: { "job_id": "unrelated", "category_id": 1 } body: { "job_id": "unrelated", "category_id": 1 }

View File

@ -23,7 +23,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-get-influencers-test index: .ml-anomalies-get-influencers-test
type: doc
id: get-influencers-test_1464739200000_1_1 id: get-influencers-test_1464739200000_1_1
body: body:
{ {
@ -42,7 +41,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-get-influencers-test index: .ml-anomalies-get-influencers-test
type: doc
id: get-influencers-test_1464825600000_1_2 id: get-influencers-test_1464825600000_1_2
body: body:
{ {
@ -62,7 +60,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-get-influencers-test index: .ml-anomalies-get-influencers-test
type: doc
id: get-influencers-test_1464912000000_1_3 id: get-influencers-test_1464912000000_1_3
body: body:
{ {

View File

@ -64,7 +64,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: "jobs-get-result-overall-buckets-60_1" id: "jobs-get-result-overall-buckets-60_1"
body: body:
{ {
@ -81,7 +80,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: "jobs-get-result-overall-buckets-60_2" id: "jobs-get-result-overall-buckets-60_2"
body: body:
{ {
@ -98,7 +96,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: "jobs-get-result-overall-buckets-60_3" id: "jobs-get-result-overall-buckets-60_3"
body: body:
{ {
@ -114,7 +111,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: "jobs-get-result-overall-buckets-30_1" id: "jobs-get-result-overall-buckets-30_1"
body: body:
{ {
@ -131,7 +127,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: "jobs-get-result-overall-buckets-30_2" id: "jobs-get-result-overall-buckets-30_2"
body: body:
{ {
@ -148,7 +143,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: "jobs-get-result-overall-buckets-30_3" id: "jobs-get-result-overall-buckets-30_3"
body: body:
{ {
@ -165,7 +159,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: "jobs-get-result-overall-buckets-17_1" id: "jobs-get-result-overall-buckets-17_1"
body: body:
{ {
@ -182,7 +175,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: "jobs-get-result-overall-buckets-17_2" id: "jobs-get-result-overall-buckets-17_2"
body: body:
{ {
@ -199,7 +191,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: "jobs-get-result-overall-buckets-17_3" id: "jobs-get-result-overall-buckets-17_3"
body: body:
{ {
@ -216,7 +207,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: "jobs-get-result-overall-buckets-17_4" id: "jobs-get-result-overall-buckets-17_4"
body: body:
{ {

View File

@ -23,7 +23,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-jobs-get-result-records index: .ml-anomalies-jobs-get-result-records
type: doc
id: jobs-get-result-records_1464739200000_1_1 id: jobs-get-result-records_1464739200000_1_1
body: body:
{ {
@ -40,7 +39,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-jobs-get-result-records index: .ml-anomalies-jobs-get-result-records
type: doc
id: jobs-get-result-records_1464825600000_1_2 id: jobs-get-result-records_1464825600000_1_2
body: body:
{ {

View File

@ -234,7 +234,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: job-stats-v54-bwc-test-data-counts id: job-stats-v54-bwc-test-data-counts
body: body:
{ {
@ -259,7 +258,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: job-stats-v54-bwc-test-model_size_stats id: job-stats-v54-bwc-test-model_size_stats
body: body:
{ {

View File

@ -25,7 +25,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-shared index: .ml-anomalies-shared
type: doc
id: "new_doc" id: "new_doc"
body: > body: >
{ {

View File

@ -89,7 +89,6 @@ setup:
- do: - do:
get: get:
index: .ml-anomalies-post-data-job index: .ml-anomalies-post-data-job
type: doc
id: post-data-job_data_counts id: post-data-job_data_counts
- match: { _source.processed_record_count: 2 } - match: { _source.processed_record_count: 2 }

View File

@ -39,7 +39,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-revert-model-snapshot index: .ml-anomalies-revert-model-snapshot
type: doc
id: "revert-model-snapshot_model_snapshot_first" id: "revert-model-snapshot_model_snapshot_first"
body: > body: >
{ {
@ -67,7 +66,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-revert-model-snapshot index: .ml-anomalies-revert-model-snapshot
type: doc
id: "revert-model-snapshot_model_snapshot_second" id: "revert-model-snapshot_model_snapshot_second"
body: > body: >
{ {
@ -95,7 +93,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-revert-model-snapshot index: .ml-anomalies-revert-model-snapshot
type: doc
id: "revert-model-snapshot_1464825600000_1" id: "revert-model-snapshot_1464825600000_1"
body: > body: >
{ {
@ -111,7 +108,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-revert-model-snapshot index: .ml-anomalies-revert-model-snapshot
type: doc
id: "revert-model-snapshot_1464782400000_1" id: "revert-model-snapshot_1464782400000_1"
body: > body: >
{ {
@ -127,7 +123,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-revert-model-snapshot index: .ml-anomalies-revert-model-snapshot
type: doc
id: "revert-model-snapshot_1462060800000_1" id: "revert-model-snapshot_1462060800000_1"
body: > body: >
{ {
@ -143,7 +138,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-revert-model-snapshot index: .ml-anomalies-revert-model-snapshot
type: doc
id: "revert-model-snapshot_1464825600000_1_1" id: "revert-model-snapshot_1464825600000_1_1"
body: > body: >
{ {
@ -159,7 +153,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-revert-model-snapshot index: .ml-anomalies-revert-model-snapshot
type: doc
id: "revert-model-snapshot_1462060800000_1_2" id: "revert-model-snapshot_1462060800000_1_2"
body: > body: >
{ {
@ -175,7 +168,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-revert-model-snapshot index: .ml-anomalies-revert-model-snapshot
type: doc
id: "revert-model-snapshot_1464825600000_1_3" id: "revert-model-snapshot_1464825600000_1_3"
body: { body: {
"job_id": "revert-model-snapshot", "job_id": "revert-model-snapshot",
@ -193,7 +185,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-revert-model-snapshot index: .ml-anomalies-revert-model-snapshot
type: doc
id: "revert-model-snapshot_1462060800000_1_4" id: "revert-model-snapshot_1462060800000_1_4"
body: body:
{ {

View File

@ -23,7 +23,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-update-model-snapshot index: .ml-anomalies-update-model-snapshot
type: doc
id: "update-model-snapshot_model_snapshot_snapshot-1" id: "update-model-snapshot_model_snapshot_snapshot-1"
body: > body: >
{ {
@ -39,7 +38,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: "update-model-snapshot_model_state_1#1" id: "update-model-snapshot_model_state_1#1"
body: > body: >
{ {
@ -50,7 +48,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: "update-model-snapshot_model_state_1#2" id: "update-model-snapshot_model_state_1#2"
body: > body: >
{ {
@ -61,7 +58,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: "update-model-snapshot_model_state_1#3" id: "update-model-snapshot_model_state_1#3"
body: > body: >
{ {
@ -73,7 +69,6 @@ setup:
Content-Type: application/json Content-Type: application/json
index: index:
index: .ml-anomalies-update-model-snapshot index: .ml-anomalies-update-model-snapshot
type: doc
id: "update-model-snapshot_model_snapshot_snapshot-2" id: "update-model-snapshot_model_snapshot_snapshot-2"
body: > body: >
{ {
@ -90,7 +85,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: "update-model-snapshot_model_state_2#1" id: "update-model-snapshot_model_state_2#1"
body: > body: >
{ {
@ -101,7 +95,6 @@ setup:
Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
index: index:
index: .ml-state index: .ml-state
type: doc
id: "update-model-snapshot_model_state_2#2" id: "update-model-snapshot_model_state_2#2"
body: > body: >
{ {