[ML] Refactor filters API to not use _type (elastic/x-pack-elasticsearch#1483)
- Removes dependence on _type for filters.
- Changes the put filter API to take the id in the URI.
- Prepares the .ml-meta index to be able to host more types in the future.

Relates elastic/x-pack-elasticsearch#668
Original commit: elastic/x-pack-elasticsearch@d4cffa9382
This commit is contained in:
parent
f3c94915b0
commit
1bb7651dba
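For illustration, with this change a filter is created by putting its ID in the URI instead of relying on a mapping _type. A sketch based on the REST spec and YAML tests updated in this commit (exact syntax depends on the client used):

    PUT _xpack/ml/filters/filter-foo
    {
      "items": ["abc", "xyz"]
    }

The filter document is indexed into .ml-meta under the single mapping type "doc", with an optional "type": "filter" field in its source that the get filters API now uses to select filter documents.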
@@ -53,7 +53,7 @@ public class MachineLearningTemplateRegistry extends AbstractComponent implemen
     private final Client client;
     private final ThreadPool threadPool;
 
-    public static String [] TEMPLATE_NAMES = new String [] {Auditor.NOTIFICATIONS_INDEX, AnomalyDetectorsIndex.ML_META_INDEX,
+    public static String [] TEMPLATE_NAMES = new String [] {Auditor.NOTIFICATIONS_INDEX, MlMetaIndex.INDEX_NAME,
             AnomalyDetectorsIndex.jobStateIndexName(), AnomalyDetectorsIndex.jobResultsIndexPrefix()};
 
     final AtomicBoolean putMlNotificationsIndexTemplateCheck = new AtomicBoolean(false);
@@ -135,15 +135,14 @@ public class MachineLearningTemplateRegistry extends AbstractComponent implemen
 
     private void addMlMetaIndexTemplate(MetaData metaData) {
         if (putMlMetaIndexTemplateCheck.compareAndSet(false, true)) {
-            if (templateIsPresentAndUpToDate(AnomalyDetectorsIndex.ML_META_INDEX, metaData) == false) {
+            if (templateIsPresentAndUpToDate(MlMetaIndex.INDEX_NAME, metaData) == false) {
                 threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> {
                     putMetaIndexTemplate((result, error) -> {
                         putMlMetaIndexTemplateCheck.set(false);
                         if (result) {
-                            logger.info("successfully created {} index template", AnomalyDetectorsIndex.ML_META_INDEX);
+                            logger.info("successfully created {} index template", MlMetaIndex.INDEX_NAME);
                         } else {
-                            logger.error(new ParameterizedMessage(
-                                    "not able to create {} index template", AnomalyDetectorsIndex.ML_META_INDEX), error);
+                            logger.error(new ParameterizedMessage("not able to create {} index template", MlMetaIndex.INDEX_NAME), error);
                         }
                     });
                 });
@@ -217,10 +216,17 @@ public class MachineLearningTemplateRegistry extends AbstractComponent implemen
      * Index template for meta data
      */
     void putMetaIndexTemplate(BiConsumer<Boolean, Exception> listener) {
-        PutIndexTemplateRequest templateRequest = new PutIndexTemplateRequest(AnomalyDetectorsIndex.ML_META_INDEX);
-        templateRequest.patterns(Collections.singletonList(AnomalyDetectorsIndex.ML_META_INDEX));
+        PutIndexTemplateRequest templateRequest = new PutIndexTemplateRequest(MlMetaIndex.INDEX_NAME);
+        templateRequest.patterns(Collections.singletonList(MlMetaIndex.INDEX_NAME));
         templateRequest.settings(mlNotificationIndexSettings());
         templateRequest.version(Version.CURRENT.id);
+        try (XContentBuilder defaultMapping = ElasticsearchMappings.defaultMapping()) {
+            templateRequest.mapping(MapperService.DEFAULT_MAPPING, defaultMapping);
+        } catch (IOException e) {
+            String msg = "Error creating template mappings for the " + MlMetaIndex.INDEX_NAME + " index";
+            logger.error(msg, e);
+            listener.accept(false, new ElasticsearchException(msg, e));
+        }
 
         client.admin().indices().putTemplate(templateRequest,
                 ActionListener.wrap(r -> listener.accept(true, null), e -> listener.accept(false, e)));
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ml;
+
+public final class MlMetaIndex {
+    /**
+     * Where to store the ml info in Elasticsearch - must match what's
+     * expected by kibana/engineAPI/app/directives/mlLogUsage.js
+     */
+    public static final String INDEX_NAME = ".ml-meta";
+
+    public static final String TYPE = "doc";
+
+    private MlMetaIndex() {}
+}
@@ -30,11 +30,10 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.ml.MlMetaIndex;
 import org.elasticsearch.xpack.ml.MlMetadata;
 import org.elasticsearch.xpack.ml.job.config.Detector;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.config.MlFilter;
-import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
 
 import java.io.IOException;
@@ -179,8 +178,7 @@ public class DeleteFilterAction extends Action<DeleteFilterAction.Request, Delet
                                 + currentlyUsedBy);
             }
 
-            DeleteRequest deleteRequest = new DeleteRequest(AnomalyDetectorsIndex.ML_META_INDEX,
-                    MlFilter.TYPE.getPreferredName(), filterId);
+            DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filterId);
             BulkRequest bulkRequest = new BulkRequest();
             bulkRequest.add(deleteRequest);
             transportAction.execute(bulkRequest, new ActionListener<BulkResponse>() {
@@ -19,7 +19,6 @@ import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
 import org.elasticsearch.action.support.master.MasterNodeReadRequest;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.common.Strings;
@@ -33,16 +32,16 @@ import org.elasticsearch.common.xcontent.StatusToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.ml.MlMetaIndex;
 import org.elasticsearch.xpack.ml.action.util.PageParams;
 import org.elasticsearch.xpack.ml.action.util.QueryPage;
 import org.elasticsearch.xpack.ml.job.JobManager;
 import org.elasticsearch.xpack.ml.job.config.MlFilter;
-import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex;
 import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
 
 import java.io.IOException;
@@ -246,7 +245,7 @@ public class GetFiltersAction extends Action<GetFiltersAction.Request, GetFilter
     }
 
     private void getFilter(String filterId, ActionListener<Response> listener) {
-        GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.ML_META_INDEX, MlFilter.TYPE.getPreferredName(), filterId);
+        GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filterId);
         transportGetAction.execute(getRequest, new ActionListener<GetResponse>() {
             @Override
             public void onResponse(GetResponse getDocResponse) {
@@ -257,7 +256,7 @@ public class GetFiltersAction extends Action<GetFiltersAction.Request, GetFilter
                         BytesReference docSource = getDocResponse.getSourceAsBytesRef();
                         XContentParser parser =
                                 XContentFactory.xContent(docSource).createParser(NamedXContentRegistry.EMPTY, docSource);
-                        MlFilter filter = MlFilter.PARSER.apply(parser, null);
+                        MlFilter filter = MlFilter.PARSER.apply(parser, null).build();
                         responseBody = new QueryPage<>(Collections.singletonList(filter), 1, MlFilter.RESULTS_FIELD);
 
                         Response filterResponse = new Response(responseBody);
@@ -281,31 +280,29 @@ public class GetFiltersAction extends Action<GetFiltersAction.Request, GetFilter
     private void getFilters(PageParams pageParams, ActionListener<Response> listener) {
         SearchSourceBuilder sourceBuilder = new SearchSourceBuilder()
                 .from(pageParams.getFrom())
-                .size(pageParams.getSize());
+                .size(pageParams.getSize())
+                .query(QueryBuilders.termQuery(MlFilter.TYPE.getPreferredName(), MlFilter.FILTER_TYPE));
 
-        SearchRequest searchRequest = new SearchRequest(new String[]{AnomalyDetectorsIndex.ML_META_INDEX}, sourceBuilder)
+        SearchRequest searchRequest = new SearchRequest(MlMetaIndex.INDEX_NAME)
                 .indicesOptions(JobProvider.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS))
-                .types(MlFilter.TYPE.getPreferredName());
+                .source(sourceBuilder);
 
         transportSearchAction.execute(searchRequest, new ActionListener<SearchResponse>() {
             @Override
             public void onResponse(SearchResponse response) {
-
-                try {
-                    List<MlFilter> docs = new ArrayList<>();
-                    for (SearchHit hit : response.getHits().getHits()) {
-                        BytesReference docSource = hit.getSourceRef();
-                        XContentParser parser =
-                                XContentFactory.xContent(docSource).createParser(NamedXContentRegistry.EMPTY, docSource);
-                        docs.add(MlFilter.PARSER.apply(parser, null));
+                List<MlFilter> docs = new ArrayList<>();
+                for (SearchHit hit : response.getHits().getHits()) {
+                    BytesReference docSource = hit.getSourceRef();
+                    try (XContentParser parser = XContentFactory.xContent(docSource).createParser(
+                            NamedXContentRegistry.EMPTY, docSource)) {
+                        docs.add(MlFilter.PARSER.apply(parser, null).build());
+                    } catch (IOException e) {
+                        this.onFailure(e);
+                    }
 
-                    Response filterResponse = new Response(new QueryPage<>(docs, docs.size(), MlFilter.RESULTS_FIELD));
-                    listener.onResponse(filterResponse);
-
-                } catch (Exception e) {
-                    this.onFailure(e);
-                }
 
+                Response filterResponse = new Response(new QueryPage<>(docs, docs.size(), MlFilter.RESULTS_FIELD));
+                listener.onResponse(filterResponse);
             }
 
 
@@ -316,6 +313,5 @@ public class GetFiltersAction extends Action<GetFiltersAction.Request, GetFilter
             });
         }
     }
-
 }
 
@@ -46,6 +46,7 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.XPackPlugin;
 import org.elasticsearch.xpack.ml.MachineLearning;
+import org.elasticsearch.xpack.ml.MlMetaIndex;
 import org.elasticsearch.xpack.ml.MlMetadata;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.config.JobTaskStatus;
@@ -657,7 +658,7 @@ public class OpenJobAction extends Action<OpenJobAction.Request, OpenJobAction.R
 
     static String[] indicesOfInterest(ClusterState clusterState, String job) {
         String jobResultIndex = AnomalyDetectorsIndex.getPhysicalIndexFromState(clusterState, job);
-        return new String[]{AnomalyDetectorsIndex.jobStateIndexName(), jobResultIndex, AnomalyDetectorsIndex.ML_META_INDEX};
+        return new String[]{AnomalyDetectorsIndex.jobStateIndexName(), jobResultIndex, MlMetaIndex.INDEX_NAME};
     }
 
     static List<String> verifyIndicesPrimaryShardsAreActive(String jobId, ClusterState clusterState) {
@@ -18,9 +18,9 @@ import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
 import org.elasticsearch.action.support.master.MasterNodeReadRequest;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -31,13 +31,14 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.ml.job.JobManager;
+import org.elasticsearch.xpack.ml.MlMetaIndex;
 import org.elasticsearch.xpack.ml.job.config.MlFilter;
-import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex;
 import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
+import org.elasticsearch.xpack.ml.job.messages.Messages;
 import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
+import org.elasticsearch.xpack.watcher.watch.Payload;
 
 import java.io.IOException;
 import java.util.Collections;
 import java.util.Objects;
 
 
@@ -62,9 +63,16 @@ public class PutFilterAction extends Action<PutFilterAction.Request, PutFilterAc
 
     public static class Request extends MasterNodeReadRequest<Request> implements ToXContent {
 
-        public static Request parseRequest(XContentParser parser) {
-            MlFilter filter = MlFilter.PARSER.apply(parser, null);
-            return new Request(filter);
+        public static Request parseRequest(String filterId, XContentParser parser) {
+            MlFilter.Builder filter = MlFilter.PARSER.apply(parser, null);
+            if (filter.getId() == null) {
+                filter.setId(filterId);
+            } else if (!Strings.isNullOrEmpty(filterId) && !filterId.equals(filter.getId())) {
+                // If we have both URI and body filter ID, they must be identical
+                throw new IllegalArgumentException(Messages.getMessage(Messages.INCONSISTENT_ID, MlFilter.ID.getPreferredName(),
+                        filter.getId(), filterId));
+            }
+            return new Request(filter.build());
         }
 
         private MlFilter filter;
@@ -167,12 +175,12 @@ public class PutFilterAction extends Action<PutFilterAction.Request, PutFilterAc
         protected void doExecute(Request request, ActionListener<Response> listener) {
             MlFilter filter = request.getFilter();
             final String filterId = filter.getId();
-            IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.ML_META_INDEX, MlFilter.TYPE.getPreferredName(), filterId);
+            IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filterId);
             try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
-                indexRequest.source(filter.toXContent(builder, ToXContent.EMPTY_PARAMS));
+                Payload.XContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(MlFilter.INCLUDE_TYPE_KEY, "true"));
+                indexRequest.source(filter.toXContent(builder, params));
             } catch (IOException e) {
-                throw new IllegalStateException(
-                        "Failed to serialise filter with id [" + filter.getId() + "]", e);
+                throw new IllegalStateException("Failed to serialise filter with id [" + filter.getId() + "]", e);
             }
             BulkRequest bulkRequest = new BulkRequest().add(indexRequest);
 
@@ -6,34 +6,39 @@
 package org.elasticsearch.xpack.ml.job.config;
 
 import org.elasticsearch.action.support.ToXContentToBytes;
+import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Objects;
 
 public class MlFilter extends ToXContentToBytes implements Writeable {
-    public static final ParseField TYPE = new ParseField("filter");
-    public static final ParseField ID = new ParseField("id");
+
+    public static final String INCLUDE_TYPE_KEY = "include_type";
+    public static final String FILTER_TYPE = "filter";
+
+    public static final ParseField TYPE = new ParseField("type");
+    public static final ParseField ID = new ParseField("filter_id");
     public static final ParseField ITEMS = new ParseField("items");
 
     // For QueryPage
     public static final ParseField RESULTS_FIELD = new ParseField("filters");
 
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<MlFilter, Void> PARSER = new ConstructingObjectParser<>(
-            TYPE.getPreferredName(), a -> new MlFilter((String) a[0], (List<String>) a[1]));
+    public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(TYPE.getPreferredName(), Builder::new);
 
     static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), ID);
-        PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), ITEMS);
+        PARSER.declareString((builder, s) -> {}, TYPE);
+        PARSER.declareString(Builder::setId, ID);
+        PARSER.declareStringArray(Builder::setItems, ITEMS);
     }
 
     private final String id;
@@ -60,6 +65,9 @@ public class MlFilter extends ToXContentToBytes implements Writeable {
         builder.startObject();
         builder.field(ID.getPreferredName(), id);
         builder.field(ITEMS.getPreferredName(), items);
+        if (params.paramAsBoolean(INCLUDE_TYPE_KEY, false)) {
+            builder.field(TYPE.getPreferredName(), FILTER_TYPE);
+        }
         builder.endObject();
         return builder;
     }
@@ -90,4 +98,31 @@ public class MlFilter extends ToXContentToBytes implements Writeable {
     public int hashCode() {
         return Objects.hash(id, items);
     }
+
+    public static class Builder {
+
+        private String id;
+        private List<String> items = Collections.emptyList();
+
+        public Builder setId(String id) {
+            this.id = id;
+            return this;
+        }
+
+        private Builder() {}
+
+        @Nullable
+        public String getId() {
+            return id;
+        }
+
+        public Builder setItems(List<String> items) {
+            this.items = items;
+            return this;
+        }
+
+        public MlFilter build() {
+            return new MlFilter(id, items);
+        }
+    }
 }
@@ -12,11 +12,6 @@ import org.elasticsearch.xpack.ml.MlMetadata;
  * Methods for handling index naming related functions
  */
 public final class AnomalyDetectorsIndex {
-    /**
-     * Where to store the ml info in Elasticsearch - must match what's
-     * expected by kibana/engineAPI/app/directives/mlLogUsage.js
-     */
-    public static final String ML_META_INDEX = ".ml-meta";
 
     public static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-";
     private static final String STATE_INDEX_NAME = ".ml-state";
@@ -52,6 +52,7 @@ import org.elasticsearch.search.sort.FieldSortBuilder;
 import org.elasticsearch.search.sort.SortBuilder;
 import org.elasticsearch.search.sort.SortBuilders;
 import org.elasticsearch.search.sort.SortOrder;
+import org.elasticsearch.xpack.ml.MlMetaIndex;
 import org.elasticsearch.xpack.ml.action.GetBucketsAction;
 import org.elasticsearch.xpack.ml.action.GetCategoriesAction;
 import org.elasticsearch.xpack.ml.action.GetInfluencersAction;
@@ -92,8 +93,6 @@ import java.util.function.BiFunction;
 import java.util.function.Consumer;
 import java.util.function.Supplier;
 
-import static org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex.ML_META_INDEX;
-
 public class JobProvider {
     private static final Logger LOGGER = Loggers.getLogger(JobProvider.class);
 
@@ -259,7 +258,7 @@ public class JobProvider {
                 .add(createDocIdSearch(stateIndex, Quantiles.TYPE.getPreferredName(), Quantiles.documentId(jobId)));
 
         for (String filterId : job.getAnalysisConfig().extractReferencedFilters()) {
-            msearch.add(createDocIdSearch(ML_META_INDEX, MlFilter.TYPE.getPreferredName(), filterId));
+            msearch.add(createDocIdSearch(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filterId));
         }
 
         msearch.execute(ActionListener.wrap(
@@ -322,7 +321,7 @@ public class JobProvider {
             } else if (Quantiles.TYPE.getPreferredName().equals(type)) {
                 paramsBuilder.setQuantiles(parseSearchHit(hit, Quantiles.PARSER, errorHandler));
             } else if (MlFilter.TYPE.getPreferredName().equals(type)) {
-                paramsBuilder.addFilter(parseSearchHit(hit, MlFilter.PARSER, errorHandler));
+                paramsBuilder.addFilter(parseSearchHit(hit, MlFilter.PARSER, errorHandler).build());
             } else {
                 errorHandler.accept(new IllegalStateException("Unexpected type [" + type + "]"));
             }
@@ -14,6 +14,7 @@ import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.action.AcknowledgedRestListener;
 import org.elasticsearch.xpack.ml.MachineLearning;
 import org.elasticsearch.xpack.ml.action.PutFilterAction;
+import org.elasticsearch.xpack.ml.job.config.MlFilter;
 
 import java.io.IOException;
 
@@ -21,14 +22,16 @@ public class RestPutFilterAction extends BaseRestHandler {
 
     public RestPutFilterAction(Settings settings, RestController controller) {
         super(settings);
-        controller.registerHandler(RestRequest.Method.PUT, MachineLearning.BASE_PATH + "filters", this);
+        controller.registerHandler(RestRequest.Method.PUT,
+                MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}", this);
     }
 
     @Override
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
+        String filterId = restRequest.param(MlFilter.ID.getPreferredName());
         XContentParser parser = restRequest.contentOrSourceParamParser();
-        PutFilterAction.Request putListRequest = PutFilterAction.Request.parseRequest(parser);
-        return channel -> client.execute(PutFilterAction.INSTANCE, putListRequest, new AcknowledgedRestListener<>(channel));
+        PutFilterAction.Request putFilterRequest = PutFilterAction.Request.parseRequest(filterId, parser);
+        return channel -> client.execute(PutFilterAction.INSTANCE, putFilterRequest, new AcknowledgedRestListener<>(channel));
     }
 
 }
@@ -44,7 +44,6 @@ import java.util.concurrent.ExecutorService;
 
 import static org.elasticsearch.mock.orig.Mockito.doAnswer;
 import static org.elasticsearch.mock.orig.Mockito.times;
-import static org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex.ML_META_INDEX;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.nullValue;
 import static org.mockito.Matchers.any;
@@ -125,7 +124,7 @@ public class MachineLearningTemplateRegistryTests extends ESTestCase {
                         .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
                         .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                 ))
-                .put(IndexMetaData.builder(AnomalyDetectorsIndex.ML_META_INDEX).settings(Settings.builder()
+                .put(IndexMetaData.builder(MlMetaIndex.INDEX_NAME).settings(Settings.builder()
                         .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                         .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
                         .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
@@ -136,7 +135,7 @@ public class MachineLearningTemplateRegistryTests extends ESTestCase {
                         .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                 ))
                 .put(IndexTemplateMetaData.builder(Auditor.NOTIFICATIONS_INDEX).version(Version.CURRENT.id).build())
-                .put(IndexTemplateMetaData.builder(AnomalyDetectorsIndex.ML_META_INDEX).version(Version.CURRENT.id).build())
+                .put(IndexTemplateMetaData.builder(MlMetaIndex.INDEX_NAME).version(Version.CURRENT.id).build())
                 .put(IndexTemplateMetaData.builder(AnomalyDetectorsIndex.jobStateIndexName()).version(Version.CURRENT.id).build())
                 .put(IndexTemplateMetaData.builder(
                         AnomalyDetectorsIndex.jobResultsIndexPrefix()).version(Version.CURRENT.id).build())
@@ -221,8 +220,9 @@ public class MachineLearningTemplateRegistryTests extends ESTestCase {
             PutIndexTemplateRequest request = captor.getValue();
             assertNotNull(request);
             assertEquals(templateRegistry.mlNotificationIndexSettings().build(), request.settings());
-            assertEquals(0, request.mappings().size());
-            assertEquals(Collections.singletonList(ML_META_INDEX), request.patterns());
+            assertEquals(1, request.mappings().size());
+            assertThat(request.mappings().containsKey(MapperService.DEFAULT_MAPPING), is(true));
+            assertEquals(Collections.singletonList(MlMetaIndex.INDEX_NAME), request.patterns());
             assertEquals(new Integer(Version.CURRENT.id), request.version());
         });
     }
@@ -294,7 +294,7 @@ public class MachineLearningTemplateRegistryTests extends ESTestCase {
     public void testAllTemplatesInstalled() {
         MetaData metaData = MetaData.builder()
                 .put(IndexTemplateMetaData.builder(Auditor.NOTIFICATIONS_INDEX).version(Version.CURRENT.id).build())
-                .put(IndexTemplateMetaData.builder(AnomalyDetectorsIndex.ML_META_INDEX).version(Version.CURRENT.id).build())
+                .put(IndexTemplateMetaData.builder(MlMetaIndex.INDEX_NAME).version(Version.CURRENT.id).build())
                 .put(IndexTemplateMetaData.builder(AnomalyDetectorsIndex.jobStateIndexName()).version(Version.CURRENT.id).build())
                 .put(IndexTemplateMetaData.builder(
                         AnomalyDetectorsIndex.jobResultsIndexPrefix()).version(Version.CURRENT.id).build()).build();
@@ -28,6 +28,7 @@ import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.ml.MachineLearning;
+import org.elasticsearch.xpack.ml.MlMetaIndex;
 import org.elasticsearch.xpack.ml.MlMetadata;
 import org.elasticsearch.xpack.ml.job.config.Job;
 import org.elasticsearch.xpack.ml.job.config.JobState;
@@ -355,7 +356,7 @@ public class OpenJobActionTests extends ESTestCase {
                                               String... jobIds) {
         List<String> indices = new ArrayList<>();
         indices.add(AnomalyDetectorsIndex.jobStateIndexName());
-        indices.add(AnomalyDetectorsIndex.ML_META_INDEX);
+        indices.add(MlMetaIndex.INDEX_NAME);
         indices.add(Auditor.NOTIFICATIONS_INDEX);
         indices.add(AnomalyDetectorsIndex.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndex.RESULTS_INDEX_DEFAULT);
         for (String indexName : indices) {
@@ -13,7 +13,9 @@ import org.elasticsearch.xpack.ml.support.AbstractStreamableXContentTestCase;
 import java.util.ArrayList;
 import java.util.List;
 
-public class CreateFilterActionRequestTests extends AbstractStreamableXContentTestCase<PutFilterAction.Request> {
+public class PutFilterActionRequestTests extends AbstractStreamableXContentTestCase<PutFilterAction.Request> {
+
+    private final String filterId = randomAlphaOfLengthBetween(1, 20);
 
     @Override
     protected Request createTestInstance() {
@@ -22,7 +24,7 @@ public class CreateFilterActionRequestTests extends AbstractStreamableXContentTe
         for (int i = 0; i < size; i++) {
             items.add(randomAlphaOfLengthBetween(1, 20));
         }
-        MlFilter filter = new MlFilter(randomAlphaOfLengthBetween(1, 20), items);
+        MlFilter filter = new MlFilter(filterId, items);
         return new PutFilterAction.Request(filter);
     }
 
@@ -33,7 +35,7 @@ public class CreateFilterActionRequestTests extends AbstractStreamableXContentTe
 
     @Override
     protected Request parseInstance(XContentParser parser) {
-        return PutFilterAction.Request.parseRequest(parser);
+        return PutFilterAction.Request.parseRequest(filterId, parser);
     }
 
 }
@@ -32,7 +32,7 @@ public class MlFilterTests extends AbstractSerializingTestCase<MlFilter> {
 
     @Override
     protected MlFilter parseInstance(XContentParser parser) {
-        return MlFilter.PARSER.apply(parser, null);
+        return MlFilter.PARSER.apply(parser, null).build();
     }
 
     public void testNullId() {
@@ -2,9 +2,15 @@
   "xpack.ml.put_filter": {
     "methods": [ "PUT" ],
     "url": {
-      "path": "/_xpack/ml/filters",
-      "paths": [ "/_xpack/ml/filters" ],
-      "params": {}
+      "path": "/_xpack/ml/filters/{filter_id}",
+      "paths": [ "/_xpack/ml/filters/{filter_id}" ],
+      "parts": {
+        "filter_id": {
+          "type": "string",
+          "required": true,
+          "description": "The ID of the filter to create"
+        }
+      }
     },
     "body": {
       "description" : "The filter details",
@@ -1,18 +1,31 @@
 ---
 setup:
 
+  - do:
+      index:
+        index: .ml-meta
+        type: doc
+        id: imposter-filter
+        body: >
+          {
+            "filter_id": "imposter",
+            "items": ["a", "b"],
+            "type": "imposter"
+          }
+
   - do:
       xpack.ml.put_filter:
+        filter_id: filter-foo
        body: >
          {
-            "id": "filter-foo",
            "items": ["abc", "xyz"]
          }
 
   - do:
       xpack.ml.put_filter:
+        filter_id: filter-foo2
        body: >
          {
-            "id": "filter-foo2",
            "items": ["123", "lmnop"]
          }
 
@@ -37,7 +50,7 @@ setup:
   - match: { count: 1 }
   - match:
       filters.0:
-        id: "filter-foo"
+        filter_id: "filter-foo"
         items: ["abc", "xyz"]
 
 ---
@@ -49,12 +62,12 @@ setup:
   - match: { count: 2 }
   - match:
       filters.0:
-        id: "filter-foo"
+        filter_id: "filter-foo"
         items: ["abc", "xyz"]
 
   - match:
       filters.1:
-        id: "filter-foo2"
+        filter_id: "filter-foo2"
         items: ["123", "lmnop"]
 
   - do:
@@ -74,24 +87,24 @@ setup:
         from: 0
 
   - do:
-      catch: request
-      xpack.ml.get_filters:
-        filter_id: "filter-foo"
-        size: 1
+      catch: request
+      xpack.ml.get_filters:
+        filter_id: "filter-foo"
+        size: 1
 
   - do:
-      catch: request
-      xpack.ml.get_filters:
-        filter_id: "filter-foo"
-        from: 0
-        size: 1
+      catch: request
+      xpack.ml.get_filters:
+        filter_id: "filter-foo"
+        from: 0
+        size: 1
 ---
 "Test create filter api":
   - do:
       xpack.ml.put_filter:
+        filter_id: filter-foo2
        body: >
          {
-            "id": "filter-foo2",
            "items": ["abc", "xyz"]
          }
 
@@ -104,16 +117,18 @@ setup:
   - match: { count: 1 }
   - match:
       filters.0:
-        id: "filter-foo2"
+        filter_id: "filter-foo2"
         items: ["abc", "xyz"]
 
 ---
-"Test create filter api without ID":
+"Test create filter api with mismatching body ID":
   - do:
       catch: /illegal_argument_exception/
       xpack.ml.put_filter:
+        filter_id: "uri_id"
        body: >
          {
+            "filter_id": "body_id",
            "items": ["abc", "xyz"]
          }
 
@@ -168,7 +183,7 @@ setup:
   - match: { count: 1 }
   - match:
       filters.0:
-        id: "filter-foo"
+        filter_id: "filter-foo"
         items: ["abc", "xyz"]
 
   - do:
@@ -3,9 +3,10 @@
 
   - do:
       xpack.ml.put_filter:
+        filter_id: filter-foo
        body: >
          {
-            "id": "filter-foo",
+            "filter_id": "filter-foo",
            "items": ["abc", "xyz"]
          }
 
@@ -24,7 +24,7 @@ integTestRunner {
           'ml/delete_model_snapshot/Test delete snapshot missing snapshotId',
           'ml/delete_model_snapshot/Test delete snapshot missing job_id',
           'ml/delete_model_snapshot/Test delete with in-use model',
-          'ml/filter_crud/Test create filter api without ID',
+          'ml/filter_crud/Test create filter api with mismatching body ID',
          'ml/filter_crud/Test get filter API with bad ID',
          'ml/filter_crud/Test invalid param combinations',
          'ml/filter_crud/Test non-existing filter',