package org.elasticsearch.client;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.enrich.DeletePolicyRequest;
import org.elasticsearch.client.enrich.ExecutePolicyRequest;
import org.elasticsearch.client.enrich.ExecutePolicyResponse;
import org.elasticsearch.client.enrich.GetPolicyRequest;
import org.elasticsearch.client.enrich.GetPolicyResponse;
import org.elasticsearch.client.enrich.PutPolicyRequest;
import org.elasticsearch.client.enrich.StatsRequest;
import org.elasticsearch.client.enrich.StatsResponse;

import java.io.IOException;
import java.util.Collections;

/**
 * A wrapper for the {@link RestHighLevelClient} that provides methods for
 * accessing the Elastic enrich related methods.
 * <p>
 * See the X-Pack Enrich Policy APIs on elastic.co for more information.
 */
public final class EnrichClient {

    private final RestHighLevelClient restHighLevelClient;

    EnrichClient(RestHighLevelClient restHighLevelClient) {
        this.restHighLevelClient = restHighLevelClient;
    }

    /**
     * Executes the put policy api, which stores an enrich policy.
     *
     * @param request the {@link PutPolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public AcknowledgedResponse putPolicy(PutPolicyRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(
            request,
            EnrichRequestConverters::putPolicy,
            options,
            AcknowledgedResponse::fromXContent,
            Collections.emptySet()
        );
    }

    /**
     * Asynchronously executes the put policy api, which stores an enrich policy.
     *
     * @param request the {@link PutPolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     * @return cancellable that may be used to cancel the request
     */
    public Cancellable putPolicyAsync(PutPolicyRequest request,
                                      RequestOptions options,
                                      ActionListener<AcknowledgedResponse> listener) {
        return restHighLevelClient.performRequestAsyncAndParseEntity(
            request,
            EnrichRequestConverters::putPolicy,
            options,
            AcknowledgedResponse::fromXContent,
            listener,
            Collections.emptySet()
        );
    }

    /**
     * Executes the delete policy api, which deletes an enrich policy.
     *
     * @param request the {@link DeletePolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public AcknowledgedResponse deletePolicy(DeletePolicyRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(
            request,
            EnrichRequestConverters::deletePolicy,
            options,
            AcknowledgedResponse::fromXContent,
            Collections.emptySet()
        );
    }

    /**
     * Asynchronously executes the delete policy api, which deletes an enrich policy.
     *
     * @param request the {@link DeletePolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     * @return cancellable that may be used to cancel the request
     */
    public Cancellable deletePolicyAsync(DeletePolicyRequest request,
                                         RequestOptions options,
                                         ActionListener<AcknowledgedResponse> listener) {
        return restHighLevelClient.performRequestAsyncAndParseEntity(
            request,
            EnrichRequestConverters::deletePolicy,
            options,
            AcknowledgedResponse::fromXContent,
            listener,
            Collections.emptySet()
        );
    }

    /**
     * Executes the get policy api, which retrieves an enrich policy.
     *
     * @param request the {@link GetPolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public GetPolicyResponse getPolicy(GetPolicyRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(
            request,
            EnrichRequestConverters::getPolicy,
            options,
            GetPolicyResponse::fromXContent,
            Collections.emptySet()
        );
    }

    /**
     * Asynchronously executes the get policy api, which retrieves an enrich policy.
     *
     * @param request the {@link GetPolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     * @return cancellable that may be used to cancel the request
     */
    public Cancellable getPolicyAsync(GetPolicyRequest request,
                                      RequestOptions options,
                                      ActionListener<GetPolicyResponse> listener) {
        return restHighLevelClient.performRequestAsyncAndParseEntity(
            request,
            EnrichRequestConverters::getPolicy,
            options,
            GetPolicyResponse::fromXContent,
            listener,
            Collections.emptySet()
        );
    }

    /**
     * Executes the enrich stats api, which retrieves enrich related stats.
     *
     * @param request the {@link StatsRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public StatsResponse stats(StatsRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(
            request,
            EnrichRequestConverters::stats,
            options,
            StatsResponse::fromXContent,
            Collections.emptySet()
        );
    }

    /**
     * Asynchronously executes the enrich stats api, which retrieves enrich related stats.
     *
     * @param request the {@link StatsRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     * @return cancellable that may be used to cancel the request
     */
    public Cancellable statsAsync(StatsRequest request,
                                  RequestOptions options,
                                  ActionListener<StatsResponse> listener) {
        return restHighLevelClient.performRequestAsyncAndParseEntity(
            request,
            EnrichRequestConverters::stats,
            options,
            StatsResponse::fromXContent,
            listener,
            Collections.emptySet()
        );
    }

    /**
     * Executes the execute policy api, which executes an enrich policy.
     *
     * @param request the {@link ExecutePolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public ExecutePolicyResponse executePolicy(ExecutePolicyRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(
            request,
            EnrichRequestConverters::executePolicy,
            options,
            ExecutePolicyResponse::fromXContent,
            Collections.emptySet()
        );
    }

    /**
     * Asynchronously executes the execute policy api, which executes an enrich policy.
     *
     * @param request the {@link ExecutePolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     * @return cancellable that may be used to cancel the request
     */
    public Cancellable executePolicyAsync(ExecutePolicyRequest request,
                                          RequestOptions options,
                                          ActionListener<ExecutePolicyResponse> listener) {
        return restHighLevelClient.performRequestAsyncAndParseEntity(
            request,
            EnrichRequestConverters::executePolicy,
            options,
            ExecutePolicyResponse::fromXContent,
            listener,
            Collections.emptySet()
        );
    }
}
package org.elasticsearch.client;

import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.enrich.DeletePolicyRequest;
import org.elasticsearch.client.enrich.ExecutePolicyRequest;
import org.elasticsearch.client.enrich.GetPolicyRequest;
import org.elasticsearch.client.enrich.PutPolicyRequest;
import org.elasticsearch.client.enrich.StatsRequest;

import java.io.IOException;

import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
import static org.elasticsearch.client.RequestConverters.createEntity;

/**
 * Converts the enrich API request objects into low-level {@link Request}s
 * understood by the {@link RestClient}.
 */
final class EnrichRequestConverters {

    // PUT /_enrich/policy/{name} with the policy definition as the request body
    static Request putPolicy(PutPolicyRequest putPolicyRequest) throws IOException {
        Request request = new Request(
            HttpPut.METHOD_NAME,
            new RequestConverters.EndpointBuilder()
                .addPathPartAsIs("_enrich", "policy")
                .addPathPart(putPolicyRequest.getName())
                .build()
        );
        request.setEntity(createEntity(putPolicyRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    // DELETE /_enrich/policy/{name}
    static Request deletePolicy(DeletePolicyRequest deletePolicyRequest) {
        return new Request(
            HttpDelete.METHOD_NAME,
            new RequestConverters.EndpointBuilder()
                .addPathPartAsIs("_enrich", "policy")
                .addPathPart(deletePolicyRequest.getName())
                .build()
        );
    }

    // GET /_enrich/policy/{name1},{name2} — an empty name list fetches all policies
    static Request getPolicy(GetPolicyRequest getPolicyRequest) {
        return new Request(
            HttpGet.METHOD_NAME,
            new RequestConverters.EndpointBuilder()
                .addPathPartAsIs("_enrich", "policy")
                .addCommaSeparatedPathParts(getPolicyRequest.getNames())
                .build()
        );
    }

    // GET /_enrich/_stats — the request object carries no parameters
    static Request stats(StatsRequest statsRequest) {
        return new Request(
            HttpGet.METHOD_NAME,
            new RequestConverters.EndpointBuilder()
                .addPathPartAsIs("_enrich", "_stats")
                .build()
        );
    }

    // POST /_enrich/policy/{name}/_execute[?wait_for_completion=...]
    static Request executePolicy(ExecutePolicyRequest executePolicyRequest) {
        Request request = new Request(
            HttpPost.METHOD_NAME,
            new RequestConverters.EndpointBuilder()
                .addPathPartAsIs("_enrich", "policy")
                .addPathPart(executePolicyRequest.getName())
                .addPathPartAsIs("_execute")
                .build()
        );
        if (executePolicyRequest.getWaitForCompletion() != null) {
            request.addParameter("wait_for_completion", executePolicyRequest.getWaitForCompletion().toString());
        }
        return request;
    }
}
public class RestHighLevelClient implements Closeable { return transformClient; } + public EnrichClient enrich() { + return enrichClient; + } + /** * Executes a bulk request using the Bulk API. * See Bulk API on elastic.co diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/enrich/DeletePolicyRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/enrich/DeletePolicyRequest.java new file mode 100644 index 00000000000..745aeb30914 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/enrich/DeletePolicyRequest.java @@ -0,0 +1,38 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.enrich; + +import org.elasticsearch.client.Validatable; +import org.elasticsearch.common.Strings; + +public final class DeletePolicyRequest implements Validatable { + + private final String name; + + public DeletePolicyRequest(String name) { + if (Strings.hasLength(name) == false) { + throw new IllegalArgumentException("name must be a non-null and non-empty string"); + } + this.name = name; + } + + public String getName() { + return name; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/enrich/ExecutePolicyRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/enrich/ExecutePolicyRequest.java new file mode 100644 index 00000000000..f5f4151d2e6 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/enrich/ExecutePolicyRequest.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.enrich; + +import org.elasticsearch.client.Validatable; + +public final class ExecutePolicyRequest implements Validatable { + + private final String name; + private Boolean waitForCompletion; + + public ExecutePolicyRequest(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public Boolean getWaitForCompletion() { + return waitForCompletion; + } + + public void setWaitForCompletion(boolean waitForCompletion) { + this.waitForCompletion = waitForCompletion; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/enrich/ExecutePolicyResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/enrich/ExecutePolicyResponse.java new file mode 100644 index 00000000000..099a8c4c329 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/enrich/ExecutePolicyResponse.java @@ -0,0 +1,85 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.enrich; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; + +public final class ExecutePolicyResponse { + + private static final ParseField TASK_FIELD = new ParseField("task"); + private static final ParseField STATUS_FIELD = new ParseField("status"); + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "execute_policy_response", + true, + args -> new ExecutePolicyResponse((String) args[0], (ExecutionStatus) args[1]) + ); + + static { + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TASK_FIELD); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ExecutionStatus.PARSER, STATUS_FIELD); + } + + public static ExecutePolicyResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + private final String taskId; + private final ExecutionStatus executionStatus; + + ExecutePolicyResponse(String taskId, ExecutionStatus executionStatus) { + this.taskId = taskId; + this.executionStatus = executionStatus; + } + + public String getTaskId() { + return taskId; + } + + public ExecutionStatus getExecutionStatus() { + return executionStatus; + } + + public static final class ExecutionStatus { + + private static final ParseField PHASE_FIELD = new ParseField("phase"); + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "execution_status", + true, + args -> new ExecutionStatus((String) args[0]) + ); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), PHASE_FIELD); + } + + private final String phase; + + ExecutionStatus(String phase) { + this.phase = phase; + } + + public String getPhase() { + return phase; + } + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/enrich/GetPolicyRequest.java 
package org.elasticsearch.client.enrich;

import org.elasticsearch.client.Validatable;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/**
 * Request for the get enrich policy API. With no names supplied, all policies are fetched.
 */
public final class GetPolicyRequest implements Validatable {

    private final List<String> names;

    /** Fetches all enrich policies. */
    public GetPolicyRequest() {
        this(Collections.emptyList());
    }

    public GetPolicyRequest(String... names) {
        this(Arrays.asList(names));
    }

    public GetPolicyRequest(List<String> names) {
        this.names = names;
    }

    /** Returns the names of the policies to fetch; empty means all policies. */
    public List<String> getNames() {
        return names;
    }
}
package org.elasticsearch.client.enrich;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.List;

/**
 * Response of the get enrich policy API: the list of matching policies.
 */
public final class GetPolicyResponse {

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<GetPolicyResponse, Void> PARSER = new ConstructingObjectParser<>(
        "get_policy_response",
        true,
        args -> new GetPolicyResponse((List<NamedPolicy>) args[0])
    );

    // Each entry of the "policies" array wraps the actual policy in a "config" object;
    // this parser unwraps that envelope.
    private static final ConstructingObjectParser<NamedPolicy, Void> CONFIG_PARSER = new ConstructingObjectParser<>(
        "config",
        true,
        args -> (NamedPolicy) args[0]
    );

    static {
        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(),
            CONFIG_PARSER::apply, new ParseField("policies"));
        CONFIG_PARSER.declareObject(ConstructingObjectParser.constructorArg(),
            (p, c) -> NamedPolicy.fromXContent(p), new ParseField("config"));
    }

    private final List<NamedPolicy> policies;

    public static GetPolicyResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.apply(parser, null);
    }

    public GetPolicyResponse(List<NamedPolicy> policies) {
        this.policies = policies;
    }

    /** Returns the policies that matched the request. */
    public List<NamedPolicy> getPolicies() {
        return policies;
    }
}
package org.elasticsearch.client.enrich;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.List;

/**
 * An enrich policy as returned by the get enrich policy API. The policy type
 * (e.g. "match") is the single top-level field name wrapping the policy body.
 */
public final class NamedPolicy {

    static final ParseField NAME_FIELD = new ParseField("name");
    static final ParseField QUERY_FIELD = new ParseField("query");
    static final ParseField INDICES_FIELD = new ParseField("indices");
    static final ParseField MATCH_FIELD_FIELD = new ParseField("match_field");
    static final ParseField ENRICH_FIELDS_FIELD = new ParseField("enrich_fields");

    // The parse context carries the policy type read from the wrapping field name.
    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<NamedPolicy, String> PARSER = new ConstructingObjectParser<>(
        "policy",
        true,
        (args, policyType) -> new NamedPolicy(
            policyType,
            (String) args[0],
            (BytesReference) args[1],
            (List<String>) args[2],
            (String) args[3],
            (List<String>) args[4]
        )
    );

    static {
        declareParserOptions(PARSER);
    }

    private static void declareParserOptions(ConstructingObjectParser<NamedPolicy, String> parser) {
        parser.declareString(ConstructingObjectParser.constructorArg(), NAME_FIELD);
        // The query is kept as raw bytes rather than parsed into a QueryBuilder,
        // so the client does not depend on the full query DSL.
        parser.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> {
            XContentBuilder builder = XContentBuilder.builder(p.contentType().xContent());
            builder.copyCurrentStructure(p);
            return BytesArray.bytes(builder);
        }, QUERY_FIELD);
        parser.declareStringArray(ConstructingObjectParser.constructorArg(), INDICES_FIELD);
        parser.declareString(ConstructingObjectParser.constructorArg(), MATCH_FIELD_FIELD);
        parser.declareStringArray(ConstructingObjectParser.constructorArg(), ENRICH_FIELDS_FIELD);
    }

    /**
     * Parses a policy of the shape {@code {"<type>": { ...policy body... }}},
     * using the single wrapping field name as the policy type.
     *
     * @throws ParsingException if the wrapping object structure is not as expected
     */
    public static NamedPolicy fromXContent(XContentParser parser) throws IOException {
        XContentParser.Token token = parser.currentToken();
        if (token != XContentParser.Token.START_OBJECT) {
            token = parser.nextToken();
        }
        if (token != XContentParser.Token.START_OBJECT) {
            throw new ParsingException(parser.getTokenLocation(), "unexpected token");
        }
        token = parser.nextToken();
        if (token != XContentParser.Token.FIELD_NAME) {
            throw new ParsingException(parser.getTokenLocation(), "unexpected token");
        }
        String policyType = parser.currentName();
        NamedPolicy policy = PARSER.parse(parser, policyType);
        token = parser.nextToken();
        if (token != XContentParser.Token.END_OBJECT) {
            throw new ParsingException(parser.getTokenLocation(), "unexpected token");
        }
        return policy;
    }

    private final String type;
    private final String name;
    private final BytesReference query;
    private final List<String> indices;
    private final String matchField;
    private final List<String> enrichFields;

    NamedPolicy(String type, String name, BytesReference query, List<String> indices,
                String matchField, List<String> enrichFields) {
        this.type = type;
        this.name = name;
        this.query = query;
        this.indices = indices;
        this.matchField = matchField;
        this.enrichFields = enrichFields;
    }

    public String getType() {
        return type;
    }

    public String getName() {
        return name;
    }

    /** Returns the source query as raw bytes, or {@code null} if the policy has none. */
    public BytesReference getQuery() {
        return query;
    }

    public List<String> getIndices() {
        return indices;
    }

    public String getMatchField() {
        return matchField;
    }

    public List<String> getEnrichFields() {
        return enrichFields;
    }
}
package org.elasticsearch.client.enrich;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.query.QueryBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * Request object for the enrich put policy API. Carries everything needed to
 * store an enrich policy: its name, type, source indices, the field to match
 * on, the fields to copy from matched documents and an optional query that
 * filters the source indices when the policy is executed.
 */
public final class PutPolicyRequest implements Validatable, ToXContentObject {

    private final String name;
    private final String type;
    // Optional filter serialized as raw bytes; null means "no query".
    private BytesReference query;
    private final List<String> indices;
    private final String matchField;
    private final List<String> enrichFields;

    /**
     * @param name         the policy name, required, non-empty
     * @param type         the policy type (for example {@code match}), required, non-empty
     * @param indices      the source indices the policy reads from, required, at least one
     * @param matchField   the field in the source indices to match on, required, non-empty
     * @param enrichFields the fields copied from matching documents, required, at least one
     * @throws IllegalArgumentException if any required argument is null or empty
     */
    public PutPolicyRequest(String name, String type, List<String> indices, String matchField, List<String> enrichFields) {
        if (Strings.hasLength(name) == false) {
            throw new IllegalArgumentException("name must be a non-null and non-empty string");
        }
        if (Strings.hasLength(type) == false) {
            throw new IllegalArgumentException("type must be a non-null and non-empty string");
        }
        if (indices == null || indices.isEmpty()) {
            throw new IllegalArgumentException("indices must be specified");
        }
        if (Strings.hasLength(matchField) == false) {
            throw new IllegalArgumentException("matchField must be a non-null and non-empty string");
        }
        if (enrichFields == null || enrichFields.isEmpty()) {
            throw new IllegalArgumentException("enrichFields must be specified");
        }

        this.name = name;
        this.type = type;
        // Defensive copies so neither later caller-side mutation of the input
        // lists nor mutation of the lists returned by the getters can change
        // the request after construction.
        this.indices = Collections.unmodifiableList(new ArrayList<>(indices));
        this.matchField = matchField;
        this.enrichFields = Collections.unmodifiableList(new ArrayList<>(enrichFields));
    }

    public String getName() {
        return name;
    }

    public String getType() {
        return type;
    }

    public BytesReference getQuery() {
        return query;
    }

    /**
     * Sets the optional query that filters the source indices during policy execution.
     */
    public void setQuery(BytesReference query) {
        this.query = query;
    }

    /**
     * Convenience variant of {@link #setQuery(BytesReference)} that serializes
     * the given query builder to JSON bytes.
     *
     * @throws IOException if serializing the query fails
     */
    public void setQuery(QueryBuilder query) throws IOException {
        setQuery(xContentToBytes(query));
    }

    /** @return an unmodifiable view of the source indices */
    public List<String> getIndices() {
        return indices;
    }

    public String getMatchField() {
        return matchField;
    }

    /** @return an unmodifiable view of the enrich fields */
    public List<String> getEnrichFields() {
        return enrichFields;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        {
            // The policy type is the key of the nested object, matching the
            // server-side representation of an enrich policy.
            builder.startObject(type);
            {
                builder.field(NamedPolicy.INDICES_FIELD.getPreferredName(), indices);
                if (query != null) {
                    // Re-parse the raw query bytes into a map so the builder can
                    // embed them in whatever content type it is producing.
                    builder.field(NamedPolicy.QUERY_FIELD.getPreferredName(), asMap(query, builder.contentType()));
                }
                builder.field(NamedPolicy.MATCH_FIELD_FIELD.getPreferredName(), matchField);
                builder.field(NamedPolicy.ENRICH_FIELDS_FIELD.getPreferredName(), enrichFields);
            }
            builder.endObject();
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        PutPolicyRequest that = (PutPolicyRequest) o;
        return Objects.equals(name, that.name) &&
            Objects.equals(type, that.type) &&
            Objects.equals(query, that.query) &&
            Objects.equals(indices, that.indices) &&
            Objects.equals(matchField, that.matchField) &&
            Objects.equals(enrichFields, that.enrichFields);
    }

    @Override
    public int hashCode() {
        return Objects.hash(name, type, query, indices, matchField, enrichFields);
    }

    // Serializes an object to JSON bytes; used by setQuery(QueryBuilder).
    private static BytesReference xContentToBytes(ToXContentObject object) throws IOException {
        try (XContentBuilder builder = JsonXContent.contentBuilder()) {
            object.toXContent(builder, ToXContentObject.EMPTY_PARAMS);
            return BytesReference.bytes(builder);
        }
    }

    // Converts raw query bytes back into a map, or null for a null reference.
    static Map<String, Object> asMap(BytesReference bytesReference, XContentType xContentType) {
        return bytesReference == null ? null : XContentHelper.convertToMap(bytesReference, true, xContentType).v2();
    }
}
package org.elasticsearch.client.enrich;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.tasks.TaskInfo;

import java.util.List;
import java.util.Objects;

/**
 * Response of the enrich stats API. Holds the currently executing policies and
 * per-node coordinator statistics.
 */
public final class StatsResponse {

    // Constants made final: parser field names are immutable by contract.
    private static final ParseField EXECUTING_POLICIES_FIELD = new ParseField("executing_policies");
    private static final ParseField COORDINATOR_STATS_FIELD = new ParseField("coordinator_stats");

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<StatsResponse, Void> PARSER = new ConstructingObjectParser<>(
        "stats_response",
        true,
        args -> new StatsResponse((List<ExecutingPolicy>) args[0], (List<CoordinatorStats>) args[1])
    );

    static {
        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), ExecutingPolicy.PARSER::apply, EXECUTING_POLICIES_FIELD);
        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), CoordinatorStats.PARSER::apply, COORDINATOR_STATS_FIELD);
    }

    /** Parses a stats response from the given parser. */
    public static StatsResponse fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    private final List<ExecutingPolicy> executingPolicies;
    private final List<CoordinatorStats> coordinatorStats;

    public StatsResponse(List<ExecutingPolicy> executingPolicies, List<CoordinatorStats> coordinatorStats) {
        this.executingPolicies = executingPolicies;
        this.coordinatorStats = coordinatorStats;
    }

    public List<ExecutingPolicy> getExecutingPolicies() {
        return executingPolicies;
    }

    public List<CoordinatorStats> getCoordinatorStats() {
        return coordinatorStats;
    }

    /**
     * Per-node statistics of the enrich coordinator, which funnels enrich
     * lookups executed by ingest processors.
     */
    public static final class CoordinatorStats {

        static final ParseField NODE_ID_FIELD = new ParseField("node_id");
        static final ParseField QUEUE_SIZE_FIELD = new ParseField("queue_size");
        static final ParseField REMOTE_REQUESTS_CONCURRENT_FIELD = new ParseField("remote_requests_current");
        static final ParseField REMOTE_REQUESTS_TOTAL_FIELD = new ParseField("remote_requests_total");
        static final ParseField EXECUTED_SEARCHES_FIELD = new ParseField("executed_searches_total");

        private static final ConstructingObjectParser<CoordinatorStats, Void> PARSER = new ConstructingObjectParser<>(
            "coordinator_stats_item",
            true,
            args -> new CoordinatorStats((String) args[0], (int) args[1], (int) args[2], (long) args[3], (long) args[4])
        );

        static {
            PARSER.declareString(ConstructingObjectParser.constructorArg(), NODE_ID_FIELD);
            PARSER.declareInt(ConstructingObjectParser.constructorArg(), QUEUE_SIZE_FIELD);
            PARSER.declareInt(ConstructingObjectParser.constructorArg(), REMOTE_REQUESTS_CONCURRENT_FIELD);
            PARSER.declareLong(ConstructingObjectParser.constructorArg(), REMOTE_REQUESTS_TOTAL_FIELD);
            PARSER.declareLong(ConstructingObjectParser.constructorArg(), EXECUTED_SEARCHES_FIELD);
        }

        private final String nodeId;
        private final int queueSize;
        private final int remoteRequestsCurrent;
        private final long remoteRequestsTotal;
        private final long executedSearchesTotal;

        public CoordinatorStats(String nodeId,
                                int queueSize,
                                int remoteRequestsCurrent,
                                long remoteRequestsTotal,
                                long executedSearchesTotal) {
            this.nodeId = nodeId;
            this.queueSize = queueSize;
            this.remoteRequestsCurrent = remoteRequestsCurrent;
            this.remoteRequestsTotal = remoteRequestsTotal;
            this.executedSearchesTotal = executedSearchesTotal;
        }

        public String getNodeId() {
            return nodeId;
        }

        public int getQueueSize() {
            return queueSize;
        }

        public int getRemoteRequestsCurrent() {
            return remoteRequestsCurrent;
        }

        public long getRemoteRequestsTotal() {
            return remoteRequestsTotal;
        }

        public long getExecutedSearchesTotal() {
            return executedSearchesTotal;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            CoordinatorStats stats = (CoordinatorStats) o;
            return Objects.equals(nodeId, stats.nodeId) &&
                queueSize == stats.queueSize &&
                remoteRequestsCurrent == stats.remoteRequestsCurrent &&
                remoteRequestsTotal == stats.remoteRequestsTotal &&
                executedSearchesTotal == stats.executedSearchesTotal;
        }

        @Override
        public int hashCode() {
            return Objects.hash(nodeId, queueSize, remoteRequestsCurrent, remoteRequestsTotal, executedSearchesTotal);
        }
    }

    /**
     * A policy that is currently being executed, together with the task
     * information of the execution.
     */
    public static class ExecutingPolicy {

        static final ParseField NAME_FIELD = new ParseField("name");
        static final ParseField TASK_FIELD = new ParseField("task");

        private static final ConstructingObjectParser<ExecutingPolicy, Void> PARSER = new ConstructingObjectParser<>(
            "executing_policy_item",
            true,
            args -> new ExecutingPolicy((String) args[0], (TaskInfo) args[1])
        );

        static {
            PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME_FIELD);
            PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> TaskInfo.fromXContent(p), TASK_FIELD);
        }

        private final String name;
        private final TaskInfo taskInfo;

        public ExecutingPolicy(String name, TaskInfo taskInfo) {
            this.name = name;
            this.taskInfo = taskInfo;
        }

        public String getName() {
            return name;
        }

        public TaskInfo getTaskInfo() {
            return taskInfo;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            ExecutingPolicy that = (ExecutingPolicy) o;
            // Null-safe comparison, consistent with CoordinatorStats.equals;
            // a lenient parser may construct instances with null fields.
            return Objects.equals(name, that.name) &&
                Objects.equals(taskInfo, that.taskInfo);
        }

        @Override
        public int hashCode() {
            return Objects.hash(name, taskInfo);
        }
    }

}
package org.elasticsearch.client;

import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.enrich.DeletePolicyRequest;
import org.elasticsearch.client.enrich.ExecutePolicyRequest;
import org.elasticsearch.client.enrich.ExecutePolicyResponse;
import org.elasticsearch.client.enrich.GetPolicyRequest;
import org.elasticsearch.client.enrich.GetPolicyResponse;
import org.elasticsearch.client.enrich.PutPolicyRequest;
import org.elasticsearch.client.enrich.StatsRequest;
import org.elasticsearch.client.enrich.StatsResponse;
import org.elasticsearch.client.indices.CreateIndexRequest;

import java.util.Collections;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

/**
 * Integration test for the enrich client: exercises the full lifecycle of an
 * enrich policy (put, get, stats, execute, delete) against a live cluster.
 * The steps are order-dependent; each relies on the state left by the previous one.
 */
public class EnrichIT extends ESRestHighLevelClientTestCase {

    public void testCRUD() throws Exception {
        // Create a source index with a keyword field for the policy to match on.
        CreateIndexRequest createIndexRequest = new CreateIndexRequest("my-index")
            .mapping(Collections.singletonMap("properties", Collections.singletonMap("enrich_key",
                Collections.singletonMap("type", "keyword"))));
        highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);

        // Store a policy; execute() randomly runs the sync or async variant.
        final EnrichClient enrichClient = highLevelClient().enrich();
        PutPolicyRequest putPolicyRequest = new PutPolicyRequest("my-policy", "match",
            Collections.singletonList("my-index"), "enrich_key", Collections.singletonList("enrich_value"));
        AcknowledgedResponse putPolicyResponse = execute(putPolicyRequest, enrichClient::putPolicy, enrichClient::putPolicyAsync);
        assertThat(putPolicyResponse.isAcknowledged(), is(true));

        // Fetch it back, randomly by name or via the "get all" form; either way
        // it is the only policy, so the response must echo what was stored.
        GetPolicyRequest getPolicyRequest = randomBoolean() ? new GetPolicyRequest("my-policy") : new GetPolicyRequest();
        GetPolicyResponse getPolicyResponse = execute(getPolicyRequest, enrichClient::getPolicy, enrichClient::getPolicyAsync);
        assertThat(getPolicyResponse.getPolicies().size(), equalTo(1));
        assertThat(getPolicyResponse.getPolicies().get(0).getType(), equalTo(putPolicyRequest.getType()));
        assertThat(getPolicyResponse.getPolicies().get(0).getIndices(), equalTo(putPolicyRequest.getIndices()));
        assertThat(getPolicyResponse.getPolicies().get(0).getMatchField(), equalTo(putPolicyRequest.getMatchField()));
        assertThat(getPolicyResponse.getPolicies().get(0).getEnrichFields(), equalTo(putPolicyRequest.getEnrichFields()));

        // Stats: nothing is executing yet, but the single-node cluster must
        // report one coordinator entry with sane (non-negative) counters.
        StatsRequest statsRequest = new StatsRequest();
        StatsResponse statsResponse = execute(statsRequest, enrichClient::stats, enrichClient::statsAsync);
        assertThat(statsResponse.getExecutingPolicies().size(), equalTo(0));
        assertThat(statsResponse.getCoordinatorStats().size(), equalTo(1));
        assertThat(statsResponse.getCoordinatorStats().get(0).getNodeId(), notNullValue());
        assertThat(statsResponse.getCoordinatorStats().get(0).getQueueSize(), greaterThanOrEqualTo(0));
        assertThat(statsResponse.getCoordinatorStats().get(0).getRemoteRequestsCurrent(), greaterThanOrEqualTo(0));
        assertThat(statsResponse.getCoordinatorStats().get(0).getRemoteRequestsTotal(), greaterThanOrEqualTo(0L));
        assertThat(statsResponse.getCoordinatorStats().get(0).getExecutedSearchesTotal(), greaterThanOrEqualTo(0L));

        // Execute the policy; by default this waits for completion, so the
        // reported phase must be COMPLETE.
        ExecutePolicyRequest executePolicyRequest = new ExecutePolicyRequest("my-policy");
        ExecutePolicyResponse executePolicyResponse =
            execute(executePolicyRequest, enrichClient::executePolicy, enrichClient::executePolicyAsync);
        assertThat(executePolicyResponse.getExecutionStatus().getPhase(), equalTo("COMPLETE"));

        // Delete the policy and verify the cluster no longer knows about it.
        DeletePolicyRequest deletePolicyRequest = new DeletePolicyRequest("my-policy");
        AcknowledgedResponse deletePolicyResponse =
            execute(deletePolicyRequest, enrichClient::deletePolicy, enrichClient::deletePolicyAsync);
        assertThat(deletePolicyResponse.isAcknowledged(), is(true));

        getPolicyRequest = new GetPolicyRequest();
        getPolicyResponse = execute(getPolicyRequest, enrichClient::getPolicy, enrichClient::getPolicyAsync);
        assertThat(getPolicyResponse.getPolicies().size(), equalTo(0));
    }

}
+ */ +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.enrich.DeletePolicyRequest; +import org.elasticsearch.client.enrich.ExecutePolicyRequest; +import org.elasticsearch.client.enrich.GetPolicyRequest; +import org.elasticsearch.client.enrich.PutPolicyRequest; +import org.elasticsearch.client.enrich.PutPolicyRequestTests; +import org.elasticsearch.client.enrich.StatsRequest; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class EnrichRequestConvertersTests extends ESTestCase { + + public void testPutPolicy() throws Exception { + PutPolicyRequest request = PutPolicyRequestTests.createTestInstance(); + Request result = EnrichRequestConverters.putPolicy(request); + + assertThat(result.getMethod(), equalTo(HttpPut.METHOD_NAME)); + assertThat(result.getEndpoint(), equalTo("/_enrich/policy/" + request.getName())); + assertThat(result.getParameters().size(), equalTo(0)); + RequestConvertersTests.assertToXContentBody(request, result.getEntity()); + } + + public void testDeletePolicy() { + DeletePolicyRequest request = new DeletePolicyRequest(randomAlphaOfLength(4)); + Request result = EnrichRequestConverters.deletePolicy(request); + + assertThat(result.getMethod(), equalTo(HttpDelete.METHOD_NAME)); + assertThat(result.getEndpoint(), equalTo("/_enrich/policy/" + request.getName())); + assertThat(result.getParameters().size(), equalTo(0)); + assertThat(result.getEntity(), nullValue()); + } + + public void testGetPolicy() { + GetPolicyRequest request = new GetPolicyRequest(randomAlphaOfLength(4)); + Request result = EnrichRequestConverters.getPolicy(request); + + assertThat(result.getMethod(), equalTo(HttpGet.METHOD_NAME)); + assertThat(result.getEndpoint(), 
equalTo("/_enrich/policy/" + request.getNames().get(0))); + assertThat(result.getParameters().size(), equalTo(0)); + assertThat(result.getEntity(), nullValue()); + + request = new GetPolicyRequest(randomAlphaOfLength(4), randomAlphaOfLength(4)); + result = EnrichRequestConverters.getPolicy(request); + + assertThat(result.getMethod(), equalTo(HttpGet.METHOD_NAME)); + assertThat(result.getEndpoint(), equalTo("/_enrich/policy/" + request.getNames().get(0) + "," + request.getNames().get(1))); + assertThat(result.getParameters().size(), equalTo(0)); + assertThat(result.getEntity(), nullValue()); + + request = new GetPolicyRequest(); + result = EnrichRequestConverters.getPolicy(request); + + assertThat(result.getMethod(), equalTo(HttpGet.METHOD_NAME)); + assertThat(result.getEndpoint(), equalTo("/_enrich/policy")); + assertThat(result.getParameters().size(), equalTo(0)); + assertThat(result.getEntity(), nullValue()); + } + + public void testStats() { + StatsRequest request = new StatsRequest(); + Request result = EnrichRequestConverters.stats(request); + + assertThat(result.getMethod(), equalTo(HttpGet.METHOD_NAME)); + assertThat(result.getEndpoint(), equalTo("/_enrich/_stats")); + assertThat(result.getParameters().size(), equalTo(0)); + assertThat(result.getEntity(), nullValue()); + } + + public void testExecutePolicy() { + ExecutePolicyRequest request = new ExecutePolicyRequest(randomAlphaOfLength(4)); + Request result = EnrichRequestConverters.executePolicy(request); + + assertThat(result.getMethod(), equalTo(HttpPost.METHOD_NAME)); + assertThat(result.getEndpoint(), equalTo("/_enrich/policy/" + request.getName() + "/_execute")); + assertThat(result.getParameters().size(), equalTo(0)); + assertThat(result.getEntity(), nullValue()); + + request = new ExecutePolicyRequest(randomAlphaOfLength(4)); + request.setWaitForCompletion(randomBoolean()); + result = EnrichRequestConverters.executePolicy(request); + + assertThat(result.getMethod(), equalTo(HttpPost.METHOD_NAME)); + 
assertThat(result.getEndpoint(), equalTo("/_enrich/policy/" + request.getName() + "/_execute")); + assertThat(result.getParameters().size(), equalTo(1)); + assertThat(result.getParameters().get("wait_for_completion"), equalTo(request.getWaitForCompletion().toString())); + assertThat(result.getEntity(), nullValue()); + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index 59df6ea93a0..2805f64adbf 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -857,6 +857,7 @@ public class RestHighLevelClientTests extends ESTestCase { apiName.startsWith("security.") == false && apiName.startsWith("index_lifecycle.") == false && apiName.startsWith("ccr.") == false && + apiName.startsWith("enrich.") == false && apiName.startsWith("transform.") == false && apiName.endsWith("freeze") == false && apiName.endsWith("reload_analyzers") == false && diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/EnrichDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/EnrichDocumentationIT.java new file mode 100644 index 00000000000..d4c26d6c9e9 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/EnrichDocumentationIT.java @@ -0,0 +1,314 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.documentation; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.client.core.AcknowledgedResponse; +import org.elasticsearch.client.enrich.DeletePolicyRequest; +import org.elasticsearch.client.enrich.ExecutePolicyRequest; +import org.elasticsearch.client.enrich.ExecutePolicyResponse; +import org.elasticsearch.client.enrich.NamedPolicy; +import org.elasticsearch.client.enrich.GetPolicyRequest; +import org.elasticsearch.client.enrich.GetPolicyResponse; +import org.elasticsearch.client.enrich.PutPolicyRequest; +import org.elasticsearch.client.enrich.StatsRequest; +import org.elasticsearch.client.enrich.StatsResponse; +import org.elasticsearch.client.enrich.StatsResponse.CoordinatorStats; +import org.elasticsearch.client.enrich.StatsResponse.ExecutingPolicy; +import org.elasticsearch.client.indices.CreateIndexRequest; +import org.junit.After; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +public class EnrichDocumentationIT extends ESRestHighLevelClientTestCase { + + @After + public void cleanup() { + RestHighLevelClient client = highLevelClient(); + DeletePolicyRequest deletePolicyRequest = new DeletePolicyRequest("users-policy"); + try { + 
client.enrich().deletePolicy(deletePolicyRequest, RequestOptions.DEFAULT); + } catch (Exception e) { + // ignore... it is ok if policy has already been removed + } + } + + public void testPutPolicy() throws Exception { + RestHighLevelClient client = highLevelClient(); + // tag::enrich-put-policy-request + PutPolicyRequest putPolicyRequest = new PutPolicyRequest( + "users-policy", "match", Arrays.asList("users"), + "email", Arrays.asList("address", "zip", "city", "state")); + // end::enrich-put-policy-request + + // tag::enrich-put-policy-execute + AcknowledgedResponse putPolicyResponse = + client.enrich().putPolicy(putPolicyRequest, RequestOptions.DEFAULT); + // end::enrich-put-policy-execute + + // tag::enrich-put-policy-response + boolean isAcknowledged = + putPolicyResponse.isAcknowledged(); // <1> + // end::enrich-put-policy-response + + // tag::enrich-put-policy-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(AcknowledgedResponse response) { // <1> + boolean isAcknowledged = response.isAcknowledged(); + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::enrich-put-policy-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::enrich-put-policy-execute-async + client.enrich().putPolicyAsync(putPolicyRequest, + RequestOptions.DEFAULT, listener); // <1> + // end::enrich-put-policy-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + + public void testDeletePolicy() throws Exception { + RestHighLevelClient client = highLevelClient(); + + { + // Add a policy, so that it can be deleted: + PutPolicyRequest putPolicyRequest = new PutPolicyRequest( + "users-policy", "match", Arrays.asList("users"), + "email", Arrays.asList("address", "zip", "city", "state")); + client.enrich().putPolicy(putPolicyRequest, 
RequestOptions.DEFAULT); + } + + // tag::enrich-delete-policy-request + DeletePolicyRequest deletePolicyRequest = + new DeletePolicyRequest("users-policy"); + // end::enrich-delete-policy-request + + // tag::enrich-delete-policy-execute + AcknowledgedResponse deletePolicyResponse = client.enrich() + .deletePolicy(deletePolicyRequest, RequestOptions.DEFAULT); + // end::enrich-delete-policy-execute + + // tag::enrich-delete-policy-response + boolean isAcknowledged = + deletePolicyResponse.isAcknowledged(); // <1> + // end::enrich-delete-policy-response + + // tag::enrich-delete-policy-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(AcknowledgedResponse response) { // <1> + boolean isAcknowledged = response.isAcknowledged(); + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::enrich-delete-policy-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::enrich-delete-policy-execute-async + client.enrich().deletePolicyAsync(deletePolicyRequest, + RequestOptions.DEFAULT, listener); // <1> + // end::enrich-delete-policy-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + + public void testGetPolicy() throws Exception { + RestHighLevelClient client = highLevelClient(); + + PutPolicyRequest putPolicyRequest = new PutPolicyRequest( + "users-policy", "match", Collections.singletonList("users"), + "email", Arrays.asList("address", "zip", "city", "state")); + client.enrich().putPolicy(putPolicyRequest, RequestOptions.DEFAULT); + + // tag::enrich-get-policy-request + GetPolicyRequest getPolicyRequest = new GetPolicyRequest("users-policy"); + // end::enrich-get-policy-request + + // tag::enrich-get-policy-execute + GetPolicyResponse getPolicyResponse = + client.enrich().getPolicy(getPolicyRequest, RequestOptions.DEFAULT); 
+ // end::enrich-get-policy-execute + + // tag::enrich-get-policy-response + List policies = getPolicyResponse.getPolicies(); // <1> + NamedPolicy policy = policies.get(0); + // end::enrich-get-policy-response + + // tag::enrich-get-policy-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(GetPolicyResponse response) { // <1> + List policies = response.getPolicies(); + NamedPolicy policy = policies.get(0); + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::enrich-get-policy-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::enrich-get-policy-execute-async + client.enrich().getPolicyAsync(getPolicyRequest, + RequestOptions.DEFAULT, listener); // <1> + // end::enrich-get-policy-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + + public void testStats() throws Exception { + RestHighLevelClient client = highLevelClient(); + + // tag::enrich-stats-request + StatsRequest statsRequest = new StatsRequest(); + // end::enrich-stats-request + + // tag::enrich-stats-execute + StatsResponse statsResponse = + client.enrich().stats(statsRequest, RequestOptions.DEFAULT); + // end::enrich-stats-execute + + // tag::enrich-stats-response + List executingPolicies = + statsResponse.getExecutingPolicies(); // <1> + List coordinatorStats = + statsResponse.getCoordinatorStats(); // <2> + // end::enrich-stats-response + + // tag::enrich-stats-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(StatsResponse response) { // <1> + List executingPolicies = + statsResponse.getExecutingPolicies(); + List coordinatorStats = + statsResponse.getCoordinatorStats(); + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::enrich-stats-execute-listener + + 
// Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::enrich-stats-execute-async + client.enrich().statsAsync(statsRequest, RequestOptions.DEFAULT, + listener); // <1> + // end::enrich-stats-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + + public void testExecutePolicy() throws Exception { + RestHighLevelClient client = highLevelClient(); + + { + CreateIndexRequest createIndexRequest = new CreateIndexRequest("users") + .mapping(Collections.singletonMap("properties", Collections.singletonMap("email", + Collections.singletonMap("type", "keyword")))); + client.indices().create(createIndexRequest, RequestOptions.DEFAULT); + PutPolicyRequest putPolicyRequest = new PutPolicyRequest( + "users-policy", "match", Collections.singletonList("users"), + "email", Arrays.asList("address", "zip", "city", "state")); + client.enrich().putPolicy(putPolicyRequest, RequestOptions.DEFAULT); + } + + // tag::enrich-execute-policy-request + ExecutePolicyRequest request = + new ExecutePolicyRequest("users-policy"); + // end::enrich-execute-policy-request + + // tag::enrich-execute-policy-execute + ExecutePolicyResponse response = + client.enrich().executePolicy(request, RequestOptions.DEFAULT); + // end::enrich-execute-policy-execute + + // tag::enrich-execute-policy-response + ExecutePolicyResponse.ExecutionStatus status = + response.getExecutionStatus(); + // end::enrich-execute-policy-response + + // tag::enrich-execute-policy-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(ExecutePolicyResponse response) { // <1> + ExecutePolicyResponse.ExecutionStatus status = + response.getExecutionStatus(); + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::enrich-execute-policy-execute-listener + + // Replace the empty listener by a blocking listener in 
test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::enrich-execute-policy-execute-async + client.enrich().executePolicyAsync(request, RequestOptions.DEFAULT, + listener); // <1> + // end::enrich-execute-policy-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java index cdf7fb16a40..2093e14d7c8 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java @@ -681,7 +681,7 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase { List roles = response.getRoles(); assertNotNull(response); // 29 system roles plus the three we created - assertThat(roles.size(), equalTo(32)); + assertThat(roles.size(), equalTo(33)); } { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/enrich/ExecutePolicyResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/enrich/ExecutePolicyResponseTests.java new file mode 100644 index 00000000000..cb7bdd51056 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/enrich/ExecutePolicyResponseTests.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.enrich; + +import org.elasticsearch.client.AbstractResponseTestCase; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class ExecutePolicyResponseTests extends AbstractResponseTestCase { + + @Override + protected ExecuteEnrichPolicyAction.Response createServerTestInstance(XContentType xContentType) { + if (randomBoolean()) { + return new ExecuteEnrichPolicyAction.Response(new ExecuteEnrichPolicyStatus(randomAlphaOfLength(4))); + } else { + return new ExecuteEnrichPolicyAction.Response(new TaskId(randomAlphaOfLength(4), randomNonNegativeLong())); + } + } + + @Override + protected ExecutePolicyResponse doParseToClientInstance(XContentParser parser) throws IOException { + return ExecutePolicyResponse.fromXContent(parser); + } + + @Override + protected void assertInstances(ExecuteEnrichPolicyAction.Response serverTestInstance, ExecutePolicyResponse clientInstance) { + if (serverTestInstance.getStatus() != null) { + assertThat(clientInstance.getExecutionStatus().getPhase(), equalTo(serverTestInstance.getStatus().getPhase())); + assertThat(clientInstance.getTaskId(), nullValue()); + } else if 
(serverTestInstance.getTaskId() != null) { + assertThat(clientInstance.getTaskId(), equalTo(serverTestInstance.getTaskId())); + assertThat(clientInstance.getExecutionStatus(), nullValue()); + } else { + assert false; + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/enrich/GetPolicyResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/enrich/GetPolicyResponseTests.java new file mode 100644 index 00000000000..fc0cfb73339 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/enrich/GetPolicyResponseTests.java @@ -0,0 +1,95 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.enrich; + +import org.elasticsearch.client.AbstractResponseTestCase; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class GetPolicyResponseTests extends AbstractResponseTestCase { + + @Override + protected GetEnrichPolicyAction.Response createServerTestInstance(XContentType xContentType) { + int numPolicies = randomIntBetween(0, 8); + Map policies = new HashMap<>(numPolicies); + for (int i = 0; i < numPolicies; i++) { + policies.put(randomAlphaOfLength(4), createRandomEnrichPolicy(xContentType)); + } + return new GetEnrichPolicyAction.Response(policies); + } + + @Override + protected GetPolicyResponse doParseToClientInstance(XContentParser parser) throws IOException { + return GetPolicyResponse.fromXContent(parser); + } + + @Override + protected void assertInstances(GetEnrichPolicyAction.Response serverTestInstance, GetPolicyResponse clientInstance) { + assertThat(clientInstance.getPolicies().size(), equalTo(serverTestInstance.getPolicies().size())); + for (int i = 0; i < clientInstance.getPolicies().size(); i++) { + assertThat(clientInstance.getPolicies().get(i).getType(), + equalTo(serverTestInstance.getPolicies().get(i).getPolicy().getType())); + assertThat(clientInstance.getPolicies().get(i).getName(), + equalTo(serverTestInstance.getPolicies().get(i).getName())); + 
assertThat(clientInstance.getPolicies().get(i).getIndices(), + equalTo(serverTestInstance.getPolicies().get(i).getPolicy().getIndices())); + if (clientInstance.getPolicies().get(i).getQuery() != null) { + assertThat(clientInstance.getPolicies().get(i).getQuery(), + equalTo(serverTestInstance.getPolicies().get(i).getPolicy().getQuery().getQuery())); + } else { + assertThat(serverTestInstance.getPolicies().get(i).getPolicy().getQuery(), nullValue()); + } + assertThat(clientInstance.getPolicies().get(i).getMatchField(), + equalTo(serverTestInstance.getPolicies().get(i).getPolicy().getMatchField())); + assertThat(clientInstance.getPolicies().get(i).getEnrichFields(), + equalTo(serverTestInstance.getPolicies().get(i).getPolicy().getEnrichFields())); + } + } + + private static EnrichPolicy createRandomEnrichPolicy(XContentType xContentType){ + try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { + builder.startObject(); + builder.endObject(); + BytesReference querySource = BytesArray.bytes(builder); + return new EnrichPolicy( + randomAlphaOfLength(4), + randomBoolean() ? new EnrichPolicy.QuerySource(querySource, xContentType) : null, + Arrays.asList(generateRandomStringArray(8, 4, false, false)), + randomAlphaOfLength(4), + Arrays.asList(generateRandomStringArray(8, 4, false, false)) + ); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/enrich/PutPolicyRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/enrich/PutPolicyRequestTests.java new file mode 100644 index 00000000000..a0138949490 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/enrich/PutPolicyRequestTests.java @@ -0,0 +1,105 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.enrich; + +import org.elasticsearch.client.AbstractRequestTestCase; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.test.EqualsHashCodeTestUtils; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Arrays; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +public class PutPolicyRequestTests extends AbstractRequestTestCase { + + public void testValidate() { + PutPolicyRequest request = createClientTestInstance(); + assertThat(request.validate().isPresent(), is(false)); + + Exception e = expectThrows(IllegalArgumentException.class, + () -> new PutPolicyRequest(request.getName(), request.getType(), request.getIndices(), null, request.getEnrichFields())); + assertThat(e.getMessage(), containsString("matchField must be a non-null and non-empty string")); + } + + public void testEqualsAndHashcode() { + PutPolicyRequest testInstance = 
createTestInstance(); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(testInstance, (original) -> { + PutPolicyRequest copy = new PutPolicyRequest(original.getName(), original.getType(), original.getIndices(), + original.getMatchField(), original.getEnrichFields()); + copy.setQuery(original.getQuery()); + return copy; + }); + } + + @Override + protected PutPolicyRequest createClientTestInstance() { + return createTestInstance("name"); + } + + public static PutPolicyRequest createTestInstance() { + return createTestInstance(randomAlphaOfLength(4)); + } + + public static PutPolicyRequest createTestInstance(String name) { + PutPolicyRequest testInstance = new PutPolicyRequest( + name, + randomAlphaOfLength(4), + Arrays.asList(generateRandomStringArray(4, 4, false, false)), + randomAlphaOfLength(4), + Arrays.asList(generateRandomStringArray(4, 4, false, false)) + ); + if (randomBoolean()) { + try { + testInstance.setQuery(new MatchAllQueryBuilder()); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + return testInstance; + } + + @Override + protected PutEnrichPolicyAction.Request doParseToServerInstance(XContentParser parser) throws IOException { + return PutEnrichPolicyAction.fromXContent(parser, "name"); + } + + @Override + protected void assertInstances(PutEnrichPolicyAction.Request serverInstance, PutPolicyRequest clientTestInstance) { + assertThat(clientTestInstance.getName(), equalTo(serverInstance.getName())); + assertThat(clientTestInstance.getType(), equalTo(serverInstance.getPolicy().getType())); + assertThat(clientTestInstance.getIndices(), equalTo(serverInstance.getPolicy().getIndices())); + if (clientTestInstance.getQuery() != null) { + XContentType type = serverInstance.getPolicy().getQuery().getContentType(); + assertThat(PutPolicyRequest.asMap(clientTestInstance.getQuery(), type), + equalTo(PutPolicyRequest.asMap(serverInstance.getPolicy().getQuery().getQuery(), type))); + } else { + 
assertThat(serverInstance.getPolicy().getQuery(), nullValue()); + } + assertThat(clientTestInstance.getMatchField(), equalTo(serverInstance.getPolicy().getMatchField())); + assertThat(clientTestInstance.getEnrichFields(), equalTo(serverInstance.getPolicy().getEnrichFields())); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/enrich/StatsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/enrich/StatsResponseTests.java new file mode 100644 index 00000000000..aac22348abb --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/enrich/StatsResponseTests.java @@ -0,0 +1,98 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.enrich; + +import org.elasticsearch.client.AbstractResponseTestCase; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class StatsResponseTests extends AbstractResponseTestCase { + + @Override + protected EnrichStatsAction.Response createServerTestInstance(XContentType xContentType) { + int numExecutingPolicies = randomIntBetween(0, 16); + List executingPolicies = new ArrayList<>(numExecutingPolicies); + for (int i = 0; i < numExecutingPolicies; i++) { + TaskInfo taskInfo = randomTaskInfo(); + executingPolicies.add(new EnrichStatsAction.Response.ExecutingPolicy(randomAlphaOfLength(4), taskInfo)); + } + int numCoordinatingStats = randomIntBetween(0, 16); + List coordinatorStats = new ArrayList<>(numCoordinatingStats); + for (int i = 0; i < numCoordinatingStats; i++) { + EnrichStatsAction.Response.CoordinatorStats stats = new EnrichStatsAction.Response.CoordinatorStats( + randomAlphaOfLength(4), randomIntBetween(0, 8096), randomIntBetween(0, 8096), randomNonNegativeLong(), + randomNonNegativeLong()); + coordinatorStats.add(stats); + } + return new EnrichStatsAction.Response(executingPolicies, coordinatorStats); + } + + @Override + protected StatsResponse doParseToClientInstance(XContentParser parser) throws IOException { + return StatsResponse.fromXContent(parser); + } + + @Override + protected void assertInstances(EnrichStatsAction.Response serverTestInstance, StatsResponse clientInstance) { + assertThat(clientInstance.getExecutingPolicies().size(), equalTo(serverTestInstance.getExecutingPolicies().size())); + for (int i = 
0; i < clientInstance.getExecutingPolicies().size(); i++) { + StatsResponse.ExecutingPolicy actual = clientInstance.getExecutingPolicies().get(i); + EnrichStatsAction.Response.ExecutingPolicy expected = serverTestInstance.getExecutingPolicies().get(i); + assertThat(actual.getName(), equalTo(expected.getName())); + assertThat(actual.getTaskInfo(), equalTo(expected.getTaskInfo())); + } + + assertThat(clientInstance.getCoordinatorStats().size(), equalTo(serverTestInstance.getCoordinatorStats().size())); + for (int i = 0; i < clientInstance.getCoordinatorStats().size(); i++) { + StatsResponse.CoordinatorStats actual = clientInstance.getCoordinatorStats().get(i); + EnrichStatsAction.Response.CoordinatorStats expected = serverTestInstance.getCoordinatorStats().get(i); + assertThat(actual.getNodeId(), equalTo(expected.getNodeId())); + assertThat(actual.getQueueSize(), equalTo(expected.getQueueSize())); + assertThat(actual.getRemoteRequestsCurrent(), equalTo(expected.getRemoteRequestsCurrent())); + assertThat(actual.getRemoteRequestsTotal(), equalTo(expected.getRemoteRequestsTotal())); + assertThat(actual.getExecutedSearchesTotal(), equalTo(expected.getExecutedSearchesTotal())); + } + } + + private static TaskInfo randomTaskInfo() { + TaskId taskId = new TaskId(randomAlphaOfLength(5), randomLong()); + String type = randomAlphaOfLength(5); + String action = randomAlphaOfLength(5); + String description = randomAlphaOfLength(5); + long startTime = randomLong(); + long runningTimeNanos = randomLong(); + boolean cancellable = randomBoolean(); + TaskId parentTaskId = TaskId.EMPTY_TASK_ID; + Map headers = randomBoolean() ? 
+ Collections.emptyMap() : + Collections.singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(5)); + return new TaskInfo(taskId, type, action, description, null, startTime, runningTimeNanos, cancellable, parentTaskId, headers); + } +} diff --git a/docs/java-rest/high-level/enrich/delete_policy.asciidoc b/docs/java-rest/high-level/enrich/delete_policy.asciidoc new file mode 100644 index 00000000000..9bee686cce0 --- /dev/null +++ b/docs/java-rest/high-level/enrich/delete_policy.asciidoc @@ -0,0 +1,31 @@ +-- +:api: enrich-delete-policy +:request: DeletePolicyRequest +:response: AcknowledgedResponse +-- + +[id="{upid}-{api}"] +=== Delete Policy API + +[id="{upid}-{api}-request"] +==== Request + +The Delete Policy API deletes an enrich policy from Elasticsearch. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-request] +-------------------------------------------------- + +[id="{upid}-{api}-response"] +==== Response + +The returned +{response}+ indicates if the delete policy request was acknowledged. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-response] +-------------------------------------------------- +<1> Whether delete policy request was acknowledged. + +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/enrich/execute_policy.asciidoc b/docs/java-rest/high-level/enrich/execute_policy.asciidoc new file mode 100644 index 00000000000..59594f1b741 --- /dev/null +++ b/docs/java-rest/high-level/enrich/execute_policy.asciidoc @@ -0,0 +1,30 @@ +-- +:api: enrich-execute-policy +:request: ExecutePolicyRequest +:response: ExecutePolicyResponse +-- + +[id="{upid}-{api}"] +=== Execute Policy API + +[id="{upid}-{api}-request"] +==== Request + +The Execute Policy API allows to execute an enrich policy by name. 
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-request] +-------------------------------------------------- + +[id="{upid}-{api}-response"] +==== Response + +The returned +{response}+ includes either the status or task id. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-response] +-------------------------------------------------- + +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/enrich/get_policy.asciidoc b/docs/java-rest/high-level/enrich/get_policy.asciidoc new file mode 100644 index 00000000000..401a78ccca6 --- /dev/null +++ b/docs/java-rest/high-level/enrich/get_policy.asciidoc @@ -0,0 +1,32 @@ +-- +:api: enrich-get-policy +:request: GetPolicyRequest +:response: GetPolicyResponse +-- + +[id="{upid}-{api}"] +=== Get Policy API + +[id="{upid}-{api}-request"] +==== Request + +The Get Policy API allows to retrieve enrich policies by name +or all policies if no name is provided. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-request] +-------------------------------------------------- + +[id="{upid}-{api}-response"] +==== Response + +The returned +{response}+ includes the requested enrich policy. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-response] +-------------------------------------------------- +<1> The actual enrich policy. 
+ +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/enrich/put_policy.asciidoc b/docs/java-rest/high-level/enrich/put_policy.asciidoc new file mode 100644 index 00000000000..b8e9475bed1 --- /dev/null +++ b/docs/java-rest/high-level/enrich/put_policy.asciidoc @@ -0,0 +1,31 @@ +-- +:api: enrich-put-policy +:request: PutPolicyRequest +:response: AcknowledgedResponse +-- + +[id="{upid}-{api}"] +=== Put Policy API + +[id="{upid}-{api}-request"] +==== Request + +The Put Policy API stores an enrich policy in Elasticsearch. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-request] +-------------------------------------------------- + +[id="{upid}-{api}-response"] +==== Response + +The returned +{response}+ indicates if the put policy request was acknowledged. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-response] +-------------------------------------------------- +<1> Whether put policy request was acknowledged. + +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/enrich/stats.asciidoc b/docs/java-rest/high-level/enrich/stats.asciidoc new file mode 100644 index 00000000000..1d4ae50238a --- /dev/null +++ b/docs/java-rest/high-level/enrich/stats.asciidoc @@ -0,0 +1,33 @@ +-- +:api: enrich-stats +:request: StatsRequest +:response: StatsResponse +-- + +[id="{upid}-{api}"] +=== Stats API + +[id="{upid}-{api}-request"] +==== Request + +The stats API returns enrich related stats. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-request] +-------------------------------------------------- + +[id="{upid}-{api}-response"] +==== Response + +The returned +{response}+ includes enrich related stats. 
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-response] +-------------------------------------------------- +<1> List of policies that are currently executing with + additional details. +<2> List of coordinator stats per ingest node. + +include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index a6975a97326..9cf1930b387 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -635,3 +635,22 @@ include::transform/delete_transform.asciidoc[] include::transform/preview_transform.asciidoc[] include::transform/start_transform.asciidoc[] include::transform/stop_transform.asciidoc[] + +== Enrich APIs + +:upid: {mainid}-enrich +:doc-tests-file: {doc-tests}/EnrichDocumentationIT.java + +The Java High Level REST Client supports the following Enrich APIs: + +* <<{upid}-enrich-put-policy>> +* <<{upid}-enrich-delete-policy>> +* <<{upid}-enrich-get-policy>> +* <<{upid}-enrich-stats>> +* <<{upid}-enrich-execute-policy>> + +include::enrich/put_policy.asciidoc[] +include::enrich/delete_policy.asciidoc[] +include::enrich/get_policy.asciidoc[] +include::enrich/stats.asciidoc[] +include::enrich/execute_policy.asciidoc[] diff --git a/docs/reference/ingest/apis/enrich/delete-enrich-policy.asciidoc b/docs/reference/ingest/apis/enrich/delete-enrich-policy.asciidoc new file mode 100644 index 00000000000..f0ebd40ff41 --- /dev/null +++ b/docs/reference/ingest/apis/enrich/delete-enrich-policy.asciidoc @@ -0,0 +1,67 @@ +[role="xpack"] +[testenv="basic"] +[[delete-enrich-policy-api]] +=== Delete enrich policy API +++++ +Delete enrich policy +++++ + +Deletes an existing enrich policy and its enrich index. 
+ +//// +[source,console] +---- +PUT /users + +PUT /_enrich/policy/my-policy +{ + "match": { + "indices": "users", + "match_field": "email", + "enrich_fields": ["first_name", "last_name", "city", "zip", "state"] + } +} +---- +// TESTSETUP +//// + +[source,console] +-------------------------------------------------- +DELETE /_enrich/policy/my-policy +-------------------------------------------------- + + +[[delete-enrich-policy-api-request]] +==== {api-request-title} + +`DELETE /_enrich/policy/` + + +[[delete-enrich-policy-api-prereqs]] +==== {api-prereq-title} + +include::put-enrich-policy.asciidoc[tag=enrich-policy-api-prereqs] + + +[[delete-enrich-policy-api-desc]] +==== {api-description-title} + +Use the delete enrich policy API +to delete an existing enrich policy +and its enrich index. + +[IMPORTANT] +==== +You must remove an enrich policy +from any in-use ingest pipelines +before deletion. +You cannot remove in-use enrich policies. +==== + + +[[delete-enrich-policy-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +Enrich policy to delete. diff --git a/docs/reference/ingest/apis/enrich/enrich-stats.asciidoc b/docs/reference/ingest/apis/enrich/enrich-stats.asciidoc new file mode 100644 index 00000000000..550374abd54 --- /dev/null +++ b/docs/reference/ingest/apis/enrich/enrich-stats.asciidoc @@ -0,0 +1,135 @@ +[role="xpack"] +[testenv="basic"] +[[enrich-stats-api]] +=== Enrich stats API +++++ +Enrich stats +++++ + +Returns <> statistics +and information about enrich policies +that are currently executing. + +[source,console] +---- +GET /_enrich/_stats +---- + + +[[enrich-stats-api-request]] +==== {api-request-title} + +`GET /_enrich/_stats` + + +[[enrich-stats-api-response-body]] +==== {api-response-body-title} + +`executing_policies`:: ++ +-- +(Array of objects) +Objects containing information +about each enrich policy +that is currently executing. + +Returned parameters include: + +`name`:: +(String) +Name of the enrich policy. 
+ +`task`:: +(<>) +Object containing detailed information +about the policy execution task. +-- + +`coordinator_stats`:: ++ +-- +(Array of objects) +Objects containing information +about each <> +for configured enrich processors. + +Returned parameters include: + +`node_id`:: +(String) +ID of the ingest node coordinating search requests +for configured enrich processors. + +`queue_size`:: +(Integer) +Number of search requests in the queue. + +`remote_requests_current`:: +(Integer) +Current number of outstanding remote requests. + +`remote_requests_total`:: +(Integer) +Number of outstanding remote requests executed +since node startup. ++ +In most cases, +a remote request includes multiple search requests. +This depends on the number of search requests in the queue +when the remote request is executed. + +`executed_searches_total`:: +(Integer) +Number of search requests +that enrich processors have executed +since node startup. +-- + + +[[enrich-stats-api-example]] +==== {api-examples-title} + + +[source,console] +---- +GET /_enrich/_stats +---- +// TEST[s/^/PUT \/_enrich\/policy\/my-policy\/_execute\n/] + +The API returns the following response: + +[source,console-result] +---- +{ + "executing_policies": [ + { + "name": "my-policy", + "task": { + "id" : 124, + "type" : "direct", + "action" : "cluster:admin/xpack/enrich/execute", + "start_time_in_millis" : 1458585884904, + "running_time_in_nanos" : 47402, + "cancellable" : false, + "parent_task_id" : "oTUltX4IQMOUUVeiohTt8A:123", + "headers" : { + "X-Opaque-Id" : "123456" + } + } + } + ], + "coordinator_stats": [ + { + "node_id": "1sFM8cmSROZYhPxVsiWew", + "queue_size": 0, + "remote_requests_current": 0, + "remote_requests_total": 0, + "executed_searches_total": 0 + } + ] +} +---- +// TESTRESPONSE[s/"executing_policies": \[[^\]]*\]/"executing_policies": $body.$_path/] +// TESTRESPONSE[s/"node_id": "1sFM8cmSROZYhPxVsiWew"/"node_id" : $body.coordinator_stats.0.node_id/] +// TESTRESPONSE[s/"remote_requests_total": 
0/"remote_requests_total" : $body.coordinator_stats.0.remote_requests_total/] +// TESTRESPONSE[s/"executed_searches_total": 0/"executed_searches_total" : $body.coordinator_stats.0.executed_searches_total/] diff --git a/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc b/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc new file mode 100644 index 00000000000..f859c1201d9 --- /dev/null +++ b/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc @@ -0,0 +1,103 @@ +[role="xpack"] +[testenv="basic"] +[[execute-enrich-policy-api]] +=== Execute enrich policy API +++++ +Execute enrich policy +++++ + +Executes an existing enrich policy. + +//// + +[source,console] +---- +PUT /users/_doc/1?refresh +{ + "email": "mardy.brown@asciidocsmith.com", + "first_name": "Mardy", + "last_name": "Brown", + "city": "New Orleans", + "county": "Orleans", + "state": "LA", + "zip": 70116, + "web": "mardy.asciidocsmith.com" +} + +PUT /_enrich/policy/my-policy +{ + "match": { + "indices": "users", + "match_field": "email", + "enrich_fields": ["first_name", "last_name", "city", "zip", "state"] + } +} +---- +// TESTSETUP +//// + +[source,console] +-------------------------------------------------- +PUT /_enrich/policy/my-policy/_execute +-------------------------------------------------- + +//// +[source,console] +-------------------------------------------------- +DELETE /_enrich/policy/my-policy +-------------------------------------------------- +// TEST[continued] +//// + + +[[execute-enrich-policy-api-request]] +==== {api-request-title} + +`PUT /_enrich/policy//_execute` + +`POST /_enrich/policy//_execute` + + +[[execute-enrich-policy-api-prereqs]] +==== {api-prereq-title} + +include::put-enrich-policy.asciidoc[tag=enrich-policy-api-prereqs] + + +[[execute-enrich-policy-api-desc]] +==== {api-description-title} + +Use the execute enrich policy API +to create the enrich index for an existing enrich policy. 
+ +// tag::execute-enrich-policy-def[] +The _enrich index_ contains documents from the policy's source indices. +Enrich indices always begin with `.enrich-*`, +are read-only, +and are <>. + +[WARNING] +==== +Enrich indices should be used by the <> only. +Avoid using enrich indices for other purposes. +==== +// end::execute-enrich-policy-def[] + +// tag::update-enrich-index[] +Once created, you cannot update +or index documents to an enrich index. +Instead, update your source indices +and execute the enrich policy again. +This creates a new enrich index from your updated source indices +and deletes the previous enrich index. +// end::update-enrich-index[] + +Because this API request performs several operations, +it may take a while to return a response. + +[[execute-enrich-policy-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +Enrich policy to execute. \ No newline at end of file diff --git a/docs/reference/ingest/apis/enrich/get-enrich-policy.asciidoc b/docs/reference/ingest/apis/enrich/get-enrich-policy.asciidoc new file mode 100644 index 00000000000..b3c0fe8f2ff --- /dev/null +++ b/docs/reference/ingest/apis/enrich/get-enrich-policy.asciidoc @@ -0,0 +1,225 @@ +[role="xpack"] +[testenv="basic"] +[[get-enrich-policy-api]] +=== Get enrich policy API +++++ +Get enrich policy +++++ + +Returns information about an enrich policy. 
+ +//// +[source,console] +---- +PUT /users + +PUT /_enrich/policy/my-policy +{ + "match": { + "indices": "users", + "match_field": "email", + "enrich_fields": ["first_name", "last_name", "city", "zip", "state"] + } +} + +PUT /_enrich/policy/other-policy +{ + "match": { + "indices": "users", + "match_field": "email", + "enrich_fields": ["first_name", "last_name", "city", "zip", "state"] + } +} +---- +//// + +[source,console] +-------------------------------------------------- +GET /_enrich/policy/my-policy +-------------------------------------------------- +// TEST[continued] + + +[[get-enrich-policy-api-request]] +==== {api-request-title} + +`GET /_enrich/policy/` + +`GET /_enrich/policy` + +`GET /_enrich/policy1,policy2` + + +[[get-enrich-policy-api-prereqs]] +==== {api-prereq-title} + +include::put-enrich-policy.asciidoc[tag=enrich-policy-api-prereqs] + + +[[get-enrich-policy-api-path-params]] +==== {api-path-parms-title} + +``:: ++ +-- +(Optional, string) +Comma-separated list of enrich policy names +used to limit the request. + +To return information for all enrich policies, +omit this parameter. 
+-- + + +[[get-enrich-policy-api-example]] +==== {api-examples-title} + + +[[get-enrich-policy-api-single-ex]] +===== Get a single policy + +[source,console] +-------------------------------------------------- +GET /_enrich/policy/my-policy +-------------------------------------------------- +// TEST[continued] + +The API returns the following response: + +[source,console-result] +-------------------------------------------------- +{ + "policies": [ + { + "config": { + "match": { + "name" : "my-policy", + "indices" : ["users"], + "match_field" : "email", + "enrich_fields" : [ + "first_name", + "last_name", + "city", + "zip", + "state" + ] + } + } + } + ] +} +-------------------------------------------------- + + +[[get-enrich-policy-api-commas-ex]] +===== Get multiple policies + +[source,console] +-------------------------------------------------- +GET /_enrich/policy/my-policy,other-policy +-------------------------------------------------- +// TEST[continued] + +The API returns the following response: + +[source,js] +-------------------------------------------------- +{ + "policies": [ + { + "config": { + "match": { + "name" : "my-policy", + "indices" : ["users"], + "match_field" : "email", + "enrich_fields" : [ + "first_name", + "last_name", + "city", + "zip", + "state" + ] + } + } + }, + { + "config": { + "match": { + "name" : "other-policy", + "indices" : ["users"], + "match_field" : "email", + "enrich_fields" : [ + "first_name", + "last_name", + "city", + "zip", + "state" + ] + } + } + } + ] +} +-------------------------------------------------- +// TESTRESPONSE + + +[[get-enrich-policy-api-all-ex]] +===== Get all policies + +[source,console] +-------------------------------------------------- +GET /_enrich/policy +-------------------------------------------------- +// TEST[continued] + +The API returns the following response: + +[source,console-result] +-------------------------------------------------- +{ + "policies": [ + { + "config": { + "match": { + 
"name" : "my-policy", + "indices" : ["users"], + "match_field" : "email", + "enrich_fields" : [ + "first_name", + "last_name", + "city", + "zip", + "state" + ] + } + } + }, + { + "config": { + "match": { + "name" : "other-policy", + "indices" : ["users"], + "match_field" : "email", + "enrich_fields" : [ + "first_name", + "last_name", + "city", + "zip", + "state" + ] + } + } + } + ] +} +-------------------------------------------------- + +//// +[source,console] +-------------------------------------------------- +DELETE /_enrich/policy/my-policy +DELETE /_enrich/policy/other-policy +-------------------------------------------------- +// TEST[continued] +//// diff --git a/docs/reference/ingest/apis/enrich/index.asciidoc b/docs/reference/ingest/apis/enrich/index.asciidoc new file mode 100644 index 00000000000..bd24e73c059 --- /dev/null +++ b/docs/reference/ingest/apis/enrich/index.asciidoc @@ -0,0 +1,21 @@ +[[enrich-apis]] +== Enrich APIs + +The following enrich APIs are available for managing enrich policies: + +* <> to add or update an enrich policy +* <> to delete an enrich policy +* <> to return information about an enrich policy +* <> to execute an enrich policy +* <> to get enrich-related stats + + +include::put-enrich-policy.asciidoc[] + +include::delete-enrich-policy.asciidoc[] + +include::get-enrich-policy.asciidoc[] + +include::execute-enrich-policy.asciidoc[] + +include::enrich-stats.asciidoc[] diff --git a/docs/reference/ingest/apis/enrich/put-enrich-policy.asciidoc b/docs/reference/ingest/apis/enrich/put-enrich-policy.asciidoc new file mode 100644 index 00000000000..359d5db262d --- /dev/null +++ b/docs/reference/ingest/apis/enrich/put-enrich-policy.asciidoc @@ -0,0 +1,349 @@ +[role="xpack"] +[testenv="basic"] +[[put-enrich-policy-api]] +=== Put enrich policy API +++++ +Put enrich policy +++++ + +Creates an enrich policy. 
+ +//// +[source,console] +---- +PUT /users +---- +//// + +[source,console] +---- +PUT /_enrich/policy/my-policy +{ + "match": { + "indices": "users", + "match_field": "email", + "enrich_fields": ["first_name", "last_name", "city", "zip", "state"] + } +} +---- +// TEST[continued] + +//// +[source,console] +-------------------------------------------------- +DELETE /_enrich/policy/my-policy +-------------------------------------------------- +// TEST[continued] +//// + + +[[put-enrich-policy-api-request]] +==== {api-request-title} + +`PUT /_enrich/policy/` + + +[[put-enrich-policy-api-prereqs]] +==== {api-prereq-title} + +// tag::enrich-policy-api-prereqs[] +If you use {es} {security-features}, you must have: + +* `read` index privileges for any indices used +* The `enrich_user` {stack-ov}/built-in-roles.html[built-in role] +// end::enrich-policy-api-prereqs[] + + +[[put-enrich-policy-api-desc]] +==== {api-description-title} + +Use the put enrich policy API +to create a new enrich policy. + +// tag::enrich-policy-def[] +An *enrich policy* is a set of rules the enrich processor uses +to append the appropriate data to incoming documents. +An enrich policy contains: + +* The *policy type*, + which determines how the processor enriches incoming documents +* A list of source indices +* The *match field* used to match incoming documents +* *Enrich fields* appended to incoming documents + from matching documents +// end::enrich-policy-def[] + + +===== Update an enrich policy + +// tag::update-enrich-policy[] +You cannot update an existing enrich policy. +Instead, you can: + +. Create and execute a new enrich policy. + +. Replace the previous enrich policy + with the new enrich policy + in any in-use enrich processors. + +. Use the <> API + to delete the previous enrich policy. 
+// end::update-enrich-policy[] + + +[[put-enrich-policy-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +include::{docdir}/rest-api/common-parms.asciidoc[tag=enrich-policy] + + +[[put-enrich-policy-api-request-body]] +==== {api-request-body-title} + +``:: ++ +-- +(Required, enrich policy object) +The parameter key is the enrich policy type. +The enrich policy type indicates +how the enrich processor matches incoming documents +to documents in the enrich index. + +Valid key values are: + +`match`:: +Match documents in the enrich index +using a <> for the `match_field`. +See <> for an example. + +`geo_match`:: +Match documents in the enrich index +using a <> for the `match_field`. +See <> for an example. + +The parameter value is the enrich policy. +The enrich policy is a set of rules +used to create an <>. +The enrich processor also uses these rules +to append field data to incoming documents. + +Parameters include: + +`indices`:: +(Required, array of strings) +Source indices used to create the enrich index. + +`query`:: +(Optional, string) +Query type used to find and select documents in the enrich index. +Valid value is <> (default). + +`match_field`:: +(Required, string) +Field used to match incoming documents +to documents in the enrich index. + +`enrich_fields`:: +(Required, Array of string) +Fields appended to incoming documents +from matching documents in the enrich index. +-- + +[[put-enrich-policy-api-example]] +==== {api-examples-title} + +[[put-enrich-policy-geo-match-ex]] +===== `geo_match` policy type + +You can use the `geo_match` enrich policy type +to enrich incoming documents +based on matching geo_shapes. +For example, +you can add postal codes +to incoming documents +based on a set of coordinates. + +To see how the `geo_match` policy type works, +try the following example. + +Use the <> +to create a source index. 
+The field mappings for the source index +must contain: + +* A <> field + which the enrich processor can use to match incoming documents +* One or more enrich fields + you'd like to append to incoming documents + +[source,console] +---- +PUT /postal_codes +{ + "mappings": { + "properties": { + "location": { + "type": "geo_shape" + }, + "postal_code": { + "type": "keyword" + } + } + } +} +---- + +Use the <> +to index data to this source index. + +[source,console] +---- +PUT /postal_codes/_doc/1?refresh=wait_for +{ + "location": { + "type": "envelope", + "coordinates": [[13.0, 53.0], [14.0, 52.0]] + }, + "postal_code": "96598" +} +---- +// TEST[continued] + +Use the put enrich policy API +to create an enrich policy +with the `geo_match` policy type. +This policy must include: + +* One or more source indices +* A `match_field`, + the `geo_shape` field from the source indices + used to match incoming documents +* Enrich fields from the source indices + you'd like to append to incoming documents + +[source,console] +---- +PUT /_enrich/policy/postal_policy +{ + "geo_match": { + "indices": "postal_codes", + "match_field": "location", + "enrich_fields": ["location","postal_code"] + } +} +---- +// TEST[continued] + +Use the <> +to create an enrich index for the policy. + +include::execute-enrich-policy.asciidoc[tag=execute-enrich-policy-def] + +[source,console] +---- +POST /_enrich/policy/postal_policy/_execute +---- +// TEST[continued] + +Use the <> +to create an ingest pipeline. +In the pipeline, +add an <> +that includes: + +* Your enrich policy +* The `field` of incoming documents used + to match the geo_shape of documents from the enrich index. +* The `target_field` used + to store appended enrich data for incoming documents. +* The `shape_relation`, + which indicates how the processor matches geo_shapes in incoming documents + to geo_shapes in documents from the enrich index. + See <<_spatial_relations>> for valid options and more information. 
+ +[source,console] +---- +PUT /_ingest/pipeline/postal_lookup +{ + "description": "Enrich postal codes", + "processors": [ + { + "enrich": { + "policy_name": "postal_policy", + "field": "geo_location", + "target_field": "geo_data", + "shape_relation": "INTERSECTS" + } + } + ] +} +---- +// TEST[continued] + +Use the ingest pipeline +to index a document. +The incoming document +should include the `field` +specified in your enrich processor. + +[source,console] +---- +PUT /users/_doc/0?pipeline=postal_lookup +{ + "first_name": "Mardy", + "last_name": "Brown", + "geo_location": "POINT (13.5 52.5)" +} +---- +// TEST[continued] + +To verify the enrich processor matched +and appended the appropriate field data, +use the <> +to view the indexed document. + +[source,console] +---- +GET /users/_doc/0 +---- +// TEST[continued] + +The API returns the following response: + +[source,console-result] +---- +{ + "found": true, + "_index": "users", + "_type": "_doc", + "_id": "0", + "_version": 1, + "_seq_no": 55, + "_primary_term": 1, + "_source": { + "geo_data": { + "location": { + "type": "envelope", + "coordinates": [[13.0, 53.0], [14.0, 52.0]] + }, + "postal_code": "96598" + }, + "first_name": "Mardy", + "last_name": "Brown", + "geo_location": "POINT (13.5 52.5)" + } +} +---- +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term":1/"_primary_term" : $body._primary_term/] + +//// +[source,console] +-------------------------------------------------- +DELETE /_ingest/pipeline/postal_lookup + +DELETE /_enrich/policy/postal_policy +-------------------------------------------------- +// TEST[continued] +//// \ No newline at end of file diff --git a/docs/reference/ingest/enrich.asciidoc b/docs/reference/ingest/enrich.asciidoc new file mode 100644 index 00000000000..2054746ae6d --- /dev/null +++ b/docs/reference/ingest/enrich.asciidoc @@ -0,0 +1,288 @@ +[role="xpack"] +[testenv="basic"] +[[ingest-enriching-data]] +== Enrich your data + +You can use the <> +to 
append data from existing indices +to incoming documents during ingest. + +For example, you can use the enrich processor to: + +* Identify web services or vendors based on known IP addresses +* Add product information to retail orders based on product IDs +* Supplement contact information based on an email address +* Add postal codes based on user coordinates + + +[float] +[[enrich-setup]] +=== Set up an enrich processor + +To set up an enrich processor and learn how it works, +follow these steps: + +. Check the <>. +. <>. +. <>. +. <>. +. <>. +. <>. + +Once you have an enrich processor set up, +you can <> +and <> +using the <>. + +[IMPORTANT] +==== +The enrich processor performs several operations +and may impact the speed of your <>. + +We strongly recommend testing and benchmarking your enrich processors +before deploying them in production. + +We do not recommend using the enrich processor to append real-time data. +The enrich processor works best with reference data +that doesn't change frequently. +==== + +[float] +[[enrich-prereqs]] +==== Prerequisites + +include::{docdir}/ingest/apis/enrich/put-enrich-policy.asciidoc[tag=enrich-policy-api-prereqs] + +[float] +[[create-enrich-source-index]] +==== Create a source index + +To begin, +create one or more source indices. + +A _source index_ contains data you want to append to incoming documents. +You can index and manage documents in a source index +like a regular index. + +The following <> request creates the `users` source index +containing user data. +This request also indexes a new document to the `users` source index. 
+ +[source,console] +---- +PUT /users/_doc/1?refresh=wait_for +{ + "email": "mardy.brown@asciidocsmith.com", + "first_name": "Mardy", + "last_name": "Brown", + "city": "New Orleans", + "county": "Orleans", + "state": "LA", + "zip": 70116, + "web": "mardy.asciidocsmith.com" +} +---- + +You also can set up {beats-ref}/getting-started.html[{beats}], +such as a {filebeat-ref}/filebeat-getting-started.html[{filebeat}], +to automatically send and index documents +to your source indices. +See {beats-ref}/getting-started.html[Getting started with {beats}]. + + +[float] +[[create-enrich-policy]] +==== Create an enrich policy + +Use the <> +to create an enrich policy. + +include::{docdir}/ingest/apis/enrich/put-enrich-policy.asciidoc[tag=enrich-policy-def] + +[source,console] +---- +PUT /_enrich/policy/users-policy +{ + "match": { + "indices": "users", + "match_field": "email", + "enrich_fields": ["first_name", "last_name", "city", "zip", "state"] + } +} +---- +// TEST[continued] + + +[float] +[[execute-enrich-policy]] +==== Execute an enrich policy + +Use the <> +to create an enrich index for the policy. + +include::apis/enrich/execute-enrich-policy.asciidoc[tag=execute-enrich-policy-def] + +The following request executes the `users-policy` enrich policy. +Because this API request performs several operations, +it may take a while to return a response. + +[source,console] +---- +POST /_enrich/policy/users-policy/_execute +---- +// TEST[continued] + + +[float] +[[add-enrich-processor]] +==== Add the enrich processor to an ingest pipeline + +Use the <> +to create an ingest pipeline. +Include an <> +that uses your enrich policy. + +When defining an enrich processor, +you must include the following: + +* The field used to match incoming documents + to documents in the enrich index. ++ +This field should be included in incoming documents. + +* The target field added to incoming documents. + This field contains all appended enrich data. 
+ +The following request adds a new pipeline, `user_lookup`. +This pipeline includes an enrich processor +that uses the `users-policy` enrich policy. + +[source,console] +---- +PUT /_ingest/pipeline/user_lookup +{ + "description" : "Enriching user details to messages", + "processors" : [ + { + "enrich" : { + "policy_name": "users-policy", + "field" : "email", + "target_field": "user", + "max_matches": "1" + } + } + ] +} +---- +// TEST[continued] + +Because the enrich policy type is `match`, +the enrich processor matches incoming documents +to documents in the enrich index +based on match field values. +The enrich processor then appends the enrich field data +from matching documents in the enrich index +to the target field of incoming documents. + +Because the `max_matches` option for the enrich processor is `1`, +the enrich processor appends the data from only the best matching document +to each incoming document's target field as an object. + +If the `max_matches` option were greater than `1`, +the processor could append data from up to the `max_matches` number of documents +to the target field as an array. + +If the incoming document matches no documents in the enrich index, +the processor appends no data. + +You also can add other <> +to your ingest pipeline. +You can use these processors to change or drop incoming documents +based on your criteria. +See <> for a list of built-in processors. + + +[float] +[[ingest-enrich-docs]] +==== Ingest and enrich documents + +Index incoming documents using your ingest pipeline. + +The following <> request uses the ingest pipeline +to index a document +containing the `email` field +specified in the enrich processor. + +[source,console] +---- +PUT /my_index/_doc/my_id?pipeline=user_lookup +{ + "email": "mardy.brown@asciidocsmith.com" +} +---- +// TEST[continued] + +To verify the enrich processor matched +and appended the appropriate field data, +use the <> to view the indexed document. 
+ +[source,console] +---- +GET /my_index/_doc/my_id +---- +// TEST[continued] + +The API returns the following response: + +[source,console-result] +---- +{ + "found": true, + "_index": "my_index", + "_type": "_doc", + "_id": "my_id", + "_version": 1, + "_seq_no": 55, + "_primary_term": 1, + "_source": { + "user": { + "email": "mardy.brown@asciidocsmith.com", + "first_name": "Mardy", + "last_name": "Brown", + "zip": 70116, + "city": "New Orleans", + "state": "LA" + }, + "email": "mardy.brown@asciidocsmith.com" + } +} +---- +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term":1/"_primary_term" : $body._primary_term/] + + +[float] +[[update-enrich-data]] +=== Update your enrich index + +include::{docdir}/ingest/apis/enrich/execute-enrich-policy.asciidoc[tag=update-enrich-index] + +If wanted, you can <> +or <> any already ingested documents +using your ingest pipeline. + + +[float] +[[update-enrich-policies]] +=== Update an enrich policy + +include::apis/enrich/put-enrich-policy.asciidoc[tag=update-enrich-policy] + +//// +[source,console] +-------------------------------------------------- +DELETE /_ingest/pipeline/user_lookup + +DELETE /_enrich/policy/users-policy +-------------------------------------------------- +// TEST[continued] +//// diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index e9813f44f53..e1b349b84bd 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -753,6 +753,10 @@ metadata field to provide the error message. 
-------------------------------------------------- // NOTCONSOLE + +include::enrich.asciidoc[] + + [[ingest-processors]] == Processors @@ -829,6 +833,7 @@ include::processors/date-index-name.asciidoc[] include::processors/dissect.asciidoc[] include::processors/dot-expand.asciidoc[] include::processors/drop.asciidoc[] +include::processors/enrich.asciidoc[] include::processors/fail.asciidoc[] include::processors/foreach.asciidoc[] include::processors/geoip.asciidoc[] diff --git a/docs/reference/ingest/processors/enrich.asciidoc b/docs/reference/ingest/processors/enrich.asciidoc new file mode 100644 index 00000000000..b02d4569dbb --- /dev/null +++ b/docs/reference/ingest/processors/enrich.asciidoc @@ -0,0 +1,24 @@ +[role="xpack"] +[testenv="basic"] +[[enrich-processor]] +=== Enrich Processor + +The `enrich` processor can enrich documents with data from another index. +See <> section for more information how to set this up and +check out the <> to get familiar with enrich policies and related APIs. + +[[enrich-options]] +.Enrich Options +[options="header"] +|====== +| Name | Required | Default | Description +| `policy_name` | yes | - | The name of the enrich policy to use. +| `field` | yes | - | The field in the input document that matches the policies match_field used to retrieve the enrichment data. +| `target_field` | yes | - | The field that will be used for the enrichment data. +| `ignore_missing` | no | false | If `true` and `field` does not exist, the processor quietly exits without modifying the document +| `override` | no | true | If processor will update fields with pre-existing non-null-valued field. When set to `false`, such fields will not be touched. +| `max_matches` | no | 1 | The maximum number of matched documents to include under the configured target field. The `target_field` will be turned into a json array if `max_matches` is higher than 1, otherwise `target_field` will become a json object. 
In order to avoid documents getting too large, the maximum allowed value is 128. +| `shape_relation` | no | `INTERSECTS` | A spatial relation operator used to match the <> of incoming documents to documents in the enrich index. This option is only used for `geo_match` enrich policy types. The <> mapping parameter determines which spatial relation operators are available. See <<_spatial_relations>> for operators and more information. + +include::common-options.asciidoc[] +|====== diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 3a021b7e1ee..e5301f99715 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -77,7 +77,7 @@ tag::committed[] If `true`, the segments is synced to disk. Segments that are synced can survive a hard reboot. + -If `false`, +If `false`, the data from uncommitted segments is also stored in the transaction log so that Elasticsearch is able to replay changes on the next start. @@ -122,6 +122,11 @@ is based on Lucene documents. {es} reclaims the disk space of deleted Lucene documents when a segment is merged. end::docs-deleted[] +tag::enrich-policy[] +Enrich policy name +used to limit the request. +end::enrich-policy[] + tag::expand-wildcards[] `expand_wildcards`:: + @@ -279,8 +284,8 @@ end::include-defaults[] tag::include-segment-file-sizes[] `include_segment_file_sizes`:: (Optional, boolean) -If `true`, the call reports the aggregated disk usage of -each one of the Lucene index files (only applies if segment stats are +If `true`, the call reports the aggregated disk usage of +each one of the Lucene index files (only applies if segment stats are requested). Defaults to `false`. 
end::include-segment-file-sizes[] @@ -504,7 +509,7 @@ end::positions[] tag::preference[] `preference`:: -(Optional, string) Specifies the node or shard the operation should be +(Optional, string) Specifies the node or shard the operation should be performed on. Random by default. end::preference[] @@ -652,7 +657,7 @@ end::source_includes[] tag::stats[] `stats`:: -(Optional, string) Specific `tag` of the request for logging and statistical +(Optional, string) Specific `tag` of the request for logging and statistical purposes. end::stats[] diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index c6a36b8e2ea..7250f146608 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -15,6 +15,7 @@ not be included yet. * <> * <> * <> +* <> * <> * <> * <> @@ -38,6 +39,7 @@ include::{es-repo-dir}/cat.asciidoc[] include::{es-repo-dir}/cluster.asciidoc[] include::{es-repo-dir}/ccr/apis/ccr-apis.asciidoc[] include::{es-repo-dir}/docs.asciidoc[] +include::{es-repo-dir}/ingest/apis/enrich/index.asciidoc[] include::{es-repo-dir}/graph/explore.asciidoc[] include::{es-repo-dir}/indices.asciidoc[] include::{es-repo-dir}/ilm/apis/ilm-api.asciidoc[] diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java index 070e99cc5c7..966190ee0ad 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java @@ -74,7 +74,7 @@ class SimulateExecutionService { responses.add(response); } if (counter.incrementAndGet() == request.getDocuments().size()) { - l.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(), + listener.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(), request.isVerbose(), responses)); } }); diff --git 
a/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java index 971dede5b7f..cca8afd8c09 100644 --- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java @@ -121,7 +121,7 @@ public class MultiSearchResponse extends ActionResponse implements Iterable + * This is available in for all license types except + * {@link OperationMode#MISSING} + * + * @return {@code true} as long as the license is valid. Otherwise + * {@code false}. + */ + public boolean isEnrichAllowed() { + // status is volatile + Status localStatus = status; + // Should work on all active licenses + return localStatus.active; + } + /** * Determine if SQL support should be enabled. *

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClient.java index 41121878111..439abc808cb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClient.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClient.java @@ -17,6 +17,7 @@ import org.elasticsearch.protocol.xpack.frozen.FreezeResponse; import org.elasticsearch.xpack.core.action.XPackInfoAction; import org.elasticsearch.xpack.core.action.XPackInfoRequestBuilder; import org.elasticsearch.xpack.core.ccr.client.CcrClient; +import org.elasticsearch.xpack.core.enrich.client.EnrichClient; import org.elasticsearch.xpack.core.frozen.action.FreezeIndexAction; import org.elasticsearch.xpack.core.ilm.client.ILMClient; import org.elasticsearch.xpack.core.ml.client.MachineLearningClient; @@ -43,6 +44,7 @@ public class XPackClient { private final WatcherClient watcherClient; private final MachineLearningClient machineLearning; private final ILMClient ilmClient; + private final EnrichClient enrichClient; public XPackClient(Client client) { this.client = Objects.requireNonNull(client, "client"); @@ -53,6 +55,7 @@ public class XPackClient { this.watcherClient = new WatcherClient(client); this.machineLearning = new MachineLearningClient(client); this.ilmClient = new ILMClient(client); + this.enrichClient = new EnrichClient(client); } public Client es() { @@ -87,6 +90,10 @@ public class XPackClient { return ilmClient; } + public EnrichClient enrichClient() { + return enrichClient; + } + public XPackClient withHeaders(Map headers) { return new XPackClient(client.filterWithHeader(headers)); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 03b8e2bc49f..ba0a77cd054 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -6,8 +6,8 @@ package org.elasticsearch.xpack.core; import org.elasticsearch.Version; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.MetaData; @@ -43,6 +43,10 @@ import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; import org.elasticsearch.xpack.core.ccr.CCRFeatureSet; import org.elasticsearch.xpack.core.analytics.AnalyticsFeatureSetUsage; import org.elasticsearch.xpack.core.deprecation.DeprecationInfoAction; +import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; import org.elasticsearch.xpack.core.flattened.FlattenedFeatureSetUsage; import org.elasticsearch.xpack.core.frozen.FrozenIndicesFeatureSetUsage; import org.elasticsearch.xpack.core.frozen.action.FreezeIndexAction; @@ -436,8 +440,13 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl DeleteTransformAction.INSTANCE, GetTransformAction.INSTANCE, GetTransformStatsAction.INSTANCE, - PreviewTransformAction.INSTANCE - ); + PreviewTransformAction.INSTANCE, + // enrich + DeleteEnrichPolicyAction.INSTANCE, + ExecuteEnrichPolicyAction.INSTANCE, + GetEnrichPolicyAction.INSTANCE, + PutEnrichPolicyAction.INSTANCE + ); } @Override @@ -613,7 +622,7 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl TransformState::fromXContent), new NamedXContentRegistry.Entry(PersistentTaskState.class, new 
ParseField(TransformField.TASK_NAME), TransformState::fromXContent) - ); + ); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java index 72c9ae6f1ff..8b53297ca5e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java @@ -40,6 +40,11 @@ public class XPackSettings { } + /** + * Setting for controlling whether or not enrich is enabled. + */ + public static final Setting ENRICH_ENABLED_SETTING = Setting.boolSetting("xpack.enrich.enabled", true, Property.NodeScope); + /** * Setting for controlling whether or not CCR is enabled. */ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichPolicy.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichPolicy.java new file mode 100644 index 00000000000..4b56c5b59a5 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichPolicy.java @@ -0,0 +1,378 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.enrich; + +import org.elasticsearch.Version; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Represents an enrich policy including its configuration. 
+ */ +public final class EnrichPolicy implements Writeable, ToXContentFragment { + + public static final String ENRICH_INDEX_NAME_BASE = ".enrich-"; + + public static final String MATCH_TYPE = "match"; + public static final String GEO_MATCH_TYPE = "geo_match"; + public static final String[] SUPPORTED_POLICY_TYPES = new String[]{ + MATCH_TYPE, + GEO_MATCH_TYPE + }; + + private static final ParseField QUERY = new ParseField("query"); + private static final ParseField INDICES = new ParseField("indices"); + private static final ParseField MATCH_FIELD = new ParseField("match_field"); + private static final ParseField ENRICH_FIELDS = new ParseField("enrich_fields"); + private static final ParseField ELASTICSEARCH_VERSION = new ParseField("elasticsearch_version"); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "policy", + false, + (args, policyType) -> new EnrichPolicy( + policyType, + (QuerySource) args[0], + (List) args[1], + (String) args[2], + (List) args[3], + (Version) args[4] + ) + ); + + static { + declareCommonConstructorParsingOptions(PARSER); + } + + private static void declareCommonConstructorParsingOptions(ConstructingObjectParser parser) { + parser.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> { + XContentBuilder contentBuilder = XContentBuilder.builder(p.contentType().xContent()); + contentBuilder.generator().copyCurrentStructure(p); + return new QuerySource(BytesReference.bytes(contentBuilder), contentBuilder.contentType()); + }, QUERY); + parser.declareStringArray(ConstructingObjectParser.constructorArg(), INDICES); + parser.declareString(ConstructingObjectParser.constructorArg(), MATCH_FIELD); + parser.declareStringArray(ConstructingObjectParser.constructorArg(), ENRICH_FIELDS); + parser.declareField(ConstructingObjectParser.optionalConstructorArg(), ((p, c) -> Version.fromString(p.text())), + ELASTICSEARCH_VERSION, ValueType.STRING); + } + + 
public static EnrichPolicy fromXContent(XContentParser parser) throws IOException { + Token token = parser.currentToken(); + if (token != Token.START_OBJECT) { + token = parser.nextToken(); + } + if (token != Token.START_OBJECT) { + throw new ParsingException(parser.getTokenLocation(), "unexpected token"); + } + token = parser.nextToken(); + if (token != Token.FIELD_NAME) { + throw new ParsingException(parser.getTokenLocation(), "unexpected token"); + } + String policyType = parser.currentName(); + EnrichPolicy policy = PARSER.parse(parser, policyType); + token = parser.nextToken(); + if (token != Token.END_OBJECT) { + throw new ParsingException(parser.getTokenLocation(), "unexpected token"); + } + return policy; + } + + private final String type; + private final QuerySource query; + private final List indices; + private final String matchField; + private final List enrichFields; + private final Version elasticsearchVersion; + + public EnrichPolicy(StreamInput in) throws IOException { + this( + in.readString(), + in.readOptionalWriteable(QuerySource::new), + in.readStringList(), + in.readString(), + in.readStringList(), + Version.readVersion(in) + ); + } + + public EnrichPolicy(String type, + QuerySource query, + List indices, + String matchField, + List enrichFields) { + this(type, query, indices, matchField, enrichFields, Version.CURRENT); + } + + public EnrichPolicy(String type, + QuerySource query, + List indices, + String matchField, + List enrichFields, + Version elasticsearchVersion) { + this.type = type; + this.query = query; + this.indices = indices; + this.matchField = matchField; + this.enrichFields = enrichFields; + this.elasticsearchVersion = elasticsearchVersion != null ? 
elasticsearchVersion : Version.CURRENT; + } + + public String getType() { + return type; + } + + public QuerySource getQuery() { + return query; + } + + public List getIndices() { + return indices; + } + + public String getMatchField() { + return matchField; + } + + public List getEnrichFields() { + return enrichFields; + } + + public Version getElasticsearchVersion() { + return elasticsearchVersion; + } + + public static String getBaseName(String policyName) { + return ENRICH_INDEX_NAME_BASE + policyName; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(type); + out.writeOptionalWriteable(query); + out.writeStringCollection(indices); + out.writeString(matchField); + out.writeStringCollection(enrichFields); + Version.writeVersion(elasticsearchVersion, out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(type); + { + toInnerXContent(builder, params); + } + builder.endObject(); + return builder; + } + + private void toInnerXContent(XContentBuilder builder, Params params) throws IOException { + if (query != null) { + builder.field(QUERY.getPreferredName(), query.getQueryAsMap()); + } + builder.array(INDICES.getPreferredName(), indices.toArray(new String[0])); + builder.field(MATCH_FIELD.getPreferredName(), matchField); + builder.array(ENRICH_FIELDS.getPreferredName(), enrichFields.toArray(new String[0])); + if (params.paramAsBoolean("include_version", false) && elasticsearchVersion != null) { + builder.field(ELASTICSEARCH_VERSION.getPreferredName(), elasticsearchVersion.toString()); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnrichPolicy policy = (EnrichPolicy) o; + return type.equals(policy.type) && + Objects.equals(query, policy.query) && + indices.equals(policy.indices) && + matchField.equals(policy.matchField) && + 
enrichFields.equals(policy.enrichFields) && + elasticsearchVersion.equals(policy.elasticsearchVersion); + } + + @Override + public int hashCode() { + return Objects.hash( + type, + query, + indices, + matchField, + enrichFields, + elasticsearchVersion + ); + } + + public String toString() { + return Strings.toString(this); + } + + public static class QuerySource implements Writeable { + + private final BytesReference query; + private final XContentType contentType; + + QuerySource(StreamInput in) throws IOException { + this(in.readBytesReference(), in.readEnum(XContentType.class)); + } + + public QuerySource(BytesReference query, XContentType contentType) { + this.query = query; + this.contentType = contentType; + } + + public BytesReference getQuery() { + return query; + } + + public Map getQueryAsMap() { + return XContentHelper.convertToMap(query, true, contentType).v2(); + } + + public XContentType getContentType() { + return contentType; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBytesReference(query); + out.writeEnum(contentType); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QuerySource that = (QuerySource) o; + return query.equals(that.query) && + contentType == that.contentType; + } + + @Override + public int hashCode() { + return Objects.hash(query, contentType); + } + } + + public static class NamedPolicy implements Writeable, ToXContentFragment { + + static final ParseField NAME = new ParseField("name"); + @SuppressWarnings("unchecked") + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "named_policy", + false, + (args, policyType) -> new NamedPolicy( + (String) args[0], + new EnrichPolicy(policyType, + (QuerySource) args[1], + (List) args[2], + (String) args[3], + (List) args[4], + (Version) args[5]) + ) + ); + + static { + 
PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME); + declareCommonConstructorParsingOptions(PARSER); + } + + private final String name; + private final EnrichPolicy policy; + + public NamedPolicy(String name, EnrichPolicy policy) { + this.name = name; + this.policy = policy; + } + + public NamedPolicy(StreamInput in) throws IOException { + name = in.readString(); + policy = new EnrichPolicy(in); + } + + public String getName() { + return name; + } + + public EnrichPolicy getPolicy() { + return policy; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + policy.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(policy.type); + { + builder.field(NAME.getPreferredName(), name); + policy.toInnerXContent(builder, params); + } + builder.endObject(); + return builder; + } + + public static NamedPolicy fromXContent(XContentParser parser) throws IOException { + Token token = parser.currentToken(); + if (token != Token.START_OBJECT) { + token = parser.nextToken(); + } + if (token != Token.START_OBJECT) { + throw new ParsingException(parser.getTokenLocation(), "unexpected token"); + } + token = parser.nextToken(); + if (token != Token.FIELD_NAME) { + throw new ParsingException(parser.getTokenLocation(), "unexpected token"); + } + String policyType = parser.currentName(); + token = parser.nextToken(); + if (token != Token.START_OBJECT) { + throw new ParsingException(parser.getTokenLocation(), "unexpected token"); + } + NamedPolicy policy = PARSER.parse(parser, policyType); + token = parser.nextToken(); + if (token != Token.END_OBJECT) { + throw new ParsingException(parser.getTokenLocation(), "unexpected token"); + } + return policy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NamedPolicy that = 
(NamedPolicy) o; + return name.equals(that.name) && + policy.equals(that.policy); + } + + @Override + public int hashCode() { + return Objects.hash(name, policy); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java new file mode 100644 index 00000000000..0166e7c2594 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.enrich.action; + +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Objects; + +public class DeleteEnrichPolicyAction extends ActionType { + + public static final DeleteEnrichPolicyAction INSTANCE = new DeleteEnrichPolicyAction(); + public static final String NAME = "cluster:admin/xpack/enrich/delete"; + + private DeleteEnrichPolicyAction() { + super(NAME, AcknowledgedResponse::new); + } + + public static class Request extends MasterNodeRequest { + + private final String name; + + public Request(String name) { + this.name = Objects.requireNonNull(name, "name cannot be null"); + } + + public Request(StreamInput in) throws IOException { + super(in); + this.name = in.readString(); + } + + public String getName() { + return name; + } + + @Override + 
public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(name); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return name.equals(request.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java new file mode 100644 index 00000000000..3c19d2d33bd --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java @@ -0,0 +1,249 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.enrich.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.tasks.TaskInfo; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public class EnrichStatsAction extends ActionType { + + public static final EnrichStatsAction INSTANCE = new EnrichStatsAction(); + public static final String NAME = "cluster:admin/xpack/enrich/stats"; + + private EnrichStatsAction() { + super(NAME, Response::new); + } + + public static class Request extends MasterNodeRequest { + + public Request() { + } + + public Request(StreamInput in) throws IOException { + super(in); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private final List executingPolicies; + private final List coordinatorStats; + + public Response(List executingPolicies, List coordinatorStats) { + this.executingPolicies = executingPolicies; + this.coordinatorStats = coordinatorStats; + } + + public Response(StreamInput in) throws IOException { + super(in); + executingPolicies = in.readList(ExecutingPolicy::new); + coordinatorStats = in.readList(CoordinatorStats::new); + } + + public List getExecutingPolicies() { + return executingPolicies; + } + + public List getCoordinatorStats() { + return coordinatorStats; + } + + @Override + public void writeTo(StreamOutput 
out) throws IOException { + out.writeList(executingPolicies); + out.writeList(coordinatorStats); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startArray("executing_policies"); + for (ExecutingPolicy policy : executingPolicies) { + builder.startObject(); + policy.toXContent(builder, params); + builder.endObject(); + } + builder.endArray(); + builder.startArray("coordinator_stats"); + for (CoordinatorStats entry : coordinatorStats) { + builder.startObject(); + entry.toXContent(builder, params); + builder.endObject(); + } + builder.endArray(); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return executingPolicies.equals(response.executingPolicies) && + coordinatorStats.equals(response.coordinatorStats); + } + + @Override + public int hashCode() { + return Objects.hash(executingPolicies, coordinatorStats); + } + + public static class CoordinatorStats implements Writeable, ToXContentFragment { + + private final String nodeId; + private final int queueSize; + private final int remoteRequestsCurrent; + private final long remoteRequestsTotal; + private final long executedSearchesTotal; + + public CoordinatorStats(String nodeId, + int queueSize, + int remoteRequestsCurrent, + long remoteRequestsTotal, + long executedSearchesTotal) { + this.nodeId = nodeId; + this.queueSize = queueSize; + this.remoteRequestsCurrent = remoteRequestsCurrent; + this.remoteRequestsTotal = remoteRequestsTotal; + this.executedSearchesTotal = executedSearchesTotal; + } + + public CoordinatorStats(StreamInput in) throws IOException { + this(in.readString(), in.readVInt(), in.readVInt(), in.readVLong(), in.readVLong()); + } + + public String getNodeId() { + return nodeId; + } + + public int getQueueSize() { + return 
queueSize; + } + + public int getRemoteRequestsCurrent() { + return remoteRequestsCurrent; + } + + public long getRemoteRequestsTotal() { + return remoteRequestsTotal; + } + + public long getExecutedSearchesTotal() { + return executedSearchesTotal; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(nodeId); + out.writeVInt(queueSize); + out.writeVInt(remoteRequestsCurrent); + out.writeVLong(remoteRequestsTotal); + out.writeVLong(executedSearchesTotal); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("node_id", nodeId); + builder.field("queue_size", queueSize); + builder.field("remote_requests_current", remoteRequestsCurrent); + builder.field("remote_requests_total", remoteRequestsTotal); + builder.field("executed_searches_total", executedSearchesTotal); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CoordinatorStats stats = (CoordinatorStats) o; + return Objects.equals(nodeId, stats.nodeId) && + queueSize == stats.queueSize && + remoteRequestsCurrent == stats.remoteRequestsCurrent && + remoteRequestsTotal == stats.remoteRequestsTotal && + executedSearchesTotal == stats.executedSearchesTotal; + } + + @Override + public int hashCode() { + return Objects.hash(nodeId, queueSize, remoteRequestsCurrent, remoteRequestsTotal, executedSearchesTotal); + } + } + + public static class ExecutingPolicy implements Writeable, ToXContentFragment { + + private final String name; + private final TaskInfo taskInfo; + + public ExecutingPolicy(String name, TaskInfo taskInfo) { + this.name = name; + this.taskInfo = taskInfo; + } + + ExecutingPolicy(StreamInput in) throws IOException { + this(in.readString(), new TaskInfo(in)); + } + + public String getName() { + return name; + } + + public TaskInfo getTaskInfo() { + return taskInfo; + } + + 
@Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + taskInfo.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("name", name); + builder.startObject("task"); + { + builder.value(taskInfo); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExecutingPolicy that = (ExecutingPolicy) o; + return name.equals(that.name) && + taskInfo.equals(that.taskInfo); + } + + @Override + public int hashCode() { + return Objects.hash(name, taskInfo); + } + } + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java new file mode 100644 index 00000000000..e5c6ab5eb67 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java @@ -0,0 +1,151 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.enrich.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.tasks.TaskId; + +import java.io.IOException; +import java.util.Objects; + +public class ExecuteEnrichPolicyAction extends ActionType { + + public static final ExecuteEnrichPolicyAction INSTANCE = new ExecuteEnrichPolicyAction(); + public static final String NAME = "cluster:admin/xpack/enrich/execute"; + + private ExecuteEnrichPolicyAction() { + super(NAME, ExecuteEnrichPolicyAction.Response::new); + } + + public static class Request extends MasterNodeRequest { + + private final String name; + private boolean waitForCompletion; + + public Request(String name) { + this.name = Objects.requireNonNull(name, "name cannot be null"); + this.waitForCompletion = true; + } + + public Request(StreamInput in) throws IOException { + super(in); + name = in.readString(); + waitForCompletion = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(name); + out.writeBoolean(waitForCompletion); + } + + public String getName() { + return name; + } + + public boolean isWaitForCompletion() { + return waitForCompletion; + } + + public Request setWaitForCompletion(boolean waitForCompletion) { + this.waitForCompletion = waitForCompletion; + return this; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + // This will be displayed in tasks api and allows stats api to figure out which policies are being executed. 
+ @Override + public String getDescription() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return waitForCompletion == request.waitForCompletion && + Objects.equals(name, request.name); + } + + @Override + public int hashCode() { + return Objects.hash(name, waitForCompletion); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private final TaskId taskId; + private final ExecuteEnrichPolicyStatus status; + + public Response(ExecuteEnrichPolicyStatus status) { + this.taskId = null; + this.status = status; + } + + public Response(TaskId taskId) { + this.taskId = taskId; + this.status = null; + } + + public TaskId getTaskId() { + return taskId; + } + + public ExecuteEnrichPolicyStatus getStatus() { + return status; + } + + public Response(StreamInput in) throws IOException { + super(in); + if (in.readBoolean()) { + this.status = new ExecuteEnrichPolicyStatus(in); + this.taskId = null; + } else { + this.taskId = TaskId.readFromStream(in); + this.status = null; + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + boolean waitedForCompletion = status != null; + out.writeBoolean(waitedForCompletion); + if (waitedForCompletion) { + status.writeTo(out); + } else { + taskId.writeTo(out); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (taskId != null) { + builder.field("task", taskId.getNodeId() + ":" + taskId.getId()); + } else { + builder.field("status", status); + } + } + builder.endObject(); + return builder; + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyStatus.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyStatus.java new file mode 100644 index 00000000000..96435b6f95f --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyStatus.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.enrich.action; + +import java.io.IOException; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.tasks.Task; + +public class ExecuteEnrichPolicyStatus implements Task.Status { + + public static final class PolicyPhases { + private PolicyPhases() {} + + public static final String SCHEDULED = "SCHEDULED"; + public static final String RUNNING = "RUNNING"; + public static final String COMPLETE = "COMPLETE"; + public static final String FAILED = "FAILED"; + } + + public static final String NAME = "enrich-policy-execution"; + + private static final String PHASE_FIELD = "phase"; + + private final String phase; + + public ExecuteEnrichPolicyStatus(String phase) { + this.phase = phase; + } + + public ExecuteEnrichPolicyStatus(StreamInput in) throws IOException { + this.phase = in.readString(); + } + + public String getPhase() { + return phase; + } + + public boolean isCompleted() { + return PolicyPhases.COMPLETE.equals(phase); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(phase); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + 
builder.field(PHASE_FIELD, phase); + } + builder.endObject(); + return builder; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java new file mode 100644 index 00000000000..c1543c8578b --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java @@ -0,0 +1,144 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.enrich.action; + +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.TreeMap; +import java.util.stream.Collectors; + +public class GetEnrichPolicyAction extends ActionType { + + public static final GetEnrichPolicyAction INSTANCE = new GetEnrichPolicyAction(); + public static final String NAME = "cluster:admin/xpack/enrich/get"; + + private GetEnrichPolicyAction() { + super(NAME, Response::new); + } + + public static class Request extends MasterNodeReadRequest { + + private final List names; + + public Request() { + this.names = new ArrayList<>(); + } + + 
public Request(String[] names) { + this.names = Arrays.asList(names); + } + + public Request(StreamInput in) throws IOException { + super(in); + this.names = in.readStringList(); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public List getNames() { + return names; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringCollection(names); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(names, request.names); + } + + @Override + public int hashCode() { + return Objects.hash(names); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private final List policies; + + public Response(Map policies) { + Objects.requireNonNull(policies, "policies cannot be null"); + // use a treemap to guarantee ordering in the set, then transform it to the list of named policies + this.policies = new TreeMap<>(policies).entrySet().stream() + .map(entry -> new EnrichPolicy.NamedPolicy(entry.getKey(), entry.getValue())).collect(Collectors.toList()); + } + + public Response(StreamInput in) throws IOException { + policies = in.readList(EnrichPolicy.NamedPolicy::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeList(policies); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.startArray("policies"); + { + for (EnrichPolicy.NamedPolicy policy : policies) { + builder.startObject(); + { + builder.startObject("config"); + { + policy.toXContent(builder, params); + } + builder.endObject(); + } + builder.endObject(); + } + } + builder.endArray(); + } + builder.endObject(); + + return builder; + } + + public List getPolicies() { + return policies; 
+ } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return policies.equals(response.policies); + } + + @Override + public int hashCode() { + return Objects.hash(policies); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java new file mode 100644 index 00000000000..d30ba4c383e --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.enrich.action; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; + +import java.io.IOException; +import java.util.Objects; + +public class PutEnrichPolicyAction extends ActionType { + + public static final PutEnrichPolicyAction INSTANCE = new PutEnrichPolicyAction(); + public static final String NAME = "cluster:admin/xpack/enrich/put"; + + private PutEnrichPolicyAction() { + super(NAME, AcknowledgedResponse::new); + } + + public static Request fromXContent(XContentParser parser, String name) throws IOException { + 
return new Request(name, EnrichPolicy.fromXContent(parser)); + } + + public static class Request extends MasterNodeRequest { + + private final EnrichPolicy policy; + private final String name; + + public Request(String name, EnrichPolicy policy) { + this.name = Objects.requireNonNull(name, "name cannot be null"); + if (!Version.CURRENT.equals(policy.getElasticsearchVersion())) { + throw new IllegalArgumentException("Cannot set [version_created] field on enrich policy [" + name + + "]. Found [" + policy.getElasticsearchVersion() + "]"); + } + this.policy = policy; + } + + public Request(StreamInput in) throws IOException { + super(in); + name = in.readString(); + policy = new EnrichPolicy(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(name); + policy.writeTo(out); + } + + public String getName() { + return name; + } + + public EnrichPolicy getPolicy() { + return policy; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return policy.equals(request.policy) && + name.equals(request.name); + } + + @Override + public int hashCode() { + return Objects.hash(policy, name); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/client/EnrichClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/client/EnrichClient.java new file mode 100644 index 00000000000..c12df5b49d2 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/client/EnrichClient.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.enrich.client; + +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; + +import java.util.Objects; + +public class EnrichClient { + + private final ElasticsearchClient client; + + public EnrichClient(ElasticsearchClient client) { + this.client = Objects.requireNonNull(client, "client"); + } + + public void deleteEnrichPolicy( + final DeleteEnrichPolicyAction.Request request, + final ActionListener listener) { + client.execute(DeleteEnrichPolicyAction.INSTANCE, request, listener); + } + + public ActionFuture deleteEnrichPolicy(final DeleteEnrichPolicyAction.Request request) { + final PlainActionFuture listener = PlainActionFuture.newFuture(); + client.execute(DeleteEnrichPolicyAction.INSTANCE, request, listener); + return listener; + } + + public void executeEnrichPolicy( + final ExecuteEnrichPolicyAction.Request request, + final ActionListener listener) { + client.execute(ExecuteEnrichPolicyAction.INSTANCE, request, listener); + } + + public ActionFuture executeEnrichPolicy(final ExecuteEnrichPolicyAction.Request request) { + final PlainActionFuture listener = PlainActionFuture.newFuture(); + client.execute(ExecuteEnrichPolicyAction.INSTANCE, request, listener); + return listener; + } + + public void getEnrichPolicy( + final GetEnrichPolicyAction.Request request, + final ActionListener 
listener) { + client.execute(GetEnrichPolicyAction.INSTANCE, request, listener); + } + + public ActionFuture getEnrichPolicy(final GetEnrichPolicyAction.Request request) { + final PlainActionFuture listener = PlainActionFuture.newFuture(); + client.execute(GetEnrichPolicyAction.INSTANCE, request, listener); + return listener; + } + + public void putEnrichPolicy( + final PutEnrichPolicyAction.Request request, + final ActionListener listener) { + client.execute(PutEnrichPolicyAction.INSTANCE, request, listener); + } + + public ActionFuture putEnrichPolicy(final PutEnrichPolicyAction.Request request) { + final PlainActionFuture listener = PlainActionFuture.newFuture(); + client.execute(PutEnrichPolicyAction.INSTANCE, request, listener); + return listener; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index fbd1cd58932..89016ab8b41 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -774,6 +774,10 @@ public class RoleDescriptor implements ToXContentObject, Writeable { return this; } + public Builder indices(Collection indices) { + return indices(indices.toArray(new String[indices.size()])); + } + public Builder privileges(String... 
privileges) { indicesPrivileges.privileges = privileges; return this; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index 57a4325062f..920d0e6e22f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -84,6 +84,7 @@ public class ClusterPrivilegeResolver { Collections.unmodifiableSet(Sets.newHashSet("cluster:admin/slm/*", StartILMAction.NAME, StopILMAction.NAME, GetStatusAction.NAME)); private static final Set READ_SLM_PATTERN = Collections.unmodifiableSet(Sets.newHashSet(GetSnapshotLifecycleAction.NAME, GetStatusAction.NAME)); + private static final Set MANAGE_ENRICH_AUTOMATON = Collections.unmodifiableSet(Sets.newHashSet("cluster:admin/xpack/enrich/*")); public static final NamedClusterPrivilege NONE = new ActionClusterPrivilege("none", Collections.emptySet(), Collections.emptySet()); public static final NamedClusterPrivilege ALL = new ActionClusterPrivilege("all", ALL_CLUSTER_PATTERN); @@ -128,6 +129,7 @@ public class ClusterPrivilegeResolver { Sets.newHashSet(DelegatePkiAuthenticationAction.NAME, InvalidateTokenAction.NAME)); public static final NamedClusterPrivilege MANAGE_OWN_API_KEY = ManageOwnApiKeyClusterPrivilege.INSTANCE; + public static final NamedClusterPrivilege MANAGE_ENRICH = new ActionClusterPrivilege("manage_enrich", MANAGE_ENRICH_AUTOMATON); private static final Map VALUES = Collections.unmodifiableMap( Stream.of( @@ -162,7 +164,8 @@ public class ClusterPrivilegeResolver { MANAGE_SLM, READ_SLM, DELEGATE_PKI, - MANAGE_OWN_API_KEY).collect(Collectors.toMap(cp -> cp.name(), cp -> cp))); + MANAGE_OWN_API_KEY, + MANAGE_ENRICH).collect(Collectors.toMap(cp 
-> cp.name(), cp -> cp))); /** * Resolves a {@link NamedClusterPrivilege} from a given name if it exists. diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 9a9fb8bdbec..ed80944a376 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -264,6 +264,11 @@ public class ReservedRolesStore implements BiConsumer, ActionListene .privileges("view_index_metadata") .allowRestrictedIndices(true) .build() }, null, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, null)) + .put("enrich_user", new RoleDescriptor("enrich_user", new String[]{ "manage_enrich", "manage_ingest_pipelines", "monitor" }, + new RoleDescriptor.IndicesPrivileges[]{ RoleDescriptor.IndicesPrivileges.builder() + .indices(".enrich-*") + .privileges("manage", "read", "write") + .build() }, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) .immutableMap(); } diff --git a/x-pack/plugin/core/src/main/resources/monitoring-es.json b/x-pack/plugin/core/src/main/resources/monitoring-es.json index 89b97abac61..fb7d2d7764a 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-es.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-es.json @@ -1093,6 +1093,62 @@ } } } + }, + "enrich_coordinator_stats" : { + "properties": { + "node_id": { + "type": "keyword" + }, + "queue_size": { + "type": "integer" + }, + "remote_requests_current" : { + "type": "long" + }, + "remote_requests_total" : { + "type": "long" + }, + "executed_searches_total" : { + "type": "long" + } + } + }, + "enrich_executing_policy_stats": { + "properties": { + "name": { + "type": "keyword" + }, + "task": { + "type": "object", + "properties": { + "node": { + "type": "keyword" + }, + 
"id": { + "type": "long" + }, + "type": { + "type": "keyword" + }, + "action": { + "type": "keyword" + }, + "description": { + "type": "keyword" + }, + "start_time_in_millis": { + "type": "date", + "format": "epoch_millis" + }, + "running_time_in_nanos": { + "type": "long" + }, + "cancellable": { + "type": "boolean" + } + } + } + } } } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index e02c9301016..3a01936de7d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -8,6 +8,10 @@ package org.elasticsearch.xpack.core.security.authz.privilege; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; @@ -177,6 +181,15 @@ public class PrivilegeTests extends ESTestCase { } + public void testManageEnrichPrivilege() { + verifyClusterActionAllowed(ClusterPrivilegeResolver.MANAGE_ENRICH, DeleteEnrichPolicyAction.NAME); + verifyClusterActionAllowed(ClusterPrivilegeResolver.MANAGE_ENRICH, ExecuteEnrichPolicyAction.NAME); + verifyClusterActionAllowed(ClusterPrivilegeResolver.MANAGE_ENRICH, GetEnrichPolicyAction.NAME); + 
verifyClusterActionAllowed(ClusterPrivilegeResolver.MANAGE_ENRICH, PutEnrichPolicyAction.NAME); + verifyClusterActionAllowed(ClusterPrivilegeResolver.MANAGE_ENRICH, "cluster:admin/xpack/enrich/brand_new_api"); + verifyClusterActionDenied(ClusterPrivilegeResolver.MANAGE_ENRICH, "cluster:admin/xpack/whatever"); + } + public void testIlmPrivileges() { { verifyClusterActionAllowed(ClusterPrivilegeResolver.MANAGE_ILM, "cluster:admin/ilm/delete", diff --git a/x-pack/plugin/enrich/build.gradle b/x-pack/plugin/enrich/build.gradle new file mode 100644 index 00000000000..5d40ad3ff6d --- /dev/null +++ b/x-pack/plugin/enrich/build.gradle @@ -0,0 +1,40 @@ +evaluationDependsOn(xpackModule('core')) + +apply plugin: 'elasticsearch.esplugin' +esplugin { + name 'x-pack-enrich' + description 'Elasticsearch Expanded Pack Plugin - Enrich' + classname 'org.elasticsearch.xpack.enrich.EnrichPlugin' + extendedPlugins = ['x-pack-core'] +} +archivesBaseName = 'x-pack-enrich' + +dependencies { + compileOnly project(path: xpackModule('core'), configuration: 'default') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + testCompile project(path: ':modules:ingest-common') + testCompile project(path: xpackModule('monitoring'), configuration: 'testArtifacts') +} + +// No real integ tests in the module: +integTest.enabled = false + +// Instead we create a separate task to run the tests based on ESIntegTestCase +task internalClusterTest(type: Test) { + description = '🌈🌈🌈🦄 Welcome to fantasy integration tests land! 
🦄🌈🌈🌈' + mustRunAfter test + + include '**/*IT.class' + systemProperty 'es.set.netty.runtime.available.processors', 'false' +} + +check.dependsOn internalClusterTest + +// add all sub-projects of the qa sub-project +gradle.projectsEvaluated { + project.subprojects + .find { it.path == project.path + ":qa" } + .subprojects + .findAll { it.path.startsWith(project.path + ":qa") } + .each { check.dependsOn it.check } +} diff --git a/x-pack/plugin/enrich/qa/build.gradle b/x-pack/plugin/enrich/qa/build.gradle new file mode 100644 index 00000000000..d3e95d997c3 --- /dev/null +++ b/x-pack/plugin/enrich/qa/build.gradle @@ -0,0 +1,17 @@ +import org.elasticsearch.gradle.test.RestIntegTestTask + +apply plugin: 'elasticsearch.build' +test.enabled = false + +dependencies { + compile project(':test:framework') +} + +subprojects { + project.tasks.withType(RestIntegTestTask) { + final File xPackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources') + project.copyRestSpec.from(xPackResources) { + include 'rest-api-spec/api/**' + } + } +} diff --git a/x-pack/plugin/enrich/qa/common/build.gradle b/x-pack/plugin/enrich/qa/common/build.gradle new file mode 100644 index 00000000000..4b8496dc2fd --- /dev/null +++ b/x-pack/plugin/enrich/qa/common/build.gradle @@ -0,0 +1,6 @@ +apply plugin: 'elasticsearch.build' +test.enabled = false + +dependencies { + compile project(':test:framework') +} \ No newline at end of file diff --git a/x-pack/plugin/enrich/qa/common/src/main/java/org/elasticsearch/test/enrich/CommonEnrichRestTestCase.java b/x-pack/plugin/enrich/qa/common/src/main/java/org/elasticsearch/test/enrich/CommonEnrichRestTestCase.java new file mode 100644 index 00000000000..06e15730788 --- /dev/null +++ b/x-pack/plugin/enrich/qa/common/src/main/java/org/elasticsearch/test/enrich/CommonEnrichRestTestCase.java @@ -0,0 +1,196 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.test.enrich; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.After; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; + +public abstract class CommonEnrichRestTestCase extends ESRestTestCase { + + @After + public void deletePolicies() throws Exception { + Map responseMap = toMap(adminClient().performRequest(new Request("GET", "/_enrich/policy"))); + @SuppressWarnings("unchecked") + List> policies = (List>) responseMap.get("policies"); + + for (Map entry: policies) { + client().performRequest(new Request("DELETE", "/_enrich/policy/" + + XContentMapValues.extractValue("config.match.name", entry))); + } + } + + @Override + protected boolean preserveIndicesUponCompletion() { + // In order to avoid monitoring from failing exporting docs to monitor index. 
+ return true; + } + + private void setupGenericLifecycleTest(boolean deletePipeilne) throws Exception { + // Create the policy: + Request putPolicyRequest = new Request("PUT", "/_enrich/policy/my_policy"); + putPolicyRequest.setJsonEntity(generatePolicySource("my-source-index")); + assertOK(client().performRequest(putPolicyRequest)); + + // Add entry to source index and then refresh: + Request indexRequest = new Request("PUT", "/my-source-index/_doc/elastic.co"); + indexRequest.setJsonEntity("{\"host\": \"elastic.co\",\"globalRank\": 25,\"tldRank\": 7,\"tld\": \"co\"}"); + assertOK(client().performRequest(indexRequest)); + Request refreshRequest = new Request("POST", "/my-source-index/_refresh"); + assertOK(client().performRequest(refreshRequest)); + + // Execute the policy: + Request executePolicyRequest = new Request("POST", "/_enrich/policy/my_policy/_execute"); + assertOK(client().performRequest(executePolicyRequest)); + + // Create pipeline + Request putPipelineRequest = new Request("PUT", "/_ingest/pipeline/my_pipeline"); + putPipelineRequest.setJsonEntity("{\"processors\":[" + + "{\"enrich\":{\"policy_name\":\"my_policy\",\"field\":\"host\",\"target_field\":\"entry\"}}" + + "]}"); + assertOK(client().performRequest(putPipelineRequest)); + + // Index document using pipeline with enrich processor: + indexRequest = new Request("PUT", "/my-index/_doc/1"); + indexRequest.addParameter("pipeline", "my_pipeline"); + indexRequest.setJsonEntity("{\"host\": \"elastic.co\"}"); + assertOK(client().performRequest(indexRequest)); + + // Check if document has been enriched + Request getRequest = new Request("GET", "/my-index/_doc/1"); + Map response = toMap(client().performRequest(getRequest)); + Map entry = (Map) ((Map) response.get("_source")).get("entry"); + assertThat(entry.size(), equalTo(4)); + assertThat(entry.get("host"), equalTo("elastic.co")); + assertThat(entry.get("tld"), equalTo("co")); + assertThat(entry.get("globalRank"), equalTo(25)); + 
assertThat(entry.get("tldRank"), equalTo(7)); + + if (deletePipeilne) { + // delete the pipeline so the policies can be deleted + client().performRequest(new Request("DELETE", "/_ingest/pipeline/my_pipeline")); + } + } + + public void testBasicFlow() throws Exception { + setupGenericLifecycleTest(true); + assertBusy(CommonEnrichRestTestCase::verifyEnrichMonitoring, 1, TimeUnit.MINUTES); + } + + public void testImmutablePolicy() throws IOException { + Request putPolicyRequest = new Request("PUT", "/_enrich/policy/my_policy"); + putPolicyRequest.setJsonEntity(generatePolicySource("my-source-index")); + assertOK(client().performRequest(putPolicyRequest)); + + ResponseException exc = expectThrows(ResponseException.class, () -> client().performRequest(putPolicyRequest)); + assertTrue(exc.getMessage().contains("policy [my_policy] already exists")); + } + + public void testDeleteIsCaseSensitive() throws Exception { + Request putPolicyRequest = new Request("PUT", "/_enrich/policy/my_policy"); + putPolicyRequest.setJsonEntity(generatePolicySource("my-source-index")); + assertOK(client().performRequest(putPolicyRequest)); + + ResponseException exc = expectThrows(ResponseException.class, + () -> client().performRequest(new Request("DELETE", "/_enrich/policy/MY_POLICY"))); + assertTrue(exc.getMessage().contains("policy [MY_POLICY] not found")); + } + + public void testDeleteExistingPipeline() throws Exception { + // lets not delete the pipeline at first, to test the failure + setupGenericLifecycleTest(false); + + Request putPipelineRequest = new Request("PUT", "/_ingest/pipeline/another_pipeline"); + putPipelineRequest.setJsonEntity("{\"processors\":[" + + "{\"enrich\":{\"policy_name\":\"my_policy\",\"field\":\"host\",\"target_field\":\"entry\"}}" + + "]}"); + assertOK(client().performRequest(putPipelineRequest)); + + ResponseException exc = expectThrows(ResponseException.class, + () -> client().performRequest(new Request("DELETE", "/_enrich/policy/my_policy"))); + 
assertTrue(exc.getMessage().contains("Could not delete policy [my_policy] because" + + " a pipeline is referencing it [my_pipeline, another_pipeline]")); + + // delete the pipelines so the policies can be deleted + client().performRequest(new Request("DELETE", "/_ingest/pipeline/my_pipeline")); + client().performRequest(new Request("DELETE", "/_ingest/pipeline/another_pipeline")); + + // verify the delete did not happen + Request getRequest = new Request("GET", "/_enrich/policy/my_policy"); + assertOK(client().performRequest(getRequest)); + } + + public static String generatePolicySource(String index) throws IOException { + XContentBuilder source = jsonBuilder().startObject().startObject("match"); + { + source.field("indices", index); + if (randomBoolean()) { + source.field("query", QueryBuilders.matchAllQuery()); + } + source.field("match_field", "host"); + source.field("enrich_fields", new String[] {"globalRank", "tldRank", "tld"}); + } + source.endObject().endObject(); + return Strings.toString(source); + } + + private static Map toMap(Response response) throws IOException { + return toMap(EntityUtils.toString(response.getEntity())); + } + + private static Map toMap(String response) { + return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false); + } + + private static void verifyEnrichMonitoring() throws IOException { + Request request = new Request("GET", "/.monitoring-*/_search"); + request.setJsonEntity("{\"query\": {\"term\": {\"type\": \"enrich_coordinator_stats\"}}}"); + Map response; + try { + response = toMap(adminClient().performRequest(request)); + } catch (ResponseException e) { + throw new AssertionError("error while searching", e); + } + + int maxRemoteRequestsTotal = 0; + int maxExecutedSearchesTotal = 0; + + List hits = (List) XContentMapValues.extractValue("hits.hits", response); + assertThat(hits.size(), greaterThanOrEqualTo(1)); + + for (int i = 0; i < hits.size(); i++) { + Map hit = (Map) hits.get(i); + + int 
foundRemoteRequestsTotal = + (int) XContentMapValues.extractValue("_source.enrich_coordinator_stats.remote_requests_total", hit); + maxRemoteRequestsTotal = Math.max(maxRemoteRequestsTotal, foundRemoteRequestsTotal); + int foundExecutedSearchesTotal = + (int) XContentMapValues.extractValue("_source.enrich_coordinator_stats.executed_searches_total", hit); + maxExecutedSearchesTotal = Math.max(maxExecutedSearchesTotal, foundExecutedSearchesTotal); + } + + assertThat(maxRemoteRequestsTotal, greaterThanOrEqualTo(1)); + assertThat(maxExecutedSearchesTotal, greaterThanOrEqualTo(1)); + } +} diff --git a/x-pack/plugin/enrich/qa/rest-with-security/build.gradle b/x-pack/plugin/enrich/qa/rest-with-security/build.gradle new file mode 100644 index 00000000000..91427991e3d --- /dev/null +++ b/x-pack/plugin/enrich/qa/rest-with-security/build.gradle @@ -0,0 +1,19 @@ +apply plugin: 'elasticsearch.testclusters' +apply plugin: 'elasticsearch.standalone-rest-test' +apply plugin: 'elasticsearch.rest-test' + +dependencies { + testCompile project(path: xpackModule('enrich'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('enrich:qa:common'), configuration: 'runtime')} + +testClusters.integTest { + testDistribution = 'DEFAULT' + extraConfigFile 'roles.yml', file('roles.yml') + user username: "test_admin", password: "x-pack-test-password", role: "superuser" + user username: "test_enrich", password: "x-pack-test-password", role: "enrich_user,integ_test_role" + user username: "test_enrich_no_privs", password: "x-pack-test-password", role: "enrich_no_privs" + setting 'xpack.license.self_generated.type', 'basic' + setting 'xpack.security.enabled', 'true' + setting 'xpack.monitoring.collection.enabled', 'true' +} diff --git a/x-pack/plugin/enrich/qa/rest-with-security/roles.yml b/x-pack/plugin/enrich/qa/rest-with-security/roles.yml new file mode 100644 index 00000000000..1674a8414d6 --- /dev/null +++ 
b/x-pack/plugin/enrich/qa/rest-with-security/roles.yml @@ -0,0 +1,18 @@ +integ_test_role: + indices: + - names: [ 'my-index', 'my-source-index' ] + privileges: + - manage + - read + - write + +enrich_no_privs: + cluster: + - manage_ingest_pipelines + - monitor + indices: + - names: [ '.enrich-my_policy*', 'my-index', 'my-source-index' ] + privileges: + - manage + - read + - write diff --git a/x-pack/plugin/enrich/qa/rest-with-security/src/test/java/org/elasticsearch/xpack/enrich/EnrichSecurityFailureIT.java b/x-pack/plugin/enrich/qa/rest-with-security/src/test/java/org/elasticsearch/xpack/enrich/EnrichSecurityFailureIT.java new file mode 100644 index 00000000000..4129dcbf920 --- /dev/null +++ b/x-pack/plugin/enrich/qa/rest-with-security/src/test/java/org/elasticsearch/xpack/enrich/EnrichSecurityFailureIT.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.enrich.CommonEnrichRestTestCase; +import org.elasticsearch.test.rest.ESRestTestCase; + +import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; + +public class EnrichSecurityFailureIT extends ESRestTestCase { + + @Override + protected Settings restClientSettings() { + String token = basicAuthHeaderValue("test_enrich_no_privs", new SecureString("x-pack-test-password".toCharArray())); + return Settings.builder() + .put(ThreadContext.PREFIX + ".Authorization", token) + .build(); + } + + @Override + protected Settings restAdminSettings() { + String token = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray())); + return Settings.builder() + .put(ThreadContext.PREFIX + ".Authorization", token) + .build(); + } + + public void testFailure() throws Exception { + Request putPolicyRequest = new Request("PUT", "/_enrich/policy/my_policy"); + putPolicyRequest.setJsonEntity(CommonEnrichRestTestCase.generatePolicySource("my-source-index")); + ResponseException exc = expectThrows(ResponseException.class, () -> client().performRequest(putPolicyRequest)); + assertTrue(exc.getMessage().contains("action [cluster:admin/xpack/enrich/put] is unauthorized for user [test_enrich_no_privs]")); + } +} diff --git a/x-pack/plugin/enrich/qa/rest-with-security/src/test/java/org/elasticsearch/xpack/enrich/EnrichSecurityIT.java b/x-pack/plugin/enrich/qa/rest-with-security/src/test/java/org/elasticsearch/xpack/enrich/EnrichSecurityIT.java new file mode 100644 index 00000000000..0f7838c4a45 --- /dev/null +++ 
b/x-pack/plugin/enrich/qa/rest-with-security/src/test/java/org/elasticsearch/xpack/enrich/EnrichSecurityIT.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.enrich;
+
+import org.elasticsearch.client.Request;
+import org.elasticsearch.client.ResponseException;
+import org.elasticsearch.common.settings.SecureString;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.test.enrich.CommonEnrichRestTestCase;
+
+import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
+import static org.hamcrest.CoreMatchers.containsString;
+
+/**
+ * Runs the common enrich REST tests as a user that holds enrich privileges, and additionally
+ * verifies that a policy cannot be stored when that user lacks read access to the source indices.
+ */
+public class EnrichSecurityIT extends CommonEnrichRestTestCase {
+
+    @Override
+    protected Settings restClientSettings() {
+        // Test requests are executed as a user that has enrich privileges
+        String token = basicAuthHeaderValue("test_enrich", new SecureString("x-pack-test-password".toCharArray()));
+        return Settings.builder()
+            .put(ThreadContext.PREFIX + ".Authorization", token)
+            .build();
+    }
+
+    @Override
+    protected Settings restAdminSettings() {
+        // Test cleanup is executed as an admin user
+        String token = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray()));
+        return Settings.builder()
+            .put(ThreadContext.PREFIX + ".Authorization", token)
+            .build();
+    }
+
+    public void testInsufficientPermissionsOnNonExistentIndex() throws Exception {
+        // This test is here because it requires a valid user that has permission to execute policy PUTs but should fail if the user
+        // does not have access to read the backing indices used to enrich the data.
+        Request putPolicyRequest = new Request("PUT", "/_enrich/policy/my_policy");
+        putPolicyRequest.setJsonEntity(generatePolicySource("some-other-index"));
+        ResponseException exc = expectThrows(ResponseException.class, () -> client().performRequest(putPolicyRequest));
+        assertThat(exc.getMessage(),
+            containsString("unable to store policy because no indices match with the specified index patterns [some-other-index]"));
+    }
+}
diff --git a/x-pack/plugin/enrich/qa/rest/build.gradle b/x-pack/plugin/enrich/qa/rest/build.gradle
new file mode 100644
index 00000000000..c96782f074f
--- /dev/null
+++ b/x-pack/plugin/enrich/qa/rest/build.gradle
@@ -0,0 +1,14 @@
+// Standalone REST test module exercising the enrich APIs without security enabled.
+apply plugin: 'elasticsearch.testclusters'
+apply plugin: 'elasticsearch.standalone-rest-test'
+apply plugin: 'elasticsearch.rest-test'
+
+dependencies {
+  testCompile project(path: xpackModule('enrich'), configuration: 'runtime')
+  testCompile project(path: xpackModule('enrich:qa:common'), configuration: 'runtime')
+}
+
+testClusters.integTest {
+  testDistribution = 'DEFAULT'
+  setting 'xpack.license.self_generated.type', 'basic'
+  setting 'xpack.monitoring.collection.enabled', 'true'
+}
diff --git a/x-pack/plugin/enrich/qa/rest/src/test/java/org/elasticsearch/xpack/enrich/EnrichIT.java b/x-pack/plugin/enrich/qa/rest/src/test/java/org/elasticsearch/xpack/enrich/EnrichIT.java
new file mode 100644
index 00000000000..ccc3386ee42
--- /dev/null
+++ b/x-pack/plugin/enrich/qa/rest/src/test/java/org/elasticsearch/xpack/enrich/EnrichIT.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.test.enrich.CommonEnrichRestTestCase; + +public class EnrichIT extends CommonEnrichRestTestCase { +} diff --git a/x-pack/plugin/enrich/qa/rest/src/test/java/org/elasticsearch/xpack/enrich/EnrichRestIT.java b/x-pack/plugin/enrich/qa/rest/src/test/java/org/elasticsearch/xpack/enrich/EnrichRestIT.java new file mode 100644 index 00000000000..67d8c5af586 --- /dev/null +++ b/x-pack/plugin/enrich/qa/rest/src/test/java/org/elasticsearch/xpack/enrich/EnrichRestIT.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.enrich; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; + +public class EnrichRestIT extends ESClientYamlSuiteTestCase { + + public EnrichRestIT(final ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return ESClientYamlSuiteTestCase.createParameters(); + } + +} diff --git a/x-pack/plugin/enrich/qa/rest/src/test/resources/rest-api-spec/test/enrich/10_basic.yml b/x-pack/plugin/enrich/qa/rest/src/test/resources/rest-api-spec/test/enrich/10_basic.yml new file mode 100644 index 00000000000..2a837d9c3b6 --- /dev/null +++ b/x-pack/plugin/enrich/qa/rest/src/test/resources/rest-api-spec/test/enrich/10_basic.yml @@ -0,0 +1,48 @@ +--- +"Test enrich crud apis": + + - do: + enrich.put_policy: + name: policy-crud + body: + match: + indices: ["bar*"] + match_field: baz + enrich_fields: ["a", "b"] + - is_true: acknowledged + + - do: + enrich.execute_policy: + name: policy-crud + - match: { 
status.phase: "COMPLETE" } + + - do: + enrich.get_policy: + name: policy-crud + - length: { policies: 1 } + - match: { policies.0.config.match.name: policy-crud } + - match: { policies.0.config.match.indices: ["bar*"] } + - match: { policies.0.config.match.match_field: baz } + - match: { policies.0.config.match.enrich_fields: ["a", "b"] } + + - do: + enrich.get_policy: {} + - length: { policies: 1 } + - match: { policies.0.config.match.name: policy-crud } + - match: { policies.0.config.match.indices: ["bar*"] } + - match: { policies.0.config.match.match_field: baz } + - match: { policies.0.config.match.enrich_fields: ["a", "b"] } + + - do: + enrich.stats: {} + - length: { executing_policies: 0} + - length: { coordinator_stats: 1} + - match: { coordinator_stats.0.queue_size: 0} + - match: { coordinator_stats.0.remote_requests_current: 0} + - gte: { coordinator_stats.0.remote_requests_total: 0} + - gte: { coordinator_stats.0.executed_searches_total: 0} + + - do: + enrich.delete_policy: + name: policy-crud + - is_true: acknowledged diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java new file mode 100644 index 00000000000..4889b4ed3c4 --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java @@ -0,0 +1,167 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.routing.Preference; +import org.elasticsearch.index.query.ConstantScoreQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.ingest.AbstractProcessor; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.enrich.action.EnrichCoordinatorProxyAction; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; + +public abstract class AbstractEnrichProcessor extends AbstractProcessor { + + private final String policyName; + private final BiConsumer> searchRunner; + private final String field; + private final String targetField; + private final boolean ignoreMissing; + private final boolean overrideEnabled; + protected final String matchField; + protected final int maxMatches; + + protected AbstractEnrichProcessor(String tag, Client client, String policyName, String field, String targetField, + boolean ignoreMissing, boolean overrideEnabled, String matchField, int maxMatches) { + this(tag, createSearchRunner(client), policyName, field, targetField, ignoreMissing, overrideEnabled, matchField, maxMatches); + } + + protected AbstractEnrichProcessor(String tag, + BiConsumer> searchRunner, + String policyName, String field, String targetField, boolean ignoreMissing, boolean overrideEnabled, + String matchField, int maxMatches) { + super(tag); + this.policyName = policyName; + this.searchRunner = searchRunner; + this.field = field; + this.targetField = targetField; + this.ignoreMissing = ignoreMissing; + 
this.overrideEnabled = overrideEnabled; + this.matchField = matchField; + this.maxMatches = maxMatches; + } + + public abstract QueryBuilder getQueryBuilder(Object fieldValue); + + @Override + public void execute(IngestDocument ingestDocument, BiConsumer handler) { + try { + // If a document does not have the enrich key, return the unchanged document + final Object value = ingestDocument.getFieldValue(field, Object.class, ignoreMissing); + if (value == null) { + handler.accept(ingestDocument, null); + return; + } + + QueryBuilder queryBuilder = getQueryBuilder(value); + ConstantScoreQueryBuilder constantScore = new ConstantScoreQueryBuilder(queryBuilder); + SearchSourceBuilder searchBuilder = new SearchSourceBuilder(); + searchBuilder.from(0); + searchBuilder.size(maxMatches); + searchBuilder.trackScores(false); + searchBuilder.fetchSource(true); + searchBuilder.query(constantScore); + SearchRequest req = new SearchRequest(); + req.indices(EnrichPolicy.getBaseName(getPolicyName())); + req.preference(Preference.LOCAL.type()); + req.source(searchBuilder); + + searchRunner.accept(req, (searchResponse, e) -> { + if (e != null) { + handler.accept(null, e); + return; + } + + // If the index is empty, return the unchanged document + // If the enrich key does not exist in the index, throw an error + // If no documents match the key, return the unchanged document + SearchHit[] searchHits = searchResponse.getHits().getHits(); + if (searchHits.length < 1) { + handler.accept(ingestDocument, null); + return; + } + + if (overrideEnabled || ingestDocument.hasField(targetField) == false) { + if (maxMatches == 1) { + Map firstDocument = searchHits[0].getSourceAsMap(); + ingestDocument.setFieldValue(targetField, firstDocument); + } else { + List> enrichDocuments = new ArrayList<>(searchHits.length); + for (SearchHit searchHit : searchHits) { + Map enrichDocument = searchHit.getSourceAsMap(); + enrichDocuments.add(enrichDocument); + } + ingestDocument.setFieldValue(targetField, 
enrichDocuments); + } + } + handler.accept(ingestDocument, null); + }); + } catch (Exception e) { + handler.accept(null, e); + } + } + + @Override + public IngestDocument execute(IngestDocument ingestDocument) throws Exception { + throw new UnsupportedOperationException("this method should not get executed"); + } + + public String getPolicyName() { + return policyName; + } + + @Override + public String getType() { + return EnrichProcessorFactory.TYPE; + } + + String getField() { + return field; + } + + public String getTargetField() { + return targetField; + } + + boolean isIgnoreMissing() { + return ignoreMissing; + } + + boolean isOverrideEnabled() { + return overrideEnabled; + } + + public String getMatchField() { + return matchField; + } + + int getMaxMatches() { + return maxMatches; + } + + private static BiConsumer> createSearchRunner(Client client) { + return (req, handler) -> { + client.execute(EnrichCoordinatorProxyAction.INSTANCE, req, ActionListener.wrap( + resp -> { + handler.accept(resp, null); + }, + e -> { + handler.accept(null, e); + })); + }; + } +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichMetadata.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichMetadata.java new file mode 100644 index 00000000000..264a4f06481 --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichMetadata.java @@ -0,0 +1,123 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.AbstractNamedDiffable; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; + +import java.io.IOException; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * Encapsulates enrich policies as custom metadata inside cluster state. + */ +public final class EnrichMetadata extends AbstractNamedDiffable implements XPackPlugin.XPackMetaDataCustom { + + static final String TYPE = "enrich"; + + static final ParseField POLICIES = new ParseField("policies"); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "enrich_metadata", + args -> new EnrichMetadata((Map) args[0]) + ); + + static { + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> { + Map patterns = new HashMap<>(); + String fieldName = null; + for (XContentParser.Token token = p.nextToken(); token != XContentParser.Token.END_OBJECT; token = p.nextToken()) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = p.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + patterns.put(fieldName, EnrichPolicy.fromXContent(p)); + } else { + throw new ElasticsearchParseException("unexpected token [" + token + "]"); + } + } + return patterns; + }, POLICIES); + } + + public static EnrichMetadata 
fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + private final Map policies; + + public EnrichMetadata(StreamInput in) throws IOException { + this(in.readMap(StreamInput::readString, EnrichPolicy::new)); + } + + public EnrichMetadata(Map policies) { + this.policies = Collections.unmodifiableMap(policies); + } + + public Map getPolicies() { + return policies; + } + + @Override + public EnumSet context() { + return MetaData.ALL_CONTEXTS; + } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_5_0; + } + + @Override + public String getWriteableName() { + return TYPE; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeMap(policies, StreamOutput::writeString, (out1, value) -> value.writeTo(out1)); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(POLICIES.getPreferredName()); + for (Map.Entry entry : policies.entrySet()) { + builder.startObject(entry.getKey()); + builder.value(entry.getValue()); + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnrichMetadata that = (EnrichMetadata) o; + return policies.equals(that.policies); + } + + @Override + public int hashCode() { + return Objects.hash(policies); + } + +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java new file mode 100644 index 00000000000..be7e40be8c4 --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java @@ -0,0 +1,203 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.NamedDiff; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.ingest.Processor; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.IngestPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import 
org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; +import org.elasticsearch.xpack.enrich.action.EnrichCoordinatorProxyAction; +import org.elasticsearch.xpack.enrich.action.EnrichCoordinatorStatsAction; +import org.elasticsearch.xpack.enrich.action.EnrichShardMultiSearchAction; +import org.elasticsearch.xpack.enrich.action.TransportDeleteEnrichPolicyAction; +import org.elasticsearch.xpack.enrich.action.TransportEnrichStatsAction; +import org.elasticsearch.xpack.enrich.action.TransportExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.enrich.action.TransportGetEnrichPolicyAction; +import org.elasticsearch.xpack.enrich.action.TransportPutEnrichPolicyAction; +import org.elasticsearch.xpack.enrich.rest.RestDeleteEnrichPolicyAction; +import org.elasticsearch.xpack.enrich.rest.RestEnrichStatsAction; +import org.elasticsearch.xpack.enrich.rest.RestExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.enrich.rest.RestGetEnrichPolicyAction; +import org.elasticsearch.xpack.enrich.rest.RestPutEnrichPolicyAction; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; + +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static org.elasticsearch.xpack.core.XPackSettings.ENRICH_ENABLED_SETTING; + +public class EnrichPlugin extends Plugin implements ActionPlugin, IngestPlugin { + + static final Setting ENRICH_FETCH_SIZE_SETTING = + Setting.intSetting("enrich.fetch_size", 10000, 1, 1000000, Setting.Property.NodeScope); + + static final Setting ENRICH_MAX_CONCURRENT_POLICY_EXECUTIONS = + Setting.intSetting("enrich.max_concurrent_policy_executions", 50, 1, Setting.Property.NodeScope); + + static final Setting ENRICH_CLEANUP_PERIOD = + Setting.timeSetting("enrich.cleanup_period", 
new TimeValue(15, TimeUnit.MINUTES), Setting.Property.NodeScope); + + public static final Setting COORDINATOR_PROXY_MAX_CONCURRENT_REQUESTS = + Setting.intSetting("enrich.coordinator_proxy.max_concurrent_requests", 8, 1, 10000, Setting.Property.NodeScope); + + public static final Setting COORDINATOR_PROXY_MAX_LOOKUPS_PER_REQUEST = + Setting.intSetting("enrich.coordinator_proxy.max_lookups_per_request", 128, 1, 10000, Setting.Property.NodeScope); + + static final Setting ENRICH_MAX_FORCE_MERGE_ATTEMPTS = + Setting.intSetting("enrich.max_force_merge_attempts", 3, 1, 10, Setting.Property.NodeScope); + + private static final String QUEUE_CAPACITY_SETTING_NAME = "enrich.coordinator_proxy.queue_capacity"; + public static final Setting COORDINATOR_PROXY_QUEUE_CAPACITY = new Setting<>(QUEUE_CAPACITY_SETTING_NAME, + settings -> { + int maxConcurrentRequests = COORDINATOR_PROXY_MAX_CONCURRENT_REQUESTS.get(settings); + int maxLookupsPerRequest = COORDINATOR_PROXY_MAX_LOOKUPS_PER_REQUEST.get(settings); + return String.valueOf(maxConcurrentRequests * maxLookupsPerRequest); + }, + val -> Setting.parseInt(val, 1, Integer.MAX_VALUE, QUEUE_CAPACITY_SETTING_NAME), + Setting.Property.NodeScope); + + private final Settings settings; + private final Boolean enabled; + private final boolean transportClientMode; + + public EnrichPlugin(final Settings settings) { + this.settings = settings; + this.enabled = ENRICH_ENABLED_SETTING.get(settings); + this.transportClientMode = XPackPlugin.transportClientMode(settings); + } + + @Override + public Map getProcessors(Processor.Parameters parameters) { + if (enabled == false) { + return emptyMap(); + } + + EnrichProcessorFactory factory = new EnrichProcessorFactory(parameters.client); + parameters.ingestService.addIngestClusterStateListener(factory); + return Collections.singletonMap(EnrichProcessorFactory.TYPE, factory); + } + + protected XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); } + + public List> 
getActions() { + if (enabled == false) { + return emptyList(); + } + + return Arrays.asList( + new ActionHandler<>(GetEnrichPolicyAction.INSTANCE, TransportGetEnrichPolicyAction.class), + new ActionHandler<>(DeleteEnrichPolicyAction.INSTANCE, TransportDeleteEnrichPolicyAction.class), + new ActionHandler<>(PutEnrichPolicyAction.INSTANCE, TransportPutEnrichPolicyAction.class), + new ActionHandler<>(ExecuteEnrichPolicyAction.INSTANCE, TransportExecuteEnrichPolicyAction.class), + new ActionHandler<>(EnrichStatsAction.INSTANCE, TransportEnrichStatsAction.class), + new ActionHandler<>(EnrichCoordinatorProxyAction.INSTANCE, EnrichCoordinatorProxyAction.TransportAction.class), + new ActionHandler<>(EnrichShardMultiSearchAction.INSTANCE, EnrichShardMultiSearchAction.TransportAction.class), + new ActionHandler<>(EnrichCoordinatorStatsAction.INSTANCE, EnrichCoordinatorStatsAction.TransportAction.class) + ); + } + + public List getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster) { + if (enabled == false) { + return emptyList(); + } + + return Arrays.asList( + new RestGetEnrichPolicyAction(restController), + new RestDeleteEnrichPolicyAction(restController), + new RestPutEnrichPolicyAction(restController), + new RestExecuteEnrichPolicyAction(restController), + new RestEnrichStatsAction(restController) + ); + } + + @Override + public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, ScriptService scriptService, + NamedXContentRegistry xContentRegistry, Environment environment, + NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { + if (enabled == false || transportClientMode) { + return emptyList(); + } + + EnrichPolicyLocks enrichPolicyLocks = new 
EnrichPolicyLocks(); + EnrichPolicyMaintenanceService enrichPolicyMaintenanceService = new EnrichPolicyMaintenanceService(settings, client, + clusterService, threadPool, enrichPolicyLocks); + enrichPolicyMaintenanceService.initialize(); + return Arrays.asList( + enrichPolicyLocks, + new EnrichCoordinatorProxyAction.Coordinator(client, settings), + enrichPolicyMaintenanceService + ); + } + + @Override + public List getNamedWriteables() { + return Arrays.asList( + new NamedWriteableRegistry.Entry(MetaData.Custom.class, EnrichMetadata.TYPE, EnrichMetadata::new), + new NamedWriteableRegistry.Entry(NamedDiff.class, EnrichMetadata.TYPE, + in -> EnrichMetadata.readDiffFrom(MetaData.Custom.class, EnrichMetadata.TYPE, in)) + ); + } + + public List getNamedXContent() { + return Arrays.asList( + new NamedXContentRegistry.Entry(MetaData.Custom.class, new ParseField(EnrichMetadata.TYPE), EnrichMetadata::fromXContent) + ); + } + + @Override + public List> getSettings() { + return Arrays.asList( + ENRICH_FETCH_SIZE_SETTING, + ENRICH_MAX_CONCURRENT_POLICY_EXECUTIONS, + ENRICH_CLEANUP_PERIOD, + COORDINATOR_PROXY_MAX_CONCURRENT_REQUESTS, + COORDINATOR_PROXY_MAX_LOOKUPS_PER_REQUEST, + COORDINATOR_PROXY_QUEUE_CAPACITY, + ENRICH_MAX_FORCE_MERGE_ATTEMPTS + ); + } +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutor.java new file mode 100644 index 00000000000..361f4f6b285 --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutor.java @@ -0,0 +1,203 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.enrich; + +import java.util.Map; +import java.util.concurrent.Semaphore; +import java.util.function.BiConsumer; +import java.util.function.LongSupplier; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskAwareRequest; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.tasks.TaskListener; +import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus; + +public class EnrichPolicyExecutor { + + private final ClusterService clusterService; + private final Client client; + private final TaskManager taskManager; + private final ThreadPool threadPool; + private final IndexNameExpressionResolver indexNameExpressionResolver; + private final LongSupplier nowSupplier; + private final int fetchSize; + private final EnrichPolicyLocks policyLocks; + private final int maximumConcurrentPolicyExecutions; + private final int maxForceMergeAttempts; + private final Semaphore policyExecutionPermits; + + public EnrichPolicyExecutor(Settings settings, + ClusterService clusterService, + Client client, + TaskManager taskManager, + ThreadPool threadPool, + IndexNameExpressionResolver indexNameExpressionResolver, + EnrichPolicyLocks policyLocks, + LongSupplier nowSupplier) { + this.clusterService = clusterService; + this.client = client; + this.taskManager = taskManager; + this.threadPool = threadPool; + 
this.indexNameExpressionResolver = indexNameExpressionResolver; + this.nowSupplier = nowSupplier; + this.policyLocks = policyLocks; + this.fetchSize = EnrichPlugin.ENRICH_FETCH_SIZE_SETTING.get(settings); + this.maximumConcurrentPolicyExecutions = EnrichPlugin.ENRICH_MAX_CONCURRENT_POLICY_EXECUTIONS.get(settings); + this.maxForceMergeAttempts = EnrichPlugin.ENRICH_MAX_FORCE_MERGE_ATTEMPTS.get(settings); + this.policyExecutionPermits = new Semaphore(maximumConcurrentPolicyExecutions); + } + + private void tryLockingPolicy(String policyName) { + policyLocks.lockPolicy(policyName); + if (policyExecutionPermits.tryAcquire() == false) { + // Release policy lock, and throw a different exception + policyLocks.releasePolicy(policyName); + throw new EsRejectedExecutionException("Policy execution failed. Policy execution for [" + policyName + "] would exceed " + + "maximum concurrent policy executions [" + maximumConcurrentPolicyExecutions + "]"); + } + } + + private void releasePolicy(String policyName) { + try { + policyExecutionPermits.release(); + } finally { + policyLocks.releasePolicy(policyName); + } + } + + private class PolicyCompletionListener implements ActionListener { + private final String policyName; + private final ExecuteEnrichPolicyTask task; + private final BiConsumer onResponse; + private final BiConsumer onFailure; + + PolicyCompletionListener(String policyName, ExecuteEnrichPolicyTask task, + BiConsumer onResponse, BiConsumer onFailure) { + this.policyName = policyName; + this.task = task; + this.onResponse = onResponse; + this.onFailure = onFailure; + } + + @Override + public void onResponse(ExecuteEnrichPolicyStatus status) { + assert ExecuteEnrichPolicyStatus.PolicyPhases.COMPLETE.equals(status.getPhase()) : "incomplete task returned"; + releasePolicy(policyName); + try { + taskManager.unregister(task); + } finally { + onResponse.accept(task, status); + } + } + + @Override + public void onFailure(Exception e) { + // Set task status to failed to avoid 
having to catch and rethrow exceptions everywhere + task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.FAILED)); + releasePolicy(policyName); + try { + taskManager.unregister(task); + } finally { + onFailure.accept(task, e); + } + } + } + + protected Runnable createPolicyRunner(String policyName, EnrichPolicy policy, ExecuteEnrichPolicyTask task, + ActionListener listener) { + return new EnrichPolicyRunner(policyName, policy, task, listener, clusterService, client, indexNameExpressionResolver, nowSupplier, + fetchSize, maxForceMergeAttempts); + } + + private EnrichPolicy getPolicy(ExecuteEnrichPolicyAction.Request request) { + // Look up policy in policy store and execute it + EnrichPolicy policy = EnrichStore.getPolicy(request.getName(), clusterService.state()); + if (policy == null) { + throw new IllegalArgumentException("Policy execution failed. Could not locate policy with id [" + request.getName() + "]"); + } + return policy; + } + + public Task runPolicy(ExecuteEnrichPolicyAction.Request request, ActionListener listener) { + return runPolicy(request, getPolicy(request), listener); + } + + public Task runPolicy(ExecuteEnrichPolicyAction.Request request, TaskListener listener) { + return runPolicy(request, getPolicy(request), listener); + } + + public Task runPolicy(ExecuteEnrichPolicyAction.Request request, EnrichPolicy policy, + ActionListener listener) { + return runPolicy(request, policy, (t, r) -> listener.onResponse(r), (t, e) -> listener.onFailure(e)); + } + + public Task runPolicy(ExecuteEnrichPolicyAction.Request request, EnrichPolicy policy, + TaskListener listener) { + return runPolicy(request, policy, listener::onResponse, listener::onFailure); + } + + private Task runPolicy(ExecuteEnrichPolicyAction.Request request, EnrichPolicy policy, + BiConsumer onResponse, BiConsumer onFailure) { + tryLockingPolicy(request.getName()); + try { + return runPolicyTask(request, policy, onResponse, onFailure); + } catch (Exception 
e) { + // Be sure to unlock if submission failed. + releasePolicy(request.getName()); + throw e; + } + } + + private Task runPolicyTask(final ExecuteEnrichPolicyAction.Request request, EnrichPolicy policy, + BiConsumer onResponse, BiConsumer onFailure) { + Task asyncTask = taskManager.register("enrich", "policy_execution", new TaskAwareRequest() { + @Override + public void setParentTask(TaskId taskId) { + request.setParentTask(taskId); + } + + @Override + public TaskId getParentTask() { + return request.getParentTask(); + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new ExecuteEnrichPolicyTask(id, type, action, getDescription(), parentTaskId, headers); + } + + @Override + public String getDescription() { + return request.getName(); + } + }); + ExecuteEnrichPolicyTask task = (ExecuteEnrichPolicyTask) asyncTask; + try { + task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.SCHEDULED)); + PolicyCompletionListener completionListener = new PolicyCompletionListener(request.getName(), task, onResponse, onFailure); + Runnable runnable = createPolicyRunner(request.getName(), policy, task, completionListener); + threadPool.executor(ThreadPool.Names.GENERIC).execute(runnable); + return asyncTask; + } catch (Exception e) { + // Unregister task in case of exception + task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.FAILED)); + taskManager.unregister(asyncTask); + throw e; + } + } +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyLocks.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyLocks.java new file mode 100644 index 00000000000..44673c8ef8d --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyLocks.java @@ -0,0 +1,124 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.enrich; + +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; + +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; + +/** + * A coordination object that allows multiple distinct polices to be executed concurrently, but also makes sure that a single + * policy can only have one execution in flight at a time. Additionally, this class allows for capturing the current execution + * state of any policy executions in flight. This execution state can be captured and then later be used to verify that no policy + * executions have started in the time between the first state capturing. + */ +public class EnrichPolicyLocks { + + /** + * A snapshot in time detailing if any policy executions are in flight and total number of local executions that + * have been kicked off since the node has started + */ + public static class EnrichPolicyExecutionState { + final boolean anyPolicyInFlight; + final long executions; + + EnrichPolicyExecutionState(boolean anyPolicyInFlight, long executions) { + this.anyPolicyInFlight = anyPolicyInFlight; + this.executions = executions; + } + + public boolean isAnyPolicyInFlight() { + return anyPolicyInFlight; + } + } + + /** + * A read-write lock that allows for policies to be executed concurrently with minimal overhead, but allows for blocking + * policy locking operations while capturing the state of policy executions. 
+ */ + private final ReadWriteLock currentStateLock = new ReentrantReadWriteLock(true); + + /** + * A mapping of policy name to a semaphore used for ensuring that a single policy can only have one execution in flight + * at a time. + */ + private final ConcurrentHashMap policyLocks = new ConcurrentHashMap<>(); + + /** + * A counter that is used as a sort of policy execution sequence id / dirty bit. This is incremented every time a policy + * successfully acquires an execution lock. + */ + private final AtomicLong policyRunCounter = new AtomicLong(0L); + + /** + * Locks a policy to prevent concurrent execution. If the policy is currently executing, this method will immediately + * throw without waiting. This method only blocks if another thread is currently capturing the current policy execution state. + * @param policyName The policy name to lock for execution + * @throws EsRejectedExecutionException if the policy is locked already or if the maximum number of concurrent policy executions + * has been reached + */ + public void lockPolicy(String policyName) { + currentStateLock.readLock().lock(); + try { + Semaphore runLock = policyLocks.computeIfAbsent(policyName, (name) -> new Semaphore(1)); + boolean acquired = runLock.tryAcquire(); + if (acquired == false) { + throw new EsRejectedExecutionException("Could not obtain lock because policy execution for [" + policyName + + "] is already in progress."); + } + policyRunCounter.incrementAndGet(); + } finally { + currentStateLock.readLock().unlock(); + } + } + + /** + * Captures a snapshot of the current policy execution state. This method never blocks, instead assuming that a policy is + * currently starting its execution and returns an appropriate state. 
+ * @return The current state of in-flight policy executions + */ + public EnrichPolicyExecutionState captureExecutionState() { + if (currentStateLock.writeLock().tryLock()) { + try { + long revision = policyRunCounter.get(); + long currentPolicyExecutions = policyLocks.mappingCount(); + return new EnrichPolicyExecutionState(currentPolicyExecutions > 0L, revision); + } finally { + currentStateLock.writeLock().unlock(); + } + } + return new EnrichPolicyExecutionState(true, policyRunCounter.get()); + } + + /** + * Checks if the current execution state matches that of the given execution state. Used to ensure that over a period of time + * no changes to the policy execution state have occurred. + * @param previousState The previous state to check the current state against + * @return true if the current state matches the given previous state, false if policy executions have changed over time. + */ + boolean isSameState(EnrichPolicyExecutionState previousState) { + EnrichPolicyExecutionState currentState = captureExecutionState(); + return currentState.anyPolicyInFlight == previousState.anyPolicyInFlight && + currentState.executions == previousState.executions; + } + + /** + * Releases the lock for a given policy name, allowing it to be executed. + * @param policyName The policy to release. + */ + public void releasePolicy(String policyName) { + currentStateLock.readLock().lock(); + try { + policyLocks.remove(policyName); + } finally { + currentStateLock.readLock().unlock(); + } + } +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyMaintenanceService.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyMaintenanceService.java new file mode 100644 index 00000000000..4408dea7eb7 --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyMaintenanceService.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.enrich; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.LocalNodeMasterListener; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.component.LifecycleListener; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.common.xcontent.ObjectPath; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.threadpool.Scheduler; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Semaphore; + +public class EnrichPolicyMaintenanceService implements LocalNodeMasterListener { + + private static final Logger logger = LogManager.getLogger(EnrichPolicyMaintenanceService.class); + + private static final String MAPPING_POLICY_FIELD_PATH = "_meta." 
+ EnrichPolicyRunner.ENRICH_POLICY_NAME_FIELD_NAME; + private static final IndicesOptions IGNORE_UNAVAILABLE = IndicesOptions.fromOptions(true, false, false, false); + + private final Settings settings; + private final Client client; + private final ClusterService clusterService; + private final ThreadPool threadPool; + private final EnrichPolicyLocks enrichPolicyLocks; + + private volatile boolean isMaster = false; + private volatile Scheduler.Cancellable cancellable; + private final Semaphore maintenanceLock = new Semaphore(1); + + EnrichPolicyMaintenanceService(Settings settings, Client client, ClusterService clusterService, ThreadPool threadPool, + EnrichPolicyLocks enrichPolicyLocks) { + this.settings = settings; + this.client = client; + this.clusterService = clusterService; + this.threadPool = threadPool; + this.enrichPolicyLocks = enrichPolicyLocks; + } + + void initialize() { + clusterService.addLocalNodeMasterListener(this); + } + + @Override + public void onMaster() { + if (cancellable == null || cancellable.isCancelled()) { + isMaster = true; + scheduleNext(); + clusterService.addLifecycleListener(new LifecycleListener() { + @Override + public void beforeStop() { + offMaster(); + } + }); + } + } + + @Override + public void offMaster() { + if (cancellable != null && cancellable.isCancelled() == false) { + isMaster = false; + cancellable.cancel(); + } + } + + @Override + public String executorName() { + return ThreadPool.Names.GENERIC; + } + + private void scheduleNext() { + if (isMaster) { + try { + TimeValue waitTime = EnrichPlugin.ENRICH_CLEANUP_PERIOD.get(settings); + cancellable = threadPool.schedule(this::execute, waitTime, ThreadPool.Names.GENERIC); + } catch (EsRejectedExecutionException e) { + if (e.isExecutorShutdown()) { + logger.debug("Failed to schedule next [enrich] maintenance task; Shutting down", e); + } else { + throw e; + } + } + } else { + logger.debug("No longer master; Skipping next scheduled [enrich] maintenance task"); + } + } + + 
private void execute() { + logger.debug("Triggering scheduled [enrich] maintenance task"); + if (isMaster) { + maybeCleanUpEnrichIndices(); + scheduleNext(); + } else { + logger.debug("No longer master; Skipping next scheduled [enrich] maintenance task"); + } + } + + private void maybeCleanUpEnrichIndices() { + if (maintenanceLock.tryAcquire()) { + cleanUpEnrichIndices(); + } else { + logger.debug("Previous [enrich] maintenance task still in progress; Skipping this execution"); + } + } + + void concludeMaintenance() { + maintenanceLock.release(); + } + + void cleanUpEnrichIndices() { + final Map policies = EnrichStore.getPolicies(clusterService.state()); + GetIndexRequest indices = new GetIndexRequest() + .indices(EnrichPolicy.ENRICH_INDEX_NAME_BASE + "*") + .indicesOptions(IndicesOptions.lenientExpand()); + // Check that no enrich policies are being executed + final EnrichPolicyLocks.EnrichPolicyExecutionState executionState = enrichPolicyLocks.captureExecutionState(); + if (executionState.isAnyPolicyInFlight() == false) { + client.admin().indices().getIndex(indices, new ActionListener() { + @Override + public void onResponse(GetIndexResponse getIndexResponse) { + // Ensure that no enrich policy executions started while we were retrieving the snapshot of index data + // If executions were kicked off, we can't be sure that the indices we are about to process are a + // stable state of the system (they could be new indices created by a policy that hasn't been published yet). 
+ if (enrichPolicyLocks.isSameState(executionState)) { + String[] removeIndices = Arrays.stream(getIndexResponse.getIndices()) + .filter(indexName -> shouldRemoveIndex(getIndexResponse, policies, indexName)) + .toArray(String[]::new); + deleteIndices(removeIndices); + } else { + logger.debug("Skipping enrich index cleanup since enrich policy was executed while gathering indices"); + concludeMaintenance(); + } + } + + @Override + public void onFailure(Exception e) { + logger.error("Failed to get indices during enrich index maintenance task", e); + concludeMaintenance(); + } + }); + } else { + concludeMaintenance(); + } + } + + private boolean shouldRemoveIndex(GetIndexResponse getIndexResponse, Map policies, String indexName) { + // Find the policy on the index + logger.debug("Checking if should remove enrich index [{}]", indexName); + ImmutableOpenMap indexMapping = getIndexResponse.getMappings().get(indexName); + MappingMetaData mappingMetaData = indexMapping.get(MapperService.SINGLE_MAPPING_NAME); + Map mapping = mappingMetaData.getSourceAsMap(); + String policyName = ObjectPath.eval(MAPPING_POLICY_FIELD_PATH, mapping); + // Check if index has a corresponding policy + if (policyName == null || policies.containsKey(policyName) == false) { + // No corresponding policy. Index should be marked for removal. + logger.debug("Enrich index [{}] does not correspond to any existing policy. Found policy name [{}]", indexName, policyName); + return true; + } + // Check if index is currently linked to an alias + final String aliasName = EnrichPolicy.getBaseName(policyName); + List aliasMetadata = getIndexResponse.aliases().get(indexName); + if (aliasMetadata == null) { + logger.debug("Enrich index [{}] is not marked as a live index since it has no alias information", indexName); + return true; + } + boolean hasAlias = aliasMetadata + .stream() + .anyMatch((aliasMetaData -> aliasMetaData.getAlias().equals(aliasName))); + // Index is not currently published to the enrich alias. 
Should be marked for removal. + if (hasAlias == false) { + logger.debug("Enrich index [{}] is not marked as a live index since it lacks the alias [{}]", indexName, aliasName); + return true; + } + logger.debug("Enrich index [{}] was spared since it is associated with the valid policy [{}] and references alias [{}]", + indexName, policyName, aliasName); + return false; + } + + private void deleteIndices(String[] removeIndices) { + if (removeIndices.length != 0) { + DeleteIndexRequest deleteIndices = new DeleteIndexRequest() + .indices(removeIndices) + .indicesOptions(IGNORE_UNAVAILABLE); + client.admin().indices().delete(deleteIndices, new ActionListener() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + logger.debug("Completed deletion of stale enrich indices [{}]", () -> Arrays.toString(removeIndices)); + concludeMaintenance(); + } + + @Override + public void onFailure(Exception e) { + logger.error(() -> "Enrich maintenance task could not delete abandoned enrich indices [" + + Arrays.toString(removeIndices) + "]", e); + concludeMaintenance(); + } + }); + } else { + concludeMaintenance(); + } + } +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java new file mode 100644 index 00000000000..e25ed646e87 --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich; + +import java.io.IOException; +import java.io.UncheckedIOException; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.ingest.IngestMetadata; +import org.elasticsearch.ingest.PipelineConfiguration; + +/** + * Manages the definitions and lifecycle of the ingest pipeline used by the reindex operation within the Enrich Policy execution. + */ +public class EnrichPolicyReindexPipeline { + + /** + * The current version of the pipeline definition. Used in the pipeline's name to differentiate from breaking changes + * (separate from product version). + */ + static final String CURRENT_PIPELINE_VERSION_NAME = "7"; + + /** + * The last version of the distribution that updated the pipelines definition. + * TODO: This should be the version of ES that Enrich first ships in, which likely doesn't exist yet. 
+ */ + static final int ENRICH_PIPELINE_LAST_UPDATED_VERSION = Version.V_7_4_0.id; + + static String pipelineName() { + return "enrich-policy-reindex-" + CURRENT_PIPELINE_VERSION_NAME; + } + + /** + * Checks if the current version of the pipeline definition is installed in the cluster + * @param clusterState The cluster state to check + * @return true if a pipeline exists that is compatible with this version of Enrich, false otherwise + */ + static boolean exists(ClusterState clusterState) { + final IngestMetadata ingestMetadata = clusterState.getMetaData().custom(IngestMetadata.TYPE); + // we ensure that we both have the pipeline and its version represents the current (or later) version + if (ingestMetadata != null) { + final PipelineConfiguration pipeline = ingestMetadata.getPipelines().get(pipelineName()); + if (pipeline != null) { + Object version = pipeline.getConfigAsMap().get("version"); + return version instanceof Number && ((Number) version).intValue() >= ENRICH_PIPELINE_LAST_UPDATED_VERSION; + } + } + return false; + } + + /** + * Creates a pipeline with the current version's pipeline definition + * @param client Client used to execute put pipeline + * @param listener Callback used after pipeline has been created + */ + public static void create(Client client, ActionListener listener) { + final BytesReference pipeline = BytesReference.bytes(currentEnrichPipelineDefinition(XContentType.JSON)); + final PutPipelineRequest request = new PutPipelineRequest(pipelineName(), pipeline, XContentType.JSON); + client.admin().cluster().putPipeline(request, listener); + } + + private static XContentBuilder currentEnrichPipelineDefinition(XContentType xContentType) { + try { + return XContentBuilder.builder(xContentType.xContent()) + .startObject() + .field("description", "This pipeline sanitizes documents that will be stored in enrich indices for ingest lookup " + + "purposes. 
It is an internal pipeline and should not be modified.") + .field("version", ENRICH_PIPELINE_LAST_UPDATED_VERSION) + .startArray("processors") + .startObject() + // remove the id from the document so that documents from multiple indices will always be unique. + .startObject("remove") + .field("field", "_id") + .endObject() + .endObject() + .endArray() + .endObject(); + } catch (final IOException e) { + throw new UncheckedIOException("Failed to create pipeline for enrich document sanitization", e); + } + } + +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java new file mode 100644 index 00000000000..412ffeef386 --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java @@ -0,0 +1,470 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; +import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; +import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.admin.indices.segments.IndexSegments; +import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; +import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; +import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequest; +import org.elasticsearch.action.admin.indices.segments.ShardSegments; +import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.CheckedFunction; +import 
org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.index.reindex.ReindexAction; +import org.elasticsearch.index.reindex.ReindexRequest; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.LongSupplier; + +public class EnrichPolicyRunner implements Runnable { + + private static final Logger logger = LogManager.getLogger(EnrichPolicyRunner.class); + + static final String ENRICH_POLICY_NAME_FIELD_NAME = "enrich_policy_name"; + static final String ENRICH_POLICY_TYPE_FIELD_NAME = "enrich_policy_type"; + static final String ENRICH_MATCH_FIELD_NAME = "enrich_match_field"; + static final String ENRICH_README_FIELD_NAME = "enrich_readme"; + + static final String ENRICH_INDEX_README_TEXT = "This index is managed by Elasticsearch and should not be modified in any way."; + + private final String policyName; + private final EnrichPolicy policy; + private final ExecuteEnrichPolicyTask task; + private final ActionListener listener; + private final ClusterService clusterService; + private final Client client; + private final IndexNameExpressionResolver indexNameExpressionResolver; + private final LongSupplier nowSupplier; + private final int 
fetchSize; + private final int maxForceMergeAttempts; + + EnrichPolicyRunner(String policyName, EnrichPolicy policy, ExecuteEnrichPolicyTask task, + ActionListener listener, ClusterService clusterService, Client client, + IndexNameExpressionResolver indexNameExpressionResolver, LongSupplier nowSupplier, int fetchSize, + int maxForceMergeAttempts) { + this.policyName = policyName; + this.policy = policy; + this.task = task; + this.listener = listener; + this.clusterService = clusterService; + this.client = client; + this.indexNameExpressionResolver = indexNameExpressionResolver; + this.nowSupplier = nowSupplier; + this.fetchSize = fetchSize; + this.maxForceMergeAttempts = maxForceMergeAttempts; + } + + @Override + public void run() { + logger.info("Policy [{}]: Running enrich policy", policyName); + task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.RUNNING)); + // Collect the source index information + final String[] sourceIndices = policy.getIndices().toArray(new String[0]); + logger.debug("Policy [{}]: Checking source indices [{}]", policyName, sourceIndices); + GetIndexRequest getIndexRequest = new GetIndexRequest().indices(sourceIndices); + client.admin().indices().getIndex(getIndexRequest, new ActionListener() { + @Override + public void onResponse(GetIndexResponse getIndexResponse) { + validateMappings(getIndexResponse); + prepareAndCreateEnrichIndex(); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + + private Map getMappings(final GetIndexResponse getIndexResponse, final String sourceIndexName) { + ImmutableOpenMap> mappings = getIndexResponse.mappings(); + ImmutableOpenMap indexMapping = mappings.get(sourceIndexName); + if (indexMapping.keys().size() == 0) { + throw new ElasticsearchException( + "Enrich policy execution for [{}] failed. 
No mapping available on source [{}] included in [{}]", + policyName, sourceIndexName, policy.getIndices()); + } + assert indexMapping.keys().size() == 1 : "Expecting only one type per index"; + MappingMetaData typeMapping = indexMapping.iterator().next().value; + return typeMapping.sourceAsMap(); + } + + private void validateMappings(final GetIndexResponse getIndexResponse) { + String[] sourceIndices = getIndexResponse.getIndices(); + logger.debug("Policy [{}]: Validating [{}] source mappings", policyName, sourceIndices); + for (String sourceIndex : sourceIndices) { + Map mapping = getMappings(getIndexResponse, sourceIndex); + // First ensure mapping is set + if (mapping.get("properties") == null) { + throw new ElasticsearchException( + "Enrich policy execution for [{}] failed. Could not read mapping for source [{}] included by pattern [{}]", + policyName, sourceIndex, policy.getIndices()); + } + // Validate the key and values + try { + validateField(mapping, policy.getMatchField(), true); + for (String valueFieldName : policy.getEnrichFields()) { + validateField(mapping, valueFieldName, false); + } + } catch (ElasticsearchException e) { + throw new ElasticsearchException( + "Enrich policy execution for [{}] failed while validating field mappings for index [{}]", + e, policyName, sourceIndex); + } + } + } + + private void validateField(Map properties, String fieldName, boolean fieldRequired) { + assert Strings.isEmpty(fieldName) == false: "Field name cannot be null or empty"; + String[] fieldParts = fieldName.split("\\."); + StringBuilder parent = new StringBuilder(); + Map currentField = properties; + boolean onRoot = true; + for (String fieldPart : fieldParts) { + // Ensure that the current field is of object type only (not a nested type or a non compound field) + Object type = currentField.get("type"); + if (type != null && "object".equals(type) == false) { + throw new ElasticsearchException( + "Could not traverse mapping to field [{}]. 
The [{}] field must be regular object but was [{}].", + fieldName, + onRoot ? "root" : parent.toString(), + type + ); + } + Map currentProperties = ((Map) currentField.get("properties")); + if (currentProperties == null) { + if (fieldRequired) { + throw new ElasticsearchException( + "Could not traverse mapping to field [{}]. Expected the [{}] field to have sub fields but none were configured.", + fieldName, + onRoot ? "root" : parent.toString() + ); + } else { + return; + } + } + currentField = ((Map) currentProperties.get(fieldPart)); + if (currentField == null) { + if (fieldRequired) { + throw new ElasticsearchException( + "Could not traverse mapping to field [{}]. Could not find the [{}] field under [{}]", + fieldName, + fieldPart, + onRoot ? "root" : parent.toString() + ); + } else { + return; + } + } + if (onRoot) { + onRoot = false; + } else { + parent.append("."); + } + parent.append(fieldPart); + } + } + + private XContentBuilder resolveEnrichMapping(final EnrichPolicy policy) { + // Currently the only supported policy type is EnrichPolicy.MATCH_TYPE, which is a keyword type + final String keyType; + final CheckedFunction matchFieldMapping; + if (EnrichPolicy.MATCH_TYPE.equals(policy.getType())) { + matchFieldMapping = (builder) -> builder.field("type", "keyword").field("doc_values", false); + // No need to also configure index_options, because keyword type defaults to 'docs'. + } else if (EnrichPolicy.GEO_MATCH_TYPE.equals(policy.getType())) { + matchFieldMapping = (builder) -> builder.field("type", "geo_shape"); + } else { + throw new ElasticsearchException("Unrecognized enrich policy type [{}]", policy.getType()); + } + + // Enable _source on enrich index. Explicitly mark key mapping type. 
+ try { + XContentBuilder builder = JsonXContent.contentBuilder(); + builder = builder.startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .field("dynamic", false) + .startObject("_source") + .field("enabled", true) + .endObject() + .startObject("properties") + .startObject(policy.getMatchField()); + builder = matchFieldMapping.apply(builder).endObject().endObject() + .startObject("_meta") + .field(ENRICH_README_FIELD_NAME, ENRICH_INDEX_README_TEXT) + .field(ENRICH_POLICY_NAME_FIELD_NAME, policyName) + .field(ENRICH_MATCH_FIELD_NAME, policy.getMatchField()) + .field(ENRICH_POLICY_TYPE_FIELD_NAME, policy.getType()) + .endObject() + .endObject() + .endObject(); + + return builder; + } catch (IOException ioe) { + throw new UncheckedIOException("Could not render enrich mapping", ioe); + } + } + + private void prepareAndCreateEnrichIndex() { + long nowTimestamp = nowSupplier.getAsLong(); + String enrichIndexName = EnrichPolicy.getBaseName(policyName) + "-" + nowTimestamp; + Settings enrichIndexSettings = Settings.builder() + .put("index.number_of_shards", 1) + .put("index.number_of_replicas", 0) + // No changes will be made to an enrich index after policy execution, so need to enable automatic refresh interval: + .put("index.refresh_interval", -1) + // This disables eager global ordinals loading for all fields: + .put("index.warmer.enabled", false) + .build(); + CreateIndexRequest createEnrichIndexRequest = new CreateIndexRequest(enrichIndexName, enrichIndexSettings); + createEnrichIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, resolveEnrichMapping(policy)); + logger.debug("Policy [{}]: Creating new enrich index [{}]", policyName, enrichIndexName); + client.admin().indices().create(createEnrichIndexRequest, new ActionListener() { + @Override + public void onResponse(CreateIndexResponse createIndexResponse) { + prepareReindexOperation(enrichIndexName); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + 
+ private void prepareReindexOperation(final String destinationIndexName) { + // Check to make sure that the enrich pipeline exists, and create it if it is missing. + if (EnrichPolicyReindexPipeline.exists(clusterService.state()) == false) { + EnrichPolicyReindexPipeline.create(client, new ActionListener() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + transferDataToEnrichIndex(destinationIndexName); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } else { + transferDataToEnrichIndex(destinationIndexName); + } + } + + private void transferDataToEnrichIndex(final String destinationIndexName) { + logger.debug("Policy [{}]: Transferring source data to new enrich index [{}]", policyName, destinationIndexName); + // Filter down the source fields to just the ones required by the policy + final Set retainFields = new HashSet<>(); + retainFields.add(policy.getMatchField()); + retainFields.addAll(policy.getEnrichFields()); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.size(fetchSize); + searchSourceBuilder.fetchSource(retainFields.toArray(new String[0]), new String[0]); + if (policy.getQuery() != null) { + searchSourceBuilder.query(QueryBuilders.wrapperQuery(policy.getQuery().getQuery())); + } + ReindexRequest reindexRequest = new ReindexRequest() + .setDestIndex(destinationIndexName) + .setSourceIndices(policy.getIndices().toArray(new String[0])); + reindexRequest.getSearchRequest().source(searchSourceBuilder); + reindexRequest.getDestination().source(new BytesArray(new byte[0]), XContentType.SMILE); + reindexRequest.getDestination().routing("discard"); + reindexRequest.getDestination().setPipeline(EnrichPolicyReindexPipeline.pipelineName()); + client.execute(ReindexAction.INSTANCE, reindexRequest, new ActionListener() { + @Override + public void onResponse(BulkByScrollResponse bulkByScrollResponse) { + // Do we want to fail the request if 
there were failures during the reindex process? + if (bulkByScrollResponse.getBulkFailures().size() > 0) { + listener.onFailure(new ElasticsearchException("Encountered bulk failures during reindex process")); + } else if (bulkByScrollResponse.getSearchFailures().size() > 0) { + listener.onFailure(new ElasticsearchException("Encountered search failures during reindex process")); + } else { + logger.info("Policy [{}]: Transferred [{}] documents to enrich index [{}]", policyName, + bulkByScrollResponse.getCreated(), destinationIndexName); + forceMergeEnrichIndex(destinationIndexName, 1); + } + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + + private void forceMergeEnrichIndex(final String destinationIndexName, final int attempt) { + logger.debug("Policy [{}]: Force merging newly created enrich index [{}] (Attempt {}/{})", policyName, destinationIndexName, + attempt, maxForceMergeAttempts); + client.admin().indices().forceMerge(new ForceMergeRequest(destinationIndexName).maxNumSegments(1), + new ActionListener() { + @Override + public void onResponse(ForceMergeResponse forceMergeResponse) { + refreshEnrichIndex(destinationIndexName, attempt); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + + private void refreshEnrichIndex(final String destinationIndexName, final int attempt) { + logger.debug("Policy [{}]: Refreshing enrich index [{}]", policyName, destinationIndexName); + client.admin().indices().refresh(new RefreshRequest(destinationIndexName), new ActionListener() { + @Override + public void onResponse(RefreshResponse refreshResponse) { + ensureSingleSegment(destinationIndexName, attempt); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + + protected void ensureSingleSegment(final String destinationIndexName, final int attempt) { + client.admin().indices().segments(new IndicesSegmentsRequest(destinationIndexName), new 
ActionListener() { + @Override + public void onResponse(IndicesSegmentResponse indicesSegmentResponse) { + IndexSegments indexSegments = indicesSegmentResponse.getIndices().get(destinationIndexName); + if (indexSegments == null) { + throw new ElasticsearchException("Could not locate segment information for newly created index [{}]", + destinationIndexName); + } + Map indexShards = indexSegments.getShards(); + assert indexShards.size() == 1 : "Expected enrich index to contain only one shard"; + ShardSegments[] shardSegments = indexShards.get(0).getShards(); + assert shardSegments.length == 1 : "Expected enrich index to contain no replicas at this point"; + ShardSegments primarySegments = shardSegments[0]; + if (primarySegments.getSegments().size() > 1) { + int nextAttempt = attempt + 1; + if (nextAttempt > maxForceMergeAttempts) { + listener.onFailure(new ElasticsearchException( + "Force merging index [{}] attempted [{}] times but did not result in one segment.", + destinationIndexName, attempt, maxForceMergeAttempts)); + } else { + logger.debug("Policy [{}]: Force merge result contains more than one segment [{}], retrying (attempt {}/{})", + policyName, primarySegments.getSegments().size(), nextAttempt, maxForceMergeAttempts); + forceMergeEnrichIndex(destinationIndexName, nextAttempt); + } + } else { + // Force merge down to one segment successful + setIndexReadOnly(destinationIndexName); + } + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + + private void setIndexReadOnly(final String destinationIndexName) { + logger.debug("Policy [{}]: Setting new enrich index [{}] to be read only", policyName, destinationIndexName); + UpdateSettingsRequest request = new UpdateSettingsRequest(destinationIndexName) + .setPreserveExisting(true) + .settings(Settings.builder() + .put("index.auto_expand_replicas", "0-all") + .put("index.blocks.write", "true")); + client.admin().indices().updateSettings(request, new ActionListener() { + 
@Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + waitForIndexGreen(destinationIndexName); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + + private void waitForIndexGreen(final String destinationIndexName) { + ClusterHealthRequest request = new ClusterHealthRequest(destinationIndexName).waitForGreenStatus(); + client.admin().cluster().health(request, new ActionListener() { + @Override + public void onResponse(ClusterHealthResponse clusterHealthResponse) { + updateEnrichPolicyAlias(destinationIndexName); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + + private void updateEnrichPolicyAlias(final String destinationIndexName) { + String enrichIndexBase = EnrichPolicy.getBaseName(policyName); + logger.debug("Policy [{}]: Promoting new enrich index [{}] to alias [{}]", policyName, destinationIndexName, enrichIndexBase); + GetAliasesRequest aliasRequest = new GetAliasesRequest(enrichIndexBase); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterService.state(), aliasRequest); + ImmutableOpenMap> aliases = + clusterService.state().metaData().findAliases(aliasRequest, concreteIndices); + IndicesAliasesRequest aliasToggleRequest = new IndicesAliasesRequest(); + String[] indices = aliases.keys().toArray(String.class); + if (indices.length > 0) { + aliasToggleRequest.addAliasAction(IndicesAliasesRequest.AliasActions.remove().indices(indices).alias(enrichIndexBase)); + } + aliasToggleRequest.addAliasAction(IndicesAliasesRequest.AliasActions.add().index(destinationIndexName).alias(enrichIndexBase)); + client.admin().indices().aliases(aliasToggleRequest, new ActionListener() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + logger.info("Policy [{}]: Policy execution complete", policyName); + ExecuteEnrichPolicyStatus completeStatus = new 
ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.COMPLETE); + task.setStatus(completeStatus); + listener.onResponse(completeStatus); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java new file mode 100644 index 00000000000..a4251d9f170 --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.AliasOrIndex; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.ingest.ConfigurationUtils; +import org.elasticsearch.ingest.Processor; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; + +import java.util.Map; +import java.util.function.Consumer; + +final class EnrichProcessorFactory implements Processor.Factory, Consumer { + + static final String TYPE = "enrich"; + private final Client client; + + volatile MetaData metaData; + + EnrichProcessorFactory(Client client) { + this.client = client; + } + + @Override + public Processor create(Map processorFactories, String tag, Map config) throws Exception { + String policyName = ConfigurationUtils.readStringProperty(TYPE, tag, config, "policy_name"); + String policyAlias = 
EnrichPolicy.getBaseName(policyName); + AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(policyAlias); + if (aliasOrIndex == null) { + throw new IllegalArgumentException("no enrich index exists for policy with name [" + policyName + "]"); + } + assert aliasOrIndex.isAlias(); + assert aliasOrIndex.getIndices().size() == 1; + IndexMetaData imd = aliasOrIndex.getIndices().get(0); + + String field = ConfigurationUtils.readStringProperty(TYPE, tag, config, "field"); + Map mappingAsMap = imd.mapping().sourceAsMap(); + String policyType = + (String) XContentMapValues.extractValue("_meta." + EnrichPolicyRunner.ENRICH_POLICY_TYPE_FIELD_NAME, mappingAsMap); + String matchField = (String) XContentMapValues.extractValue("_meta." + EnrichPolicyRunner.ENRICH_MATCH_FIELD_NAME, mappingAsMap); + + boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, tag, config, "ignore_missing", false); + boolean overrideEnabled = ConfigurationUtils.readBooleanProperty(TYPE, tag, config, "override", true); + String targetField = ConfigurationUtils.readStringProperty(TYPE, tag, config, "target_field"); + int maxMatches = ConfigurationUtils.readIntProperty(TYPE, tag, config, "max_matches", 1); + if (maxMatches < 1 || maxMatches > 128) { + throw ConfigurationUtils.newConfigurationException(TYPE, tag, "max_matches", "should be between 1 and 128"); + } + + switch (policyType) { + case EnrichPolicy.MATCH_TYPE: + return new MatchProcessor(tag, client, policyName, field, targetField, overrideEnabled, ignoreMissing, matchField, + maxMatches); + case EnrichPolicy.GEO_MATCH_TYPE: + String relationStr = ConfigurationUtils.readStringProperty(TYPE, tag, config, "shape_relation", "intersects"); + ShapeRelation shapeRelation = ShapeRelation.getRelationByName(relationStr); + return new GeoMatchProcessor(tag, client, policyName, field, targetField, overrideEnabled, ignoreMissing, matchField, + maxMatches, shapeRelation); + default: + throw new IllegalArgumentException("unsupported 
policy type [" + policyType + "]"); + } + } + + @Override + public void accept(ClusterState state) { + metaData = state.getMetaData(); + } + +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichStore.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichStore.java new file mode 100644 index 00000000000..3eda3e8c71f --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichStore.java @@ -0,0 +1,165 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; + +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import java.util.function.Consumer; +import java.util.function.Function; + +/** + * Helper methods for access and storage of an enrich policy. + */ +public final class EnrichStore { + + private EnrichStore() {} + + /** + * Adds a new enrich policy or overwrites an existing policy if there is already a policy with the same name. + * This method can only be invoked on the elected master node. + * + * @param name The unique name of the policy + * @param policy The policy to store + * @param handler The handler that gets invoked if policy has been stored or a failure has occurred. 
+ */ + public static void putPolicy(String name, EnrichPolicy policy, ClusterService clusterService, Consumer handler) { + assert clusterService.localNode().isMasterNode(); + + if (Strings.isNullOrEmpty(name)) { + throw new IllegalArgumentException("name is missing or empty"); + } + if (policy == null) { + throw new IllegalArgumentException("policy is missing"); + } + // The policy name is used to create the enrich index name and + // therefor a policy name has the same restrictions as an index name + MetaDataCreateIndexService.validateIndexOrAliasName(name, + (policyName, error) -> new IllegalArgumentException("Invalid policy name [" + policyName + "], " + error)); + if (name.toLowerCase(Locale.ROOT).equals(name) == false) { + throw new IllegalArgumentException("Invalid policy name [" + name + "], must be lowercase"); + } + // TODO: add policy validation + + final EnrichPolicy finalPolicy; + if (policy.getElasticsearchVersion() == null) { + finalPolicy = new EnrichPolicy( + policy.getType(), + policy.getQuery(), + policy.getIndices(), + policy.getMatchField(), + policy.getEnrichFields(), + Version.CURRENT + ); + } else { + finalPolicy = policy; + } + updateClusterState(clusterService, handler, current -> { + final Map policies = getPolicies(current); + if (policies.get(name) != null) { + throw new ResourceAlreadyExistsException("policy [{}] already exists", name); + } + policies.put(name, finalPolicy); + return policies; + }); + } + + /** + * Removes an enrich policy from the policies in the cluster state. This method can only be invoked on the + * elected master node. + * + * @param name The unique name of the policy + * @param handler The handler that gets invoked if policy has been stored or a failure has occurred. 
+ */ + public static void deletePolicy(String name, ClusterService clusterService, Consumer handler) { + assert clusterService.localNode().isMasterNode(); + + if (Strings.isNullOrEmpty(name)) { + throw new IllegalArgumentException("name is missing or empty"); + } + + updateClusterState(clusterService, handler, current -> { + final Map policies = getPolicies(current); + if (policies.containsKey(name) == false) { + throw new ResourceNotFoundException("policy [{}] not found", name); + } + + policies.remove(name); + return policies; + }); + } + + /** + * Gets an enrich policy for the provided name if exists or otherwise returns null. + * + * @param name The name of the policy to fetch + * @return enrich policy if exists or null otherwise + */ + public static EnrichPolicy getPolicy(String name, ClusterState state) { + if (Strings.isNullOrEmpty(name)) { + throw new IllegalArgumentException("name is missing or empty"); + } + + return getPolicies(state).get(name); + } + + /** + * Gets all policies in the cluster. 
+ * + * @param state the cluster state + * @return a Map of policyName, EnrichPolicy of the policies + */ + public static Map getPolicies(ClusterState state) { + final Map policies; + final EnrichMetadata enrichMetadata = state.metaData().custom(EnrichMetadata.TYPE); + if (enrichMetadata != null) { + // Make a copy, because policies map inside custom metadata is read only: + policies = new HashMap<>(enrichMetadata.getPolicies()); + } else { + policies = new HashMap<>(); + } + return policies; + } + + private static void updateClusterState(ClusterService clusterService, + Consumer handler, + Function> function) { + clusterService.submitStateUpdateTask("update-enrich-metadata", new ClusterStateUpdateTask() { + + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + Map policies = function.apply(currentState); + MetaData metaData = MetaData.builder(currentState.metaData()) + .putCustom(EnrichMetadata.TYPE, new EnrichMetadata(policies)) + .build(); + return ClusterState.builder(currentState) + .metaData(metaData) + .build(); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + handler.accept(null); + } + + @Override + public void onFailure(String source, Exception e) { + handler.accept(e); + } + }); + } +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/ExecuteEnrichPolicyTask.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/ExecuteEnrichPolicyTask.java new file mode 100644 index 00000000000..dedd45920fa --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/ExecuteEnrichPolicyTask.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich; + +import java.util.Map; + +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus; + +class ExecuteEnrichPolicyTask extends Task { + + private volatile ExecuteEnrichPolicyStatus status; + + ExecuteEnrichPolicyTask(long id, String type, String action, String description, TaskId parentTask, + Map headers) { + super(id, type, action, description, parentTask, headers); + } + + @Override + public Status getStatus() { + return status; + } + + void setStatus(ExecuteEnrichPolicyStatus status) { + this.status = status; + } +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/GeoMatchProcessor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/GeoMatchProcessor.java new file mode 100644 index 00000000000..ebe05772bf0 --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/GeoMatchProcessor.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.MultiPoint; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.index.query.GeoShapeQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.BiConsumer; + +public final class GeoMatchProcessor extends AbstractEnrichProcessor { + + private ShapeRelation shapeRelation; + + GeoMatchProcessor(String tag, + Client client, + String policyName, + String field, + String targetField, + boolean overrideEnabled, + boolean ignoreMissing, + String matchField, + int maxMatches, + ShapeRelation shapeRelation) { + super(tag, client, policyName, field, targetField, ignoreMissing, overrideEnabled, matchField, maxMatches); + this.shapeRelation = shapeRelation; + } + + /** used in tests **/ + GeoMatchProcessor(String tag, + BiConsumer> searchRunner, + String policyName, + String field, + String targetField, + boolean overrideEnabled, + boolean ignoreMissing, + String matchField, + int maxMatches, ShapeRelation shapeRelation) { + super(tag, searchRunner, policyName, field, targetField, ignoreMissing, overrideEnabled, matchField, maxMatches); + this.shapeRelation = shapeRelation; + } + + @SuppressWarnings("unchecked") + @Override + public QueryBuilder getQueryBuilder(Object fieldValue) { + List points = new ArrayList<>(); + if (fieldValue instanceof List) { + List values = (List) fieldValue; + if (values.size() == 2 && values.get(0) instanceof Number) { + GeoPoint geoPoint = GeoUtils.parseGeoPoint(values, true); + points.add(new Point(geoPoint.lon(), 
geoPoint.lat())); + } else { + for (Object value : values) { + GeoPoint geoPoint = GeoUtils.parseGeoPoint(value, true); + points.add(new Point(geoPoint.lon(), geoPoint.lat())); + } + } + } else { + GeoPoint geoPoint = GeoUtils.parseGeoPoint(fieldValue, true); + points.add(new Point(geoPoint.lon(), geoPoint.lat())); + } + final Geometry queryGeometry; + if (points.isEmpty()) { + throw new IllegalArgumentException("no geopoints found"); + } else if (points.size() == 1) { + queryGeometry = points.get(0); + } else { + queryGeometry = new MultiPoint(points); + } + GeoShapeQueryBuilder shapeQuery = new GeoShapeQueryBuilder(matchField, queryGeometry); + shapeQuery.relation(shapeRelation); + return shapeQuery; + } + + public ShapeRelation getShapeRelation() { + return shapeRelation; + } +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/MatchProcessor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/MatchProcessor.java new file mode 100644 index 00000000000..104a0f0b267 --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/MatchProcessor.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; + +import java.util.List; +import java.util.function.BiConsumer; + +public class MatchProcessor extends AbstractEnrichProcessor { + + MatchProcessor(String tag, + Client client, + String policyName, + String field, + String targetField, + boolean overrideEnabled, + boolean ignoreMissing, + String matchField, + int maxMatches) { + super(tag, client, policyName, field, targetField, ignoreMissing, overrideEnabled, matchField, maxMatches); + } + + /** used in tests **/ + MatchProcessor(String tag, + BiConsumer> searchRunner, + String policyName, + String field, + String targetField, + boolean overrideEnabled, + boolean ignoreMissing, + String matchField, + int maxMatches) { + super(tag, searchRunner, policyName, field, targetField, ignoreMissing, overrideEnabled, matchField, maxMatches); + } + + @Override + public QueryBuilder getQueryBuilder(Object fieldValue) { + if (fieldValue instanceof List) { + return new TermsQueryBuilder(matchField, (List) fieldValue); + } else { + return new TermQueryBuilder(matchField, fieldValue); + } + } +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java new file mode 100644 index 00000000000..c6d25195682 --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java @@ -0,0 +1,235 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.enrich.action; + +import org.apache.logging.log4j.util.BiConsumer; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.MultiSearchResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.CoordinatorStats; +import org.elasticsearch.xpack.enrich.EnrichPlugin; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; + +/** + * An internal action to locally manage the load of the search requests that originate from the enrich processor. + * This is because the enrich processor executes asynchronously and a bulk request could easily overload + * the search tp. 
+ */ +public class EnrichCoordinatorProxyAction extends ActionType { + + public static final EnrichCoordinatorProxyAction INSTANCE = new EnrichCoordinatorProxyAction(); + public static final String NAME = "indices:data/read/xpack/enrich/coordinate_lookups"; + + private EnrichCoordinatorProxyAction() { + super(NAME, SearchResponse::new); + } + + public static class TransportAction extends HandledTransportAction { + + private final Coordinator coordinator; + + @Inject + public TransportAction(TransportService transportService, ActionFilters actionFilters, Coordinator coordinator) { + super(NAME, transportService, actionFilters, SearchRequest::new); + this.coordinator = coordinator; + } + + @Override + protected void doExecute(Task task, SearchRequest request, ActionListener listener) { + assert Thread.currentThread().getName().contains(ThreadPool.Names.WRITE); + coordinator.schedule(request, listener); + } + } + + public static class Coordinator { + + final BiConsumer> lookupFunction; + final int maxLookupsPerRequest; + final int maxNumberOfConcurrentRequests; + final BlockingQueue queue; + final AtomicInteger remoteRequestsCurrent = new AtomicInteger(0); + volatile long remoteRequestsTotal = 0; + final AtomicLong executedSearchesTotal = new AtomicLong(0); + + public Coordinator(Client client, Settings settings) { + this( + lookupFunction(client), + EnrichPlugin.COORDINATOR_PROXY_MAX_LOOKUPS_PER_REQUEST.get(settings), + EnrichPlugin.COORDINATOR_PROXY_MAX_CONCURRENT_REQUESTS.get(settings), + EnrichPlugin.COORDINATOR_PROXY_QUEUE_CAPACITY.get(settings) + ); + } + + Coordinator(BiConsumer> lookupFunction, + int maxLookupsPerRequest, int maxNumberOfConcurrentRequests, int queueCapacity) { + this.lookupFunction = lookupFunction; + this.maxLookupsPerRequest = maxLookupsPerRequest; + this.maxNumberOfConcurrentRequests = maxNumberOfConcurrentRequests; + this.queue = new ArrayBlockingQueue<>(queueCapacity); + } + + void schedule(SearchRequest searchRequest, ActionListener 
listener) { + // Use put(...), because if queue is full then this method will wait until a free slot becomes available + // The calling thread here is a write thread (write tp is used by ingest) and + // this will create natural back pressure from the enrich processor. + // If there are no write threads available then write requests with ingestion will fail with 429 error code. + try { + queue.put(new Slot(searchRequest, listener)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new RuntimeException("unable to add item to queue", e); + } + coordinateLookups(); + } + + CoordinatorStats getStats(String nodeId) { + return new CoordinatorStats(nodeId, queue.size(), remoteRequestsCurrent.get(), remoteRequestsTotal, + executedSearchesTotal.get()); + } + + synchronized void coordinateLookups() { + while (queue.isEmpty() == false && + remoteRequestsCurrent.get() < maxNumberOfConcurrentRequests) { + + final List slots = new ArrayList<>(); + queue.drainTo(slots, maxLookupsPerRequest); + final MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); + slots.forEach(slot -> multiSearchRequest.add(slot.searchRequest)); + + remoteRequestsCurrent.incrementAndGet(); + remoteRequestsTotal++; + lookupFunction.accept(multiSearchRequest, (response, e) -> { + handleResponse(slots, response, e); + }); + } + } + + void handleResponse(List slots, MultiSearchResponse response, Exception e) { + remoteRequestsCurrent.decrementAndGet(); + executedSearchesTotal.addAndGet(slots.size()); + + if (response != null) { + assert slots.size() == response.getResponses().length; + for (int i = 0; i < response.getResponses().length; i++) { + MultiSearchResponse.Item responseItem = response.getResponses()[i]; + Slot slot = slots.get(i); + + if (responseItem.isFailure()) { + slot.actionListener.onFailure(responseItem.getFailure()); + } else { + slot.actionListener.onResponse(responseItem.getResponse()); + } + } + } else if (e != null) { + slots.forEach(slot -> 
slot.actionListener.onFailure(e)); + } else { + throw new AssertionError("no response and no error"); + } + + // There may be room to for a new request now that numberOfOutstandingRequests has been decreased: + coordinateLookups(); + } + + static class Slot { + + final SearchRequest searchRequest; + final ActionListener actionListener; + + Slot(SearchRequest searchRequest, ActionListener actionListener) { + this.searchRequest = Objects.requireNonNull(searchRequest); + this.actionListener = Objects.requireNonNull(actionListener); + } + } + + static BiConsumer> lookupFunction(ElasticsearchClient client) { + return (request, consumer) -> { + int slot = 0; + final Map>> itemsPerIndex = new HashMap<>(); + for (SearchRequest searchRequest : request.requests()) { + List> items = + itemsPerIndex.computeIfAbsent(searchRequest.indices()[0], k -> new ArrayList<>()); + items.add(new Tuple<>(slot, searchRequest)); + slot++; + } + + final AtomicInteger counter = new AtomicInteger(0); + final ConcurrentMap> shardResponses = new ConcurrentHashMap<>(); + for (Map.Entry>> entry : itemsPerIndex.entrySet()) { + final String enrichIndexName = entry.getKey(); + final List> enrichIndexRequestsAndSlots = entry.getValue(); + ActionListener listener = ActionListener.wrap( + response -> { + shardResponses.put(enrichIndexName, new Tuple<>(response, null)); + if (counter.incrementAndGet() == itemsPerIndex.size()) { + consumer.accept(reduce(request.requests().size(), itemsPerIndex, shardResponses), null); + } + }, + e -> { + shardResponses.put(enrichIndexName, new Tuple<>(null, e)); + if (counter.incrementAndGet() == itemsPerIndex.size()) { + consumer.accept(reduce(request.requests().size(), itemsPerIndex, shardResponses), null); + } + } + ); + + MultiSearchRequest mrequest = new MultiSearchRequest(); + enrichIndexRequestsAndSlots.stream().map(Tuple::v2).forEach(mrequest::add); + client.execute(EnrichShardMultiSearchAction.INSTANCE, new EnrichShardMultiSearchAction.Request(mrequest), listener); 
+ } + }; + } + + static MultiSearchResponse reduce(int numRequest, + Map>> itemsPerIndex, + Map> shardResponses) { + MultiSearchResponse.Item[] items = new MultiSearchResponse.Item[numRequest]; + for (Map.Entry> rspEntry : shardResponses.entrySet()) { + List> reqSlots = itemsPerIndex.get(rspEntry.getKey()); + if (rspEntry.getValue().v1() != null) { + MultiSearchResponse shardResponse = rspEntry.getValue().v1(); + for (int i = 0; i < shardResponse.getResponses().length; i++) { + int slot = reqSlots.get(i).v1(); + items[slot] = shardResponse.getResponses()[i]; + } + } else if (rspEntry.getValue().v2() != null) { + Exception e = rspEntry.getValue().v2(); + for (Tuple originSlot : reqSlots) { + items[originSlot.v1()] = new MultiSearchResponse.Item(null, e); + } + } else { + throw new AssertionError(); + } + } + return new MultiSearchResponse(items, 1L); + } + + } + +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorStatsAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorStatsAction.java new file mode 100644 index 00000000000..4d59d8dc531 --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorStatsAction.java @@ -0,0 +1,152 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
 */
package org.elasticsearch.xpack.enrich.action;

import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.nodes.BaseNodeRequest;
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
import org.elasticsearch.action.support.nodes.BaseNodesRequest;
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.action.support.nodes.TransportNodesAction;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction;
import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.CoordinatorStats;

import java.io.IOException;
import java.util.List;

/**
 * This is an internal action that gathers coordinator stats from each node with an ingest role in the cluster.
 * This action is only used via the {@link EnrichStatsAction}.
 */
public class EnrichCoordinatorStatsAction extends ActionType<EnrichCoordinatorStatsAction.Response> {

    public static final EnrichCoordinatorStatsAction INSTANCE = new EnrichCoordinatorStatsAction();
    public static final String NAME = "cluster:admin/xpack/enrich/coordinator_stats";

    private EnrichCoordinatorStatsAction() {
        super(NAME, Response::new);
    }

    // This always executes on all ingest nodes, hence no node ids need to be provided.
    public static class Request extends BaseNodesRequest<Request> {

        public Request() {
            // Empty node id array: concrete nodes are resolved in TransportAction#resolveRequest below.
            super(new String[0]);
        }

        Request(StreamInput in) throws IOException {
            super(in);
        }
    }

    public static class NodeRequest extends BaseNodeRequest {

        NodeRequest() {}

        NodeRequest(StreamInput in) throws IOException {
            super(in);
        }

    }

    public static class Response extends BaseNodesResponse<NodeResponse> {

        Response(StreamInput in) throws IOException {
            super(in);
        }

        Response(ClusterName clusterName, List<NodeResponse> nodes, List<FailedNodeException> failures) {
            super(clusterName, nodes, failures);
        }

        @Override
        protected List<NodeResponse> readNodesFrom(StreamInput in) throws IOException {
            return in.readList(NodeResponse::new);
        }

        @Override
        protected void writeNodesTo(StreamOutput out, List<NodeResponse> nodes) throws IOException {
            out.writeList(nodes);
        }
    }

    /** Per-node payload: the coordinator stats of one ingest node. */
    public static class NodeResponse extends BaseNodeResponse {

        private final CoordinatorStats coordinatorStats;

        NodeResponse(DiscoveryNode node, CoordinatorStats coordinatorStats) {
            super(node);
            this.coordinatorStats = coordinatorStats;
        }

        NodeResponse(StreamInput in) throws IOException {
            super(in);
            this.coordinatorStats = new CoordinatorStats(in);
        }

        public CoordinatorStats getCoordinatorStats() {
            return coordinatorStats;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            coordinatorStats.writeTo(out);
        }
    }

    public static class TransportAction extends TransportNodesAction<Request, Response, NodeRequest, NodeResponse> {

        private final EnrichCoordinatorProxyAction.Coordinator coordinator;

        @Inject
        public TransportAction(ThreadPool threadPool, ClusterService clusterService, TransportService transportService,
                               ActionFilters actionFilters, EnrichCoordinatorProxyAction.Coordinator coordinator) {
            super(NAME, threadPool, clusterService, transportService, actionFilters, Request::new, NodeRequest::new,
                ThreadPool.Names.SAME, NodeResponse.class);
            this.coordinator = coordinator;
        }

        @Override
        protected void resolveRequest(Request request, ClusterState clusterState) {
            // Always fan out to every node with the ingest role; the request itself carries no node ids.
            DiscoveryNode[] ingestNodes = clusterState.getNodes().getIngestNodes().values().toArray(DiscoveryNode.class);
            request.setConcreteNodes(ingestNodes);
        }

        @Override
        protected Response newResponse(Request request, List<NodeResponse> nodeResponses, List<FailedNodeException> failures) {
            return new Response(clusterService.getClusterName(), nodeResponses, failures);
        }

        @Override
        protected NodeRequest newNodeRequest(Request request) {
            return new NodeRequest();
        }

        @Override
        protected NodeResponse newNodeResponse(StreamInput in) throws IOException {
            return new NodeResponse(in);
        }

        @Override
        protected NodeResponse nodeOperation(NodeRequest request) {
            // Executed on each ingest node: report this node's coordinator queue/remote stats.
            DiscoveryNode node = clusterService.localNode();
            return new NodeResponse(node, coordinator.getStats(node.getId()));
        }
    }

}
diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java
new file mode 100644
index 00000000000..c1144310eec
--- /dev/null
+++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java
@@ -0,0 +1,278 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
+ */ +package org.elasticsearch.xpack.enrich.action; + +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.search.TopDocs; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.ValidateActions; +import org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.MultiSearchResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.routing.GroupShardsIterator; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.Preference; +import org.elasticsearch.cluster.routing.ShardIterator; +import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.Settings; +import 
org.elasticsearch.common.text.Text; +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.fieldvisitor.FieldsVisitor; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +/** + * This is an internal action, that executes msearch requests for enrich indices in a more efficient manner. + * Currently each search request inside a msearch request is executed as a separate search. If many search requests + * are targeted to the same shards then there is quite some overhead in executing each search request as a separate + * search (multiple search contexts, opening of multiple searchers). 
+ * + * In case for the enrich processor, searches are always targeting the same single shard indices. This action + * handles multi search requests targeting enrich indices more efficiently by executing them in a bulk using the same + * searcher and query shard context. + * + * This action (plus some coordination logic in {@link EnrichCoordinatorProxyAction}) can be removed when msearch can + * execute search requests targeted to the same shard more efficiently in a bulk like style. + * + * Note that this 'msearch' implementation only supports executing a query, pagination and source filtering. + * Other search features are not supported, because the enrich processor isn't using these search features. + */ +public class EnrichShardMultiSearchAction extends ActionType { + + public static final EnrichShardMultiSearchAction INSTANCE = new EnrichShardMultiSearchAction(); + private static final String NAME = "indices:data/read/shard_multi_search"; + + private EnrichShardMultiSearchAction() { + super(NAME, MultiSearchResponse::new); + } + + public static class Request extends SingleShardRequest { + + private final MultiSearchRequest multiSearchRequest; + + public Request(MultiSearchRequest multiSearchRequest) { + super(multiSearchRequest.requests().get(0).indices()[0]); + this.multiSearchRequest = multiSearchRequest; + assert multiSearchRequest.requests().stream() + .map(SearchRequest::indices) + .flatMap(Arrays::stream) + .distinct() + .count() == 1 : "action [" + NAME + "] cannot handle msearch request pointing to multiple indices"; + assert assertSearchSource(); + } + + public Request(StreamInput in) throws IOException { + super(in); + multiSearchRequest = new MultiSearchRequest(in); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = validateNonNullIndex(); + if (index.startsWith(EnrichPolicy.ENRICH_INDEX_NAME_BASE) == false) { + validationException = 
ValidateActions.addValidationError("index [" + index + "] is not an enrich index", + validationException); + } + return validationException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + multiSearchRequest.writeTo(out); + } + + MultiSearchRequest getMultiSearchRequest() { + return multiSearchRequest; + } + + private boolean assertSearchSource() { + for (SearchRequest request : multiSearchRequest.requests()) { + SearchSourceBuilder copy = copy(request.source()); + + // validate that only a from, size, query and source filtering has been provided (other features are not supported): + // (first unset, what is supported and then see if there is anything left) + copy.query(null); + copy.from(0); + copy.size(10); + copy.fetchSource(null); + assert EMPTY_SOURCE.equals(copy) : "search request [" + Strings.toString(copy) + + "] is using features that is not supported"; + } + return true; + } + + private SearchSourceBuilder copy(SearchSourceBuilder source) { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + NamedWriteableRegistry registry = new NamedWriteableRegistry(searchModule.getNamedWriteables()); + try (BytesStreamOutput output = new BytesStreamOutput()) { + source.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), registry)) { + return new SearchSourceBuilder(in); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + private static final SearchSourceBuilder EMPTY_SOURCE = new SearchSourceBuilder() + // can't set -1 to indicate not specified + .from(0).size(10); + } + + public static class TransportAction extends TransportSingleShardAction { + + private final IndicesService indicesService; + + @Inject + public TransportAction(ThreadPool threadPool, ClusterService clusterService, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver 
indexNameExpressionResolver, + IndicesService indicesService) { + super(NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + Request::new, ThreadPool.Names.SEARCH); + this.indicesService = indicesService; + } + + @Override + protected Writeable.Reader getResponseReader() { + return MultiSearchResponse::new; + } + + @Override + protected boolean resolveIndex(Request request) { + return true; + } + + @Override + protected ShardsIterator shards(ClusterState state, InternalRequest request) { + String index = request.concreteIndex(); + IndexRoutingTable indexRouting = state.routingTable().index(index); + int numShards = indexRouting.shards().size(); + if (numShards != 1) { + throw new IllegalStateException("index [" + index + "] should have 1 shard, but has " + numShards + " shards"); + } + + GroupShardsIterator result = + clusterService.operationRouting().searchShards(state, new String[] {index}, null, Preference.LOCAL.type()); + return result.get(0); + } + + @Override + protected MultiSearchResponse shardOperation(Request request, ShardId shardId) throws IOException { + final IndexService indexService = indicesService.indexService(shardId.getIndex()); + final IndexShard indexShard = indicesService.getShardOrNull(shardId); + try (Engine.Searcher searcher = indexShard.acquireSearcher("enrich_msearch")) { + final FieldsVisitor visitor = new FieldsVisitor(true); + final QueryShardContext context = indexService.newQueryShardContext(shardId.id(), + searcher, () -> {throw new UnsupportedOperationException();}, null); + final MapperService mapperService = context.getMapperService(); + final Text typeText = mapperService.documentMapper().typeText(); + + final MultiSearchResponse.Item[] items = new MultiSearchResponse.Item[request.multiSearchRequest.requests().size()]; + for (int i = 0; i < request.multiSearchRequest.requests().size(); i++) { + final SearchSourceBuilder searchSourceBuilder = 
request.multiSearchRequest.requests().get(i).source(); + + final QueryBuilder queryBuilder = searchSourceBuilder.query(); + final int from = searchSourceBuilder.from(); + final int size = searchSourceBuilder.size(); + final FetchSourceContext fetchSourceContext = searchSourceBuilder.fetchSource(); + + final Query luceneQuery = queryBuilder.rewrite(context).toQuery(context); + final int n = from + size; + final TopDocs topDocs = searcher.search(luceneQuery, n, new Sort(SortField.FIELD_DOC)); + + final SearchHit[] hits = new SearchHit[topDocs.scoreDocs.length]; + for (int j = 0; j < topDocs.scoreDocs.length; j++) { + final ScoreDoc scoreDoc = topDocs.scoreDocs[j]; + + visitor.reset(); + searcher.doc(scoreDoc.doc, visitor); + visitor.postProcess(mapperService); + final SearchHit hit = new SearchHit(scoreDoc.doc, visitor.uid().id(), typeText, Collections.emptyMap()); + hit.sourceRef(filterSource(fetchSourceContext, visitor.source())); + hits[j] = hit; + } + items[i] = new MultiSearchResponse.Item(createSearchResponse(topDocs, hits), null); + } + return new MultiSearchResponse(items, 1L); + } + } + + } + + private static BytesReference filterSource(FetchSourceContext fetchSourceContext, BytesReference source) throws IOException { + if (fetchSourceContext.includes().length == 0 && fetchSourceContext.excludes().length == 0) { + return source; + } + + Set includes = new HashSet<>(Arrays.asList(fetchSourceContext.includes())); + Set excludes = new HashSet<>(Arrays.asList(fetchSourceContext.excludes())); + + XContentBuilder builder = + new XContentBuilder(XContentType.SMILE.xContent(), new BytesStreamOutput(source.length()), includes, excludes); + XContentParser sourceParser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, source, XContentType.SMILE); + builder.copyCurrentStructure(sourceParser); + return BytesReference.bytes(builder); + } + + private static SearchResponse createSearchResponse(TopDocs topDocs, 
SearchHit[] hits) { + SearchHits searchHits = new SearchHits(hits, topDocs.totalHits, 0); + return new SearchResponse( + new InternalSearchResponse(searchHits, null, null, null, false, null, 0), + null, 1, 1, 0, 1L, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY + ); + } + +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyAction.java new file mode 100644 index 00000000000..615c6295438 --- /dev/null +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyAction.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.enrich.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import 
org.elasticsearch.ingest.IngestService;
import org.elasticsearch.ingest.PipelineConfiguration;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction;
import org.elasticsearch.xpack.enrich.AbstractEnrichProcessor;
import org.elasticsearch.xpack.enrich.EnrichPolicyLocks;
import org.elasticsearch.xpack.enrich.EnrichStore;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Master-node action that deletes an enrich policy, but only when no ingest pipeline
 * references it. Holds the policy lock for the duration of the check and the deletion
 * of the policy's enrich indices, so a concurrent execute cannot race the delete.
 */
public class TransportDeleteEnrichPolicyAction extends TransportMasterNodeAction<DeleteEnrichPolicyAction.Request, AcknowledgedResponse> {

    private final EnrichPolicyLocks enrichPolicyLocks;
    private final IngestService ingestService;
    private final Client client;
    // the most lenient we can get in order to not bomb out if no indices are found, which is a valid case
    // where a user creates and deletes a policy before running execute
    private static final IndicesOptions LENIENT_OPTIONS = IndicesOptions.fromOptions(true, true, true, true);


    @Inject
    public TransportDeleteEnrichPolicyAction(TransportService transportService,
                                             ClusterService clusterService,
                                             ThreadPool threadPool,
                                             ActionFilters actionFilters,
                                             IndexNameExpressionResolver indexNameExpressionResolver,
                                             Client client,
                                             EnrichPolicyLocks enrichPolicyLocks,
                                             IngestService ingestService) {
        super(DeleteEnrichPolicyAction.NAME, transportService, clusterService, threadPool, actionFilters,
            DeleteEnrichPolicyAction.Request::new, indexNameExpressionResolver);
        this.client = client;
        this.enrichPolicyLocks = enrichPolicyLocks;
        this.ingestService = ingestService;
    }

    @Override
    protected String executor() {
        return ThreadPool.Names.SAME;
    }

    // Legacy Streamable entry point; response deserialization goes through read(StreamInput) below.
    protected AcknowledgedResponse newResponse() {
        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
    }

    @Override
    protected AcknowledgedResponse read(StreamInput in) throws IOException {
        return new AcknowledgedResponse(in);
    }

    /**
     * Flow: verify the policy exists, lock it, refuse deletion if any pipeline uses it,
     * then delete the policy's enrich indices followed by the policy itself.
     * The lock is released on every path (validation failure, index-delete failure, success).
     */
    @Override
    protected void masterOperation(DeleteEnrichPolicyAction.Request request, ClusterState state,
                                   ActionListener<AcknowledgedResponse> listener) throws Exception {
        EnrichPolicy policy = EnrichStore.getPolicy(request.getName(), state); // ensure the policy exists first
        if (policy == null) {
            // Thrown (not passed to the listener) before the lock is taken; the transport layer reports it.
            throw new ResourceNotFoundException("policy [{}] not found", request.getName());
        }

        enrichPolicyLocks.lockPolicy(request.getName());
        try {
            List<PipelineConfiguration> pipelines = IngestService.getPipelines(state);
            List<String> pipelinesWithProcessors = new ArrayList<>();

            for (PipelineConfiguration pipelineConfiguration : pipelines) {
                List<AbstractEnrichProcessor> enrichProcessors =
                    ingestService.getProcessorsInPipeline(pipelineConfiguration.getId(), AbstractEnrichProcessor.class);
                for (AbstractEnrichProcessor processor : enrichProcessors) {
                    if (processor.getPolicyName().equals(request.getName())) {
                        pipelinesWithProcessors.add(pipelineConfiguration.getId());
                    }
                }
            }

            if (pipelinesWithProcessors.isEmpty() == false) {
                throw new ElasticsearchStatusException("Could not delete policy [{}] because a pipeline is referencing it {}",
                    RestStatus.CONFLICT, request.getName(), pipelinesWithProcessors);
            }
        } catch (Exception e) {
            // Release the lock before failing; otherwise the policy would stay locked forever.
            enrichPolicyLocks.releasePolicy(request.getName());
            listener.onFailure(e);
            return;
        }

        // Lock is still held here; it is released in both arms of the wrapped listener below.
        deleteIndicesAndPolicy(request.getName(), ActionListener.wrap(
            (response) -> {
                enrichPolicyLocks.releasePolicy(request.getName());
                listener.onResponse(response);
            },
            (exc) -> {
                enrichPolicyLocks.releasePolicy(request.getName());
                listener.onFailure(exc);
            }
        ));
    }

    /** Deletes all `.enrich-<name>-*` indices, then the policy itself. */
    private void deleteIndicesAndPolicy(String name, ActionListener<AcknowledgedResponse> listener) {
        // delete all enrich indices for this policy
        DeleteIndexRequest deleteRequest = new DeleteIndexRequest()
            .indices(EnrichPolicy.getBaseName(name) + "-*")
            .indicesOptions(LENIENT_OPTIONS);

        client.admin().indices().delete(deleteRequest, ActionListener.wrap(
            (response) -> {
                if (response.isAcknowledged() == false) {
                    listener.onFailure(new ElasticsearchStatusException("Could not fetch indices to delete during policy delete of [{}]",
                        RestStatus.INTERNAL_SERVER_ERROR, name));
                } else {
                    deletePolicy(name, listener);
                }
            },
            (error) -> listener.onFailure(error)
        ));
    }

    /** Removes the policy from cluster state; a null callback exception signals success. */
    private void deletePolicy(String name, ActionListener<AcknowledgedResponse> listener) {
        EnrichStore.deletePolicy(name, clusterService, e -> {
            if (e == null) {
                listener.onResponse(new AcknowledgedResponse(true));
            } else {
                listener.onFailure(e);
            }
        });
    }

    @Override
    protected ClusterBlockException checkBlock(DeleteEnrichPolicyAction.Request request, ClusterState state) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
    }
}
diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportEnrichStatsAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportEnrichStatsAction.java
new file mode 100644
index 00000000000..62ad1d72c1b
--- /dev/null
+++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportEnrichStatsAction.java
@@ -0,0 +1,97 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.enrich.action;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction;
import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.CoordinatorStats;
import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.ExecutingPolicy;
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction;

import java.io.IOException;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Master-node action backing the enrich stats API: fans out an
 * {@link EnrichCoordinatorStatsAction} to all ingest nodes and combines the
 * per-node coordinator stats with the policy-execution tasks running on the master.
 */
public class TransportEnrichStatsAction extends TransportMasterNodeAction<EnrichStatsAction.Request, EnrichStatsAction.Response> {

    private final Client client;

    @Inject
    public TransportEnrichStatsAction(TransportService transportService, ClusterService clusterService, ThreadPool threadPool,
                                      ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                                      Client client) {
        super(EnrichStatsAction.NAME, transportService, clusterService, threadPool, actionFilters,
            EnrichStatsAction.Request::new, indexNameExpressionResolver);
        this.client = client;
    }

    @Override
    protected String executor() {
        return ThreadPool.Names.SAME;
    }

    @Override
    protected EnrichStatsAction.Response read(StreamInput in) throws IOException {
        return new EnrichStatsAction.Response(in);
    }

    @Override
    protected void masterOperation(EnrichStatsAction.Request request,
                                   ClusterState state,
                                   ActionListener<EnrichStatsAction.Response> listener) throws Exception {
        EnrichCoordinatorStatsAction.Request statsRequest = new EnrichCoordinatorStatsAction.Request();
        ActionListener<EnrichCoordinatorStatsAction.Response> statsListener = ActionListener.wrap(
            response -> {
                if (response.hasFailures()) {
                    // Report failures even if some node level requests succeed:
                    // the first failure becomes the primary exception, the rest are suppressed on it.
                    Exception failure = null;
                    for (FailedNodeException nodeFailure : response.failures()) {
                        if (failure == null) {
                            failure = nodeFailure;
                        } else {
                            failure.addSuppressed(nodeFailure);
                        }
                    }
                    listener.onFailure(failure);
                    return;
                }

                // Sort by node id for a stable, deterministic API response.
                List<CoordinatorStats> coordinatorStats = response.getNodes().stream()
                    .map(EnrichCoordinatorStatsAction.NodeResponse::getCoordinatorStats)
                    .sorted(Comparator.comparing(CoordinatorStats::getNodeId))
                    .collect(Collectors.toList());
                // Running policy executions are read from the local (master) task manager,
                // since policy execution is coordinated on the master node.
                List<ExecutingPolicy> policyExecutionTasks = taskManager.getTasks().values().stream()
                    .filter(t -> t.getAction().equals(ExecuteEnrichPolicyAction.NAME))
                    .map(t -> t.taskInfo(clusterService.localNode().getId(), true))
                    .map(t -> new ExecutingPolicy(t.getDescription(), t))
                    .sorted(Comparator.comparing(ExecutingPolicy::getName))
                    .collect(Collectors.toList());
                listener.onResponse(new EnrichStatsAction.Response(policyExecutionTasks, coordinatorStats));
            },
            listener::onFailure
        );
        client.execute(EnrichCoordinatorStatsAction.INSTANCE, statsRequest, statsListener);
    }

    @Override
    protected ClusterBlockException checkBlock(EnrichStatsAction.Request request, ClusterState state) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
    }
}
diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportExecuteEnrichPolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportExecuteEnrichPolicyAction.java
new file mode 100644
index
00000000000..79810aca9b2
--- /dev/null
+++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportExecuteEnrichPolicyAction.java
@@ -0,0 +1,88 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.enrich.action;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.tasks.LoggingTaskListener;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction;
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus;
import org.elasticsearch.xpack.enrich.EnrichPolicyExecutor;
import org.elasticsearch.xpack.enrich.EnrichPolicyLocks;

import java.io.IOException;

/**
 * Master-node action that runs an enrich policy. Supports two modes: a blocking mode
 * that answers once execution finishes, and a fire-and-forget mode that answers
 * immediately with the id of the task performing the execution.
 */
public class TransportExecuteEnrichPolicyAction
    extends TransportMasterNodeAction<ExecuteEnrichPolicyAction.Request, ExecuteEnrichPolicyAction.Response> {

    private final EnrichPolicyExecutor executor;

    @Inject
    public TransportExecuteEnrichPolicyAction(Settings settings,
                                              Client client,
                                              TransportService transportService,
                                              ClusterService clusterService,
                                              ThreadPool threadPool,
                                              ActionFilters actionFilters,
                                              IndexNameExpressionResolver indexNameExpressionResolver,
                                              EnrichPolicyLocks enrichPolicyLocks) {
        super(ExecuteEnrichPolicyAction.NAME, transportService, clusterService, threadPool, actionFilters,
            ExecuteEnrichPolicyAction.Request::new, indexNameExpressionResolver);
        // NOTE(review): a fresh IndexNameExpressionResolver is built here rather than reusing
        // the injected one — confirm this is intentional.
        this.executor = new EnrichPolicyExecutor(settings, clusterService, client, transportService.getTaskManager(), threadPool,
            new IndexNameExpressionResolver(), enrichPolicyLocks, System::currentTimeMillis);
    }

    @Override
    protected String executor() {
        return ThreadPool.Names.SAME;
    }

    @Override
    protected ExecuteEnrichPolicyAction.Response read(StreamInput in) throws IOException {
        return new ExecuteEnrichPolicyAction.Response(in);
    }

    @Override
    protected void masterOperation(ExecuteEnrichPolicyAction.Request request, ClusterState state,
                                   ActionListener<ExecuteEnrichPolicyAction.Response> listener) {
        if (request.isWaitForCompletion() == false) {
            // Fire-and-forget: start the execution under a task (completion is logged by
            // LoggingTaskListener) and hand the caller the task id to poll.
            Task task = executor.runPolicy(request, LoggingTaskListener.instance());
            TaskId taskId = new TaskId(clusterService.localNode().getId(), task.getId());
            listener.onResponse(new ExecuteEnrichPolicyAction.Response(taskId));
            return;
        }

        // Blocking variant: only respond once the policy execution has finished (or failed).
        executor.runPolicy(request, ActionListener.wrap(
            (ExecuteEnrichPolicyStatus executionStatus) ->
                listener.onResponse(new ExecuteEnrichPolicyAction.Response(executionStatus)),
            listener::onFailure));
    }

    @Override
    protected ClusterBlockException checkBlock(ExecuteEnrichPolicyAction.Request request, ClusterState state) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
    }
}
diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyAction.java
new file mode 100644
index
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.enrich.action;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction;
import org.elasticsearch.xpack.enrich.EnrichStore;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

/**
 * Master-node read action that looks up enrich policies from the cluster state.
 * With no names requested it returns every stored policy; otherwise it returns
 * only the named policies that actually exist (unknown names are skipped).
 */
public class TransportGetEnrichPolicyAction
    extends TransportMasterNodeReadAction<GetEnrichPolicyAction.Request, GetEnrichPolicyAction.Response> {

    @Inject
    public TransportGetEnrichPolicyAction(TransportService transportService,
                                          ClusterService clusterService,
                                          ThreadPool threadPool,
                                          ActionFilters actionFilters,
                                          IndexNameExpressionResolver indexNameExpressionResolver) {
        super(GetEnrichPolicyAction.NAME, transportService, clusterService, threadPool, actionFilters,
            GetEnrichPolicyAction.Request::new, indexNameExpressionResolver);
    }

    @Override
    protected String executor() {
        // Lookup is a cheap cluster-state read; no need to fork to another pool.
        return ThreadPool.Names.SAME;
    }

    // Legacy Streamable hook; responses are created via read(StreamInput) instead.
    protected GetEnrichPolicyAction.Response newResponse() {
        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
    }

    @Override
    protected GetEnrichPolicyAction.Response read(StreamInput in) throws IOException {
        return new GetEnrichPolicyAction.Response(in);
    }

    @Override
    protected void masterOperation(GetEnrichPolicyAction.Request request,
                                   ClusterState state,
                                   ActionListener<GetEnrichPolicyAction.Response> listener) throws Exception {
        final Map<String, EnrichPolicy> policies;
        if (request.getNames() == null || request.getNames().isEmpty()) {
            // No filter supplied: hand back everything that is stored.
            policies = EnrichStore.getPolicies(state);
        } else {
            policies = new HashMap<>();
            for (String policyName : request.getNames()) {
                if (policyName.isEmpty()) {
                    continue; // blank entries in the request are ignored
                }
                EnrichPolicy found = EnrichStore.getPolicy(policyName, state);
                if (found != null) {
                    policies.put(policyName, found);
                }
            }
        }
        listener.onResponse(new GetEnrichPolicyAction.Response(policies));
    }

    @Override
    protected ClusterBlockException checkBlock(GetEnrichPolicyAction.Request request, ClusterState state) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.enrich.action;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction;
import org.elasticsearch.xpack.core.security.SecurityContext;
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction;
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest;
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse;
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.core.security.support.Exceptions;
import org.elasticsearch.xpack.enrich.EnrichStore;

import java.io.IOException;

/**
 * Master-node transport action that stores an enrich policy in the cluster state.
 * When security is enabled, the calling user must hold the "read" privilege on
 * every index pattern named by the policy before the policy is persisted.
 */
public class TransportPutEnrichPolicyAction
    extends TransportMasterNodeAction<PutEnrichPolicyAction.Request, AcknowledgedResponse> {

    private final XPackLicenseState licenseState;
    private final SecurityContext securityContext;
    private final Client client;

    @Inject
    public TransportPutEnrichPolicyAction(Settings settings, TransportService transportService,
                                          ClusterService clusterService,
                                          ThreadPool threadPool, Client client,
                                          XPackLicenseState licenseState, ActionFilters actionFilters,
                                          IndexNameExpressionResolver indexNameExpressionResolver) {
        super(PutEnrichPolicyAction.NAME, transportService, clusterService, threadPool, actionFilters,
            PutEnrichPolicyAction.Request::new, indexNameExpressionResolver);
        this.licenseState = licenseState;
        // Only build a security context when security is enabled; it stays null otherwise.
        this.securityContext = XPackSettings.SECURITY_ENABLED.get(settings) ?
            new SecurityContext(settings, threadPool.getThreadContext()) : null;
        this.client = client;
    }

    @Override
    protected String executor() {
        return ThreadPool.Names.SAME;
    }

    // Legacy Streamable hook; responses are created via read(StreamInput) instead.
    protected AcknowledgedResponse newResponse() {
        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
    }

    @Override
    protected AcknowledgedResponse read(StreamInput in) throws IOException {
        return new AcknowledgedResponse(in);
    }

    @Override
    protected void masterOperation(PutEnrichPolicyAction.Request request, ClusterState state,
                                   ActionListener<AcknowledgedResponse> listener) {

        if (licenseState.isAuthAllowed()) {
            // NOTE(review): this branch assumes securityContext is non-null whenever
            // isAuthAllowed() returns true (i.e. security is enabled) — confirm upstream.
            RoleDescriptor.IndicesPrivileges privileges = RoleDescriptor.IndicesPrivileges.builder()
                .indices(request.getPolicy().getIndices())
                .privileges("read")
                .build();

            String username = securityContext.getUser().principal();

            HasPrivilegesRequest privRequest = new HasPrivilegesRequest();
            privRequest.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]);
            privRequest.username(username);
            privRequest.clusterPrivileges(Strings.EMPTY_ARRAY);
            privRequest.indexPrivileges(privileges);

            ActionListener<HasPrivilegesResponse> wrappedListener = ActionListener.wrap(
                r -> {
                    if (r.isCompleteMatch()) {
                        putPolicy(request, listener);
                    } else {
                        // Fixed: the original message had a single {} placeholder but was passed
                        // two arguments, silently dropping the username from the error.
                        listener.onFailure(Exceptions.authorizationError("unable to store policy because no indices match with the "
                            + "specified index patterns {} for user [{}]", request.getPolicy().getIndices(), username));
                    }
                },
                listener::onFailure);

            client.execute(HasPrivilegesAction.INSTANCE, privRequest, wrappedListener);
        } else {
            putPolicy(request, listener);
        }
    }

    /** Persists the policy via the enrich store, translating the callback into the listener. */
    private void putPolicy(PutEnrichPolicyAction.Request request, ActionListener<AcknowledgedResponse> listener) {
        EnrichStore.putPolicy(request.getName(), request.getPolicy(), clusterService, e -> {
            if (e == null) {
                listener.onResponse(new AcknowledgedResponse(true));
            } else {
                listener.onFailure(e);
            }
        });
    }

    @Override
    protected ClusterBlockException checkBlock(PutEnrichPolicyAction.Request request, ClusterState state) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.enrich.rest;

import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction;

import java.io.IOException;

/**
 * REST endpoint for deleting an enrich policy:
 * {@code DELETE /_enrich/policy/{name}}.
 */
public class RestDeleteEnrichPolicyAction extends BaseRestHandler {

    public RestDeleteEnrichPolicyAction(final RestController controller) {
        controller.registerHandler(RestRequest.Method.DELETE, "/_enrich/policy/{name}", this);
    }

    @Override
    public String getName() {
        return "delete_enrich_policy";
    }

    @Override
    protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException {
        final String policyName = restRequest.param("name");
        final DeleteEnrichPolicyAction.Request deleteRequest = new DeleteEnrichPolicyAction.Request(policyName);
        return channel -> client.execute(DeleteEnrichPolicyAction.INSTANCE, deleteRequest, new RestToXContentListener<>(channel));
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.enrich.rest;

import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction;

import java.io.IOException;

/**
 * REST endpoint that exposes enrich statistics:
 * {@code GET /_enrich/_stats}.
 */
public class RestEnrichStatsAction extends BaseRestHandler {

    public RestEnrichStatsAction(final RestController controller) {
        controller.registerHandler(RestRequest.Method.GET, "/_enrich/_stats", this);
    }

    @Override
    public String getName() {
        return "enrich_stats";
    }

    @Override
    protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException {
        // The stats request carries no parameters; every call returns cluster-wide enrich stats.
        final EnrichStatsAction.Request statsRequest = new EnrichStatsAction.Request();
        return channel -> client.execute(EnrichStatsAction.INSTANCE, statsRequest, new RestToXContentListener<>(channel));
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.enrich.rest;

import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction;

import java.io.IOException;

/**
 * REST endpoint that triggers execution of an enrich policy:
 * {@code PUT|POST /_enrich/policy/{name}/_execute}.
 * Supports {@code wait_for_completion} (default {@code true}).
 */
public class RestExecuteEnrichPolicyAction extends BaseRestHandler {

    public RestExecuteEnrichPolicyAction(final RestController controller) {
        // Both PUT and POST are accepted for the execute endpoint.
        controller.registerHandler(RestRequest.Method.PUT, "/_enrich/policy/{name}/_execute", this);
        controller.registerHandler(RestRequest.Method.POST, "/_enrich/policy/{name}/_execute", this);
    }

    @Override
    public String getName() {
        return "execute_enrich_policy";
    }

    @Override
    protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException {
        final ExecuteEnrichPolicyAction.Request executeRequest =
            new ExecuteEnrichPolicyAction.Request(restRequest.param("name"));
        // Defaults to blocking until the policy run finishes unless the caller opts out.
        executeRequest.setWaitForCompletion(restRequest.paramAsBoolean("wait_for_completion", true));
        return channel -> client.execute(ExecuteEnrichPolicyAction.INSTANCE, executeRequest, new RestToXContentListener<>(channel));
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.enrich.rest;

import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction;

import java.io.IOException;

/**
 * REST endpoint for retrieving enrich policies:
 * {@code GET /_enrich/policy/{name}} (comma-separated names) and
 * {@code GET /_enrich/policy} (all policies).
 */
public class RestGetEnrichPolicyAction extends BaseRestHandler {

    public RestGetEnrichPolicyAction(final RestController controller) {
        controller.registerHandler(RestRequest.Method.GET, "/_enrich/policy/{name}", this);
        controller.registerHandler(RestRequest.Method.GET, "/_enrich/policy", this);
    }

    @Override
    public String getName() {
        return "get_enrich_policy";
    }

    @Override
    protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException {
        // "name" may hold several comma-separated policy names; absent means "all".
        final String[] policyNames = Strings.splitStringByCommaToArray(restRequest.param("name"));
        final GetEnrichPolicyAction.Request getRequest = new GetEnrichPolicyAction.Request(policyNames);
        return channel -> client.execute(GetEnrichPolicyAction.INSTANCE, getRequest, new RestToXContentListener<>(channel));
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.enrich.rest;

import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction;

import java.io.IOException;

/**
 * REST endpoint for storing an enrich policy:
 * {@code PUT /_enrich/policy/{name}} with the policy definition as the body.
 */
public class RestPutEnrichPolicyAction extends BaseRestHandler {

    public RestPutEnrichPolicyAction(final RestController controller) {
        controller.registerHandler(RestRequest.Method.PUT, "/_enrich/policy/{name}", this);
    }

    @Override
    public String getName() {
        return "put_enrich_policy";
    }

    @Override
    protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException {
        final PutEnrichPolicyAction.Request putRequest = createRequest(restRequest);
        return channel -> client.execute(PutEnrichPolicyAction.INSTANCE, putRequest, new RestToXContentListener<>(channel));
    }

    /** Parses the request body into a put-policy request; package-visible for tests. */
    static PutEnrichPolicyAction.Request createRequest(RestRequest restRequest) throws IOException {
        try (XContentParser bodyParser = restRequest.contentOrSourceParamParser()) {
            return PutEnrichPolicyAction.fromXContent(bodyParser, restRequest.param("name"));
        }
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.enrich;

import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;

import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Base class for enrich tests that need to synchronously store or delete
 * enrich policies through the async {@link EnrichStore} API.
 */
public abstract class AbstractEnrichTestCase extends ESSingleNodeTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        return Collections.singletonList(LocalStateEnrich.class);
    }

    /**
     * Stores a policy and blocks until the store callback fires.
     *
     * @return a reference holding the callback's exception, or {@code null} on success
     */
    protected AtomicReference<Exception> saveEnrichPolicy(String name, EnrichPolicy policy,
                                                          ClusterService clusterService) throws InterruptedException {
        final CountDownLatch done = new CountDownLatch(1);
        final AtomicReference<Exception> failure = new AtomicReference<>();
        EnrichStore.putPolicy(name, policy, clusterService, e -> {
            failure.set(e);
            done.countDown();
        });
        done.await();
        return failure;
    }

    /**
     * Deletes a policy and blocks until the delete callback fires,
     * rethrowing any exception the callback reported.
     */
    protected void deleteEnrichPolicy(String name, ClusterService clusterService) throws Exception {
        final CountDownLatch done = new CountDownLatch(1);
        final AtomicReference<Exception> failure = new AtomicReference<>();
        EnrichStore.deletePolicy(name, clusterService, e -> {
            failure.set(e);
            done.countDown();
        });
        done.await();
        Exception reported = failure.get();
        if (reported != null) {
            throw reported;
        }
    }
}
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction; +import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.reindex.ReindexPlugin; +import org.elasticsearch.ingest.common.IngestCommonPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.enrich.EnrichMultiNodeIT.DECORATE_FIELDS; +import static org.elasticsearch.xpack.enrich.EnrichMultiNodeIT.MATCH_FIELD; +import static 
org.elasticsearch.xpack.enrich.EnrichMultiNodeIT.SOURCE_INDEX_NAME; +import static org.elasticsearch.xpack.enrich.MatchProcessorTests.mapOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class BasicEnrichTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return Arrays.asList(LocalStateEnrich.class, ReindexPlugin.class, IngestCommonPlugin.class); + } + + @Override + protected boolean resetNodeAfterTest() { + return true; + } + + public void testIngestDataWithMatchProcessor() { + int numDocs = 32; + int maxMatches = randomIntBetween(2, 8); + List keys = createSourceMatchIndex(numDocs, maxMatches); + + String policyName = "my-policy"; + EnrichPolicy enrichPolicy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, + Arrays.asList(SOURCE_INDEX_NAME), MATCH_FIELD, Arrays.asList(DECORATE_FIELDS)); + PutEnrichPolicyAction.Request request = new PutEnrichPolicyAction.Request(policyName, enrichPolicy); + client().execute(PutEnrichPolicyAction.INSTANCE, request).actionGet(); + client().execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(policyName)).actionGet(); + + String pipelineName = "my-pipeline"; + String pipelineBody = "{\"processors\": [{\"enrich\": {\"policy_name\":\"" + policyName + + "\", \"field\": \"" + MATCH_FIELD + "\", \"target_field\": \"users\", \"max_matches\": " + maxMatches + "}}]}"; + PutPipelineRequest putPipelineRequest = new PutPipelineRequest(pipelineName, new BytesArray(pipelineBody), XContentType.JSON); + client().admin().cluster().putPipeline(putPipelineRequest).actionGet(); + + BulkRequest bulkRequest = new BulkRequest("my-index"); + for (int i = 0; i < numDocs; i++) { + IndexRequest indexRequest = new IndexRequest(); + 
indexRequest.id(Integer.toString(i)); + indexRequest.setPipeline(pipelineName); + indexRequest.source(Collections.singletonMap(MATCH_FIELD, keys.get(i))); + bulkRequest.add(indexRequest); + } + BulkResponse bulkResponse = client().bulk(bulkRequest).actionGet(); + assertThat("Expected no failure, but " + bulkResponse.buildFailureMessage(), bulkResponse.hasFailures(), is(false)); + int expectedId = 0; + for (BulkItemResponse itemResponse : bulkResponse) { + assertThat(itemResponse.getId(), equalTo(Integer.toString(expectedId++))); + } + + for (int doc = 0; doc < numDocs; doc++) { + GetResponse getResponse = client().get(new GetRequest("my-index", Integer.toString(doc))).actionGet(); + Map source = getResponse.getSourceAsMap(); + List userEntries = (List) source.get("users"); + assertThat(userEntries, notNullValue()); + assertThat(userEntries.size(), equalTo(maxMatches)); + for (int i = 0; i < maxMatches; i++) { + Map userEntry = (Map) userEntries.get(i); + assertThat(userEntry.size(), equalTo(DECORATE_FIELDS.length + 1)); + for (int j = 0; j < 3; j++) { + String field = DECORATE_FIELDS[j]; + assertThat(userEntry.get(field), equalTo(keys.get(doc) + j)); + } + assertThat(keys.contains(userEntry.get(MATCH_FIELD)), is(true)); + } + } + + EnrichStatsAction.Response statsResponse = + client().execute(EnrichStatsAction.INSTANCE, new EnrichStatsAction.Request()).actionGet(); + assertThat(statsResponse.getCoordinatorStats().size(), equalTo(1)); + String localNodeId = getInstanceFromNode(ClusterService.class).localNode().getId(); + assertThat(statsResponse.getCoordinatorStats().get(0).getNodeId(), equalTo(localNodeId)); + assertThat(statsResponse.getCoordinatorStats().get(0).getRemoteRequestsTotal(), greaterThanOrEqualTo(1L)); + assertThat(statsResponse.getCoordinatorStats().get(0).getExecutedSearchesTotal(), equalTo((long) numDocs)); + } + + public void testIngestDataWithGeoMatchProcessor() { + String matchField = "location"; + String enrichField = "zipcode"; + // create 
enrich index + { + IndexRequest indexRequest = new IndexRequest(SOURCE_INDEX_NAME); + indexRequest.source(mapOf(matchField, "POLYGON((" + + "-122.08592534065245 37.38501746624134," + + "-122.08193421363829 37.38501746624134," + + "-122.08193421363829 37.3879329075567," + + "-122.08592534065245 37.3879329075567," + + "-122.08592534065245 37.38501746624134))", + "zipcode", "94040")); + client().index(indexRequest).actionGet(); + client().admin().indices().refresh(new RefreshRequest(SOURCE_INDEX_NAME)).actionGet(); + } + + String policyName = "my-policy"; + EnrichPolicy enrichPolicy = + new EnrichPolicy(EnrichPolicy.GEO_MATCH_TYPE, null, Arrays.asList(SOURCE_INDEX_NAME), matchField, Arrays.asList(enrichField)); + PutEnrichPolicyAction.Request request = new PutEnrichPolicyAction.Request(policyName, enrichPolicy); + client().execute(PutEnrichPolicyAction.INSTANCE, request).actionGet(); + client().execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(policyName)).actionGet(); + + String pipelineName = "my-pipeline"; + String pipelineBody = "{\"processors\": [{\"enrich\": {\"policy_name\":\"" + policyName + + "\", \"field\": \"" + matchField + "\", \"target_field\": \"enriched\", \"max_matches\": 1 }}]}"; + PutPipelineRequest putPipelineRequest = new PutPipelineRequest(pipelineName, new BytesArray(pipelineBody), XContentType.JSON); + client().admin().cluster().putPipeline(putPipelineRequest).actionGet(); + + BulkRequest bulkRequest = new BulkRequest("my-index"); + IndexRequest indexRequest = new IndexRequest(); + indexRequest.id("_id"); + indexRequest.setPipeline(pipelineName); + indexRequest.source(mapOf(matchField, "37.386444, -122.083863")); // point within match boundary + bulkRequest.add(indexRequest); + BulkResponse bulkResponse = client().bulk(bulkRequest).actionGet(); + assertThat("Expected no failure, but " + bulkResponse.buildFailureMessage(), bulkResponse.hasFailures(), is(false)); + assertThat(bulkResponse.getItems().length, 
equalTo(1)); + assertThat(bulkResponse.getItems()[0].getId(), equalTo("_id")); + + GetResponse getResponse = client().get(new GetRequest("my-index", "_id")).actionGet(); + Map source = getResponse.getSourceAsMap(); + Map entries = (Map) source.get("enriched"); + assertThat(entries, notNullValue()); + assertThat(entries.size(), equalTo(2)); + assertThat(entries.containsKey(matchField), is(true)); + assertThat(entries.get(enrichField), equalTo("94040")); + + EnrichStatsAction.Response statsResponse = + client().execute(EnrichStatsAction.INSTANCE, new EnrichStatsAction.Request()).actionGet(); + assertThat(statsResponse.getCoordinatorStats().size(), equalTo(1)); + String localNodeId = getInstanceFromNode(ClusterService.class).localNode().getId(); + assertThat(statsResponse.getCoordinatorStats().get(0).getNodeId(), equalTo(localNodeId)); + assertThat(statsResponse.getCoordinatorStats().get(0).getRemoteRequestsTotal(), greaterThanOrEqualTo(1L)); + assertThat(statsResponse.getCoordinatorStats().get(0).getExecutedSearchesTotal(), equalTo(1L)); + } + + public void testMultiplePolicies() { + int numPolicies = 8; + for (int i = 0; i < numPolicies; i++) { + String policyName = "policy" + i; + + IndexRequest indexRequest = new IndexRequest("source-" + i); + indexRequest.source("key", "key", "value", "val" + i); + client().index(indexRequest).actionGet(); + client().admin().indices().refresh(new RefreshRequest("source-" + i)).actionGet(); + + EnrichPolicy enrichPolicy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, + Collections.singletonList("source-" + i), "key", Collections.singletonList("value")); + PutEnrichPolicyAction.Request request = new PutEnrichPolicyAction.Request(policyName, enrichPolicy); + client().execute(PutEnrichPolicyAction.INSTANCE, request).actionGet(); + client().execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(policyName)).actionGet(); + + String pipelineName = "pipeline" + i; + String pipelineBody = "{\"processors\": 
[{\"enrich\": {\"policy_name\":\"" + policyName + + "\", \"field\": \"key\", \"target_field\": \"target\"}}]}"; + PutPipelineRequest putPipelineRequest = new PutPipelineRequest(pipelineName, new BytesArray(pipelineBody), XContentType.JSON); + client().admin().cluster().putPipeline(putPipelineRequest).actionGet(); + } + + BulkRequest bulkRequest = new BulkRequest("my-index"); + for (int i = 0; i < numPolicies; i++) { + IndexRequest indexRequest = new IndexRequest(); + indexRequest.id(Integer.toString(i)); + indexRequest.setPipeline("pipeline" + i); + indexRequest.source(Collections.singletonMap("key", "key")); + bulkRequest.add(indexRequest); + } + BulkResponse bulkResponse = client().bulk(bulkRequest).actionGet(); + assertThat("Expected no failure, but " + bulkResponse.buildFailureMessage(), bulkResponse.hasFailures(), is(false)); + + for (int i = 0; i < numPolicies; i++) { + GetResponse getResponse = client().get(new GetRequest("my-index", Integer.toString(i))).actionGet(); + Map source = getResponse.getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("target"), equalTo(mapOf("key", "key", "value", "val" + i))); + } + } + + public void testAsyncTaskExecute() throws Exception { + String policyName = "async-policy"; + String sourceIndexName = "async-policy-source"; + + { + IndexRequest indexRequest = new IndexRequest(sourceIndexName); + indexRequest.source("key", "key", "value", "val1"); + client().index(indexRequest).actionGet(); + client().admin().indices().refresh(new RefreshRequest(sourceIndexName)).actionGet(); + } + + EnrichPolicy enrichPolicy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList(sourceIndexName), "key", + Collections.singletonList("value")); + PutEnrichPolicyAction.Request request = new PutEnrichPolicyAction.Request(policyName, enrichPolicy); + client().execute(PutEnrichPolicyAction.INSTANCE, request).actionGet(); + ExecuteEnrichPolicyAction.Response executeResponse = client() + 
.execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(policyName).setWaitForCompletion(false)) + .actionGet(); + + assertThat(executeResponse.getStatus(), is(nullValue())); + assertThat(executeResponse.getTaskId(), is(not(nullValue()))); + GetTaskRequest getPolicyTaskRequest = new GetTaskRequest().setTaskId(executeResponse.getTaskId()).setWaitForCompletion(true); + assertBusy(() -> { + GetTaskResponse taskResponse = client().execute(GetTaskAction.INSTANCE, getPolicyTaskRequest).actionGet(); + assertThat(((ExecuteEnrichPolicyStatus) taskResponse.getTask().getTask().getStatus()).getPhase(), + is(ExecuteEnrichPolicyStatus.PolicyPhases.COMPLETE)); + }); + + String pipelineName = "test-pipeline"; + String pipelineBody = "{\"processors\": [{\"enrich\": {\"policy_name\":\"" + policyName + + "\", \"field\": \"key\", \"target_field\": \"target\"}}]}"; + PutPipelineRequest putPipelineRequest = new PutPipelineRequest(pipelineName, new BytesArray(pipelineBody), XContentType.JSON); + client().admin().cluster().putPipeline(putPipelineRequest).actionGet(); + + BulkRequest bulkRequest = new BulkRequest("my-index"); + int numTestDocs = randomIntBetween(3, 10); + for (int i = 0; i < numTestDocs; i++) { + IndexRequest indexRequest = new IndexRequest("my-index"); + indexRequest.id(Integer.toString(i)); + indexRequest.setPipeline(pipelineName); + indexRequest.source(Collections.singletonMap("key", "key")); + bulkRequest.add(indexRequest); + } + BulkResponse bulkResponse = client().bulk(bulkRequest).actionGet(); + assertThat("Expected no failure, but " + bulkResponse.buildFailureMessage(), bulkResponse.hasFailures(), is(false)); + + for (int i = 0; i < numTestDocs; i++) { + GetResponse getResponse = client().get(new GetRequest("my-index", Integer.toString(i))).actionGet(); + Map source = getResponse.getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("target"), equalTo(mapOf("key", "key", "value", "val1"))); + } + } + + private 
List createSourceMatchIndex(int numKeys, int numDocsPerKey) { + Set keys = new HashSet<>(); + for (int id = 0; id < numKeys; id++) { + String key; + do { + key = randomAlphaOfLength(16); + } while (keys.add(key) == false); + + for (int doc = 0; doc < numDocsPerKey; doc++) { + IndexRequest indexRequest = new IndexRequest(SOURCE_INDEX_NAME); + indexRequest.source(mapOf(MATCH_FIELD, key, DECORATE_FIELDS[0], key + "0", + DECORATE_FIELDS[1], key + "1", DECORATE_FIELDS[2], key + "2")); + client().index(indexRequest).actionGet(); + } + } + client().admin().indices().refresh(new RefreshRequest(SOURCE_INDEX_NAME)).actionGet(); + return new ArrayList<>(keys); + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichMetadataTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichMetadataTests.java new file mode 100644 index 00000000000..a0474b1216b --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichMetadataTests.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.xpack.enrich.EnrichPolicyTests.randomEnrichPolicy; +import static org.hamcrest.Matchers.equalTo; + +public class EnrichMetadataTests extends AbstractSerializingTestCase { + + @Override + protected EnrichMetadata doParseInstance(XContentParser parser) throws IOException { + return EnrichMetadata.fromXContent(parser); + } + + @Override + protected EnrichMetadata createTestInstance() { + return randomEnrichMetadata(randomFrom(XContentType.values())); + } + + @Override + protected EnrichMetadata createXContextTestInstance(XContentType xContentType) { + return randomEnrichMetadata(xContentType); + } + + private static EnrichMetadata randomEnrichMetadata(XContentType xContentType) { + int numPolicies = randomIntBetween(8, 64); + Map policies = new HashMap<>(numPolicies); + for (int i = 0; i < numPolicies; i++) { + EnrichPolicy policy = randomEnrichPolicy(xContentType); + policies.put(randomAlphaOfLength(8), policy); + } + return new EnrichMetadata(policies); + } + + @Override + protected Writeable.Reader instanceReader() { + return EnrichMetadata::new; + } + + @Override + protected void assertEqualInstances(EnrichMetadata expectedInstance, EnrichMetadata newInstance) { + assertNotSame(expectedInstance, newInstance); + assertThat(newInstance.getPolicies().size(), equalTo(expectedInstance.getPolicies().size())); + for (Map.Entry entry : newInstance.getPolicies().entrySet()) { + EnrichPolicy actual = entry.getValue(); + EnrichPolicy expected = expectedInstance.getPolicies().get(entry.getKey()); + 
EnrichPolicyTests.assertEqualPolicies(expected, actual); + } + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java new file mode 100644 index 00000000000..35adfc5a0c3 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java @@ -0,0 +1,212 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.enrich; + +import org.apache.lucene.search.TotalHits; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.reindex.ReindexPlugin; +import org.elasticsearch.ingest.common.IngestCommonPlugin; +import org.elasticsearch.node.Node; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; +import 
org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.CoordinatorStats; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.enrich.MatchProcessorTests.mapOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) +public class EnrichMultiNodeIT extends ESIntegTestCase { + + static final String POLICY_NAME = "my-policy"; + private static final String PIPELINE_NAME = "my-pipeline"; + static final String SOURCE_INDEX_NAME = "users"; + static final String MATCH_FIELD = "email"; + static final String[] DECORATE_FIELDS = new String[]{"address", "city", "country"}; + + @Override + protected Collection> nodePlugins() { + return Arrays.asList(LocalStateEnrich.class, ReindexPlugin.class, IngestCommonPlugin.class); + } + + @Override + protected Collection> transportClientPlugins() { + return nodePlugins(); + } + + @Override + protected Settings transportClientSettings() { + return Settings.builder().put(super.transportClientSettings()).put(XPackSettings.SECURITY_ENABLED.getKey(), false).build(); + } + + public void testEnrichAPIs() { + final int numPolicies = randomIntBetween(2, 4); + internalCluster().startNodes(randomIntBetween(2, 3)); + int numDocsInSourceIndex = randomIntBetween(8, 32); + 
createSourceIndex(numDocsInSourceIndex); + + for (int i = 0; i < numPolicies; i++) { + String policyName = POLICY_NAME + i; + EnrichPolicy enrichPolicy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, + Arrays.asList(SOURCE_INDEX_NAME), MATCH_FIELD, Arrays.asList(DECORATE_FIELDS)); + PutEnrichPolicyAction.Request request = new PutEnrichPolicyAction.Request(policyName, enrichPolicy); + client().execute(PutEnrichPolicyAction.INSTANCE, request).actionGet(); + client().execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(policyName)).actionGet(); + + EnrichPolicy.NamedPolicy result = + client().execute(GetEnrichPolicyAction.INSTANCE, + new GetEnrichPolicyAction.Request(new String[]{policyName})).actionGet().getPolicies().get(0); + assertThat(result, equalTo(new EnrichPolicy.NamedPolicy(policyName, enrichPolicy))); + String enrichIndexPrefix = EnrichPolicy.getBaseName(policyName) + "*"; + refresh(enrichIndexPrefix); + SearchResponse searchResponse = client().search(new SearchRequest(enrichIndexPrefix)).actionGet(); + assertThat(searchResponse.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocsInSourceIndex)); + } + + GetEnrichPolicyAction.Response response = + client().execute(GetEnrichPolicyAction.INSTANCE, new GetEnrichPolicyAction.Request()).actionGet(); + assertThat(response.getPolicies().size(), equalTo(numPolicies)); + + for (int i = 0; i < numPolicies; i++) { + String policyName = POLICY_NAME + i; + client().execute(DeleteEnrichPolicyAction.INSTANCE, new DeleteEnrichPolicyAction.Request(policyName)).actionGet(); + } + + response = client().execute(GetEnrichPolicyAction.INSTANCE, new GetEnrichPolicyAction.Request()).actionGet(); + assertThat(response.getPolicies().size(), equalTo(0)); + } + + public void testEnrich() { + List nodes = internalCluster().startNodes(3); + List keys = createSourceIndex(64); + createAndExecutePolicy(); + 
createPipeline(); + enrich(keys, randomFrom(nodes)); + } + + public void testEnrichDedicatedIngestNode() { + internalCluster().startNode(); + Settings settings = Settings.builder() + .put(Node.NODE_MASTER_SETTING.getKey(), false) + .put(Node.NODE_DATA_SETTING.getKey(), false) + .put(Node.NODE_INGEST_SETTING.getKey(), true) + .build(); + String ingestOnlyNode = internalCluster().startNode(settings); + + List keys = createSourceIndex(64); + createAndExecutePolicy(); + createPipeline(); + enrich(keys, ingestOnlyNode); + } + + private static void enrich(List keys, String coordinatingNode) { + int numDocs = 256; + BulkRequest bulkRequest = new BulkRequest("my-index"); + for (int i = 0; i < numDocs; i++) { + IndexRequest indexRequest = new IndexRequest(); + indexRequest.id(Integer.toString(i)); + indexRequest.setPipeline(PIPELINE_NAME); + indexRequest.source(Collections.singletonMap(MATCH_FIELD, randomFrom(keys))); + bulkRequest.add(indexRequest); + } + BulkResponse bulkResponse = client(coordinatingNode).bulk(bulkRequest).actionGet(); + assertThat("Expected no failure, but " + bulkResponse.buildFailureMessage(), bulkResponse.hasFailures(), is(false)); + int expectedId = 0; + for (BulkItemResponse itemResponse : bulkResponse) { + assertThat(itemResponse.getId(), equalTo(Integer.toString(expectedId++))); + } + + for (int i = 0; i < numDocs; i++) { + GetResponse getResponse = client().get(new GetRequest("my-index", Integer.toString(i))).actionGet(); + Map source = getResponse.getSourceAsMap(); + Map userEntry = (Map) source.get("user"); + assertThat(userEntry.size(), equalTo(DECORATE_FIELDS.length + 1)); + assertThat(keys.contains(userEntry.get(MATCH_FIELD)), is(true)); + for (String field : DECORATE_FIELDS) { + assertThat(userEntry.get(field), notNullValue()); + } + } + + EnrichStatsAction.Response statsResponse = + client().execute(EnrichStatsAction.INSTANCE, new EnrichStatsAction.Request()).actionGet(); + assertThat(statsResponse.getCoordinatorStats().size(), 
equalTo(internalCluster().size())); + String nodeId = internalCluster().getInstance(ClusterService.class, coordinatingNode).localNode().getId(); + CoordinatorStats stats = statsResponse.getCoordinatorStats().stream() + .filter(s -> s.getNodeId().equals(nodeId)) + .findAny() + .get(); + assertThat(stats.getNodeId(), equalTo(nodeId)); + assertThat(stats.getRemoteRequestsTotal(), greaterThanOrEqualTo(1L)); + assertThat(stats.getExecutedSearchesTotal(), equalTo((long) numDocs)); + } + + private static List createSourceIndex(int numDocs) { + Set keys = new HashSet<>(); + for (int i = 0; i < numDocs; i++) { + String key; + do { + key = randomAlphaOfLength(16); + } while (keys.add(key) == false); + + IndexRequest indexRequest = new IndexRequest(SOURCE_INDEX_NAME); + indexRequest.create(true); + indexRequest.id(key); + indexRequest.source(mapOf(MATCH_FIELD, key, DECORATE_FIELDS[0], randomAlphaOfLength(4), + DECORATE_FIELDS[1], randomAlphaOfLength(4), DECORATE_FIELDS[2], randomAlphaOfLength(4))); + client().index(indexRequest).actionGet(); + } + client().admin().indices().refresh(new RefreshRequest(SOURCE_INDEX_NAME)).actionGet(); + return new ArrayList<>(keys); + } + + private static void createAndExecutePolicy() { + EnrichPolicy enrichPolicy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, + Arrays.asList(SOURCE_INDEX_NAME), MATCH_FIELD, Arrays.asList(DECORATE_FIELDS)); + PutEnrichPolicyAction.Request request = new PutEnrichPolicyAction.Request(POLICY_NAME, enrichPolicy); + client().execute(PutEnrichPolicyAction.INSTANCE, request).actionGet(); + client().execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(POLICY_NAME)).actionGet(); + } + + private static void createPipeline() { + String pipelineBody = "{\"processors\": [{\"enrich\": {\"policy_name\":\"" + POLICY_NAME + + "\", \"field\": \"" + MATCH_FIELD + "\", \"target_field\": \"user\"}}]}"; + PutPipelineRequest request = new PutPipelineRequest(PIPELINE_NAME, new BytesArray(pipelineBody), 
XContentType.JSON); + client().admin().cluster().putPipeline(request).actionGet(); + } + +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java new file mode 100644 index 00000000000..7d11a69e06a --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java @@ -0,0 +1,202 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.enrich; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.function.LongSupplier; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import static org.hamcrest.CoreMatchers.containsString; + +public class EnrichPolicyExecutorTests extends ESTestCase { + + private static ThreadPool testThreadPool; + private static 
TaskManager testTaskManager; + private static final ActionListener noOpListener = new ActionListener() { + @Override + public void onResponse(ExecuteEnrichPolicyStatus ignored) { } + + @Override + public void onFailure(Exception e) { } + }; + + @BeforeClass + public static void beforeCLass() { + testThreadPool = new TestThreadPool("EnrichPolicyExecutorTests"); + testTaskManager = new TaskManager(Settings.EMPTY, testThreadPool, Collections.emptySet()); + } + + @AfterClass + public static void afterClass() { + ThreadPool.terminate(testThreadPool, 30, TimeUnit.SECONDS); + } + + /** + * A policy runner drop-in replacement that just waits on a given countdown latch, and reports success after the latch is counted down. + */ + private static class BlockingTestPolicyRunner implements Runnable { + private final CountDownLatch latch; + private final ExecuteEnrichPolicyTask task; + private final ActionListener listener; + + BlockingTestPolicyRunner(CountDownLatch latch, ExecuteEnrichPolicyTask task, + ActionListener listener) { + this.latch = latch; + this.task = task; + this.listener = listener; + } + + @Override + public void run() { + try { + task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.RUNNING)); + latch.await(); + ExecuteEnrichPolicyStatus newStatus = new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.COMPLETE); + task.setStatus(newStatus); + listener.onResponse(newStatus); + } catch (InterruptedException e) { + throw new RuntimeException("Interrupted waiting for test framework to continue the test", e); + } + } + } + + /** + * A mocked policy executor that accepts policy execution requests which block until the returned latch is decremented. Allows for + * controlling the timing for "in flight" policy executions to test for correct locking logic. 
+ */ + private static class EnrichPolicyTestExecutor extends EnrichPolicyExecutor { + + EnrichPolicyTestExecutor(Settings settings, ClusterService clusterService, Client client, TaskManager taskManager, + ThreadPool threadPool, IndexNameExpressionResolver indexNameExpressionResolver, + LongSupplier nowSupplier) { + super(settings, clusterService, client, taskManager, threadPool, indexNameExpressionResolver, new EnrichPolicyLocks(), + nowSupplier); + } + + private CountDownLatch currentLatch; + CountDownLatch testRunPolicy(String policyName, EnrichPolicy policy, ActionListener listener) { + currentLatch = new CountDownLatch(1); + ExecuteEnrichPolicyAction.Request request = new ExecuteEnrichPolicyAction.Request(policyName); + runPolicy(request, policy, listener); + return currentLatch; + } + + @Override + protected Runnable createPolicyRunner(String policyName, EnrichPolicy policy, ExecuteEnrichPolicyTask task, + ActionListener listener) { + if (currentLatch == null) { + throw new IllegalStateException("Use the testRunPolicy method on this test instance"); + } + return new BlockingTestPolicyRunner(currentLatch, task, listener); + } + } + + public void testNonConcurrentPolicyExecution() throws InterruptedException { + String testPolicyName = "test_policy"; + EnrichPolicy testPolicy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList("some_index"), "keyfield", + Collections.singletonList("valuefield")); + final EnrichPolicyTestExecutor testExecutor = new EnrichPolicyTestExecutor(Settings.EMPTY, null, null, testTaskManager, + testThreadPool, new IndexNameExpressionResolver(), ESTestCase::randomNonNegativeLong); + + // Launch a fake policy run that will block until firstTaskBlock is counted down. 
+ final CountDownLatch firstTaskComplete = new CountDownLatch(1); + final CountDownLatch firstTaskBlock = testExecutor.testRunPolicy(testPolicyName, testPolicy, + new LatchedActionListener<>(noOpListener, firstTaskComplete)); + + // Launch a second fake run that should fail immediately because the lock is obtained. + EsRejectedExecutionException expected = expectThrows(EsRejectedExecutionException.class, + "Expected exception but nothing was thrown", () -> { + CountDownLatch countDownLatch = testExecutor.testRunPolicy(testPolicyName, testPolicy, noOpListener); + // Should throw exception on the previous statement, but if it doesn't, be a + // good citizen and conclude the fake runs to keep the logs clean from interrupted exceptions + countDownLatch.countDown(); + firstTaskBlock.countDown(); + firstTaskComplete.await(); + }); + + // Conclude the first mock run + firstTaskBlock.countDown(); + firstTaskComplete.await(); + + // Validate exception from second run + assertThat(expected.getMessage(), containsString("Could not obtain lock because policy execution for [" + testPolicyName + + "] is already in progress.")); + + // Ensure that the lock from the previous run has been cleared + CountDownLatch secondTaskComplete = new CountDownLatch(1); + CountDownLatch secondTaskBlock = testExecutor.testRunPolicy(testPolicyName, testPolicy, + new LatchedActionListener<>(noOpListener, secondTaskComplete)); + secondTaskBlock.countDown(); + secondTaskComplete.await(); + } + + public void testMaximumPolicyExecutionLimit() throws InterruptedException { + String testPolicyBaseName = "test_policy_"; + Settings testSettings = Settings.builder().put(EnrichPlugin.ENRICH_MAX_CONCURRENT_POLICY_EXECUTIONS.getKey(), 2).build(); + EnrichPolicy testPolicy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList("some_index"), "keyfield", + Collections.singletonList("valuefield")); + final EnrichPolicyTestExecutor testExecutor = new EnrichPolicyTestExecutor(testSettings, null, 
null, testTaskManager, + testThreadPool, new IndexNameExpressionResolver(), ESTestCase::randomNonNegativeLong); + + // Launch a two fake policy runs that will block until counted down to use up the maximum concurrent + final CountDownLatch firstTaskComplete = new CountDownLatch(1); + final CountDownLatch firstTaskBlock = testExecutor.testRunPolicy(testPolicyBaseName + "1", testPolicy, + new LatchedActionListener<>(noOpListener, firstTaskComplete)); + + final CountDownLatch secondTaskComplete = new CountDownLatch(1); + final CountDownLatch secondTaskBlock = testExecutor.testRunPolicy(testPolicyBaseName + "2", testPolicy, + new LatchedActionListener<>(noOpListener, secondTaskComplete)); + + // Launch a third fake run that should fail immediately because the lock is obtained. + EsRejectedExecutionException expected = expectThrows(EsRejectedExecutionException.class, + "Expected exception but nothing was thrown", () -> { + CountDownLatch countDownLatch = testExecutor.testRunPolicy(testPolicyBaseName + "3", testPolicy, noOpListener); + // Should throw exception on the previous statement, but if it doesn't, be a + // good citizen and conclude the fake runs to keep the logs clean from interrupted exceptions + countDownLatch.countDown(); + firstTaskBlock.countDown(); + secondTaskBlock.countDown(); + firstTaskComplete.await(); + secondTaskComplete.await(); + }); + + // Conclude the first mock run + firstTaskBlock.countDown(); + secondTaskBlock.countDown(); + firstTaskComplete.await(); + secondTaskComplete.await(); + + // Validate exception from second run + assertThat(expected.getMessage(), containsString("Policy execution failed. 
Policy execution for [test_policy_3] would exceed " + + "maximum concurrent policy executions [2]")); + + // Ensure that the lock from the previous run has been cleared + CountDownLatch finalTaskComplete = new CountDownLatch(1); + CountDownLatch finalTaskBlock = testExecutor.testRunPolicy(testPolicyBaseName + "1", testPolicy, + new LatchedActionListener<>(noOpListener, finalTaskComplete)); + finalTaskBlock.countDown(); + finalTaskComplete.await(); + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyLocksTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyLocksTests.java new file mode 100644 index 00000000000..bc0a8c2fdf9 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyLocksTests.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; + +public class EnrichPolicyLocksTests extends ESTestCase { + + public void testLockPolicy() { + EnrichPolicyLocks policyLocks = new EnrichPolicyLocks(); + String policy1 = "policy1"; + String policy2 = "policy2"; + + // Lock + policyLocks.lockPolicy(policy1); + + // Ensure that locked policies are rejected + EsRejectedExecutionException exception1 = expectThrows(EsRejectedExecutionException.class, + () -> policyLocks.lockPolicy(policy1)); + assertThat(exception1.getMessage(), is(equalTo("Could not obtain lock because policy execution for [policy1]" + + " is already in progress."))); + + policyLocks.lockPolicy(policy2); + EsRejectedExecutionException exception2 = expectThrows(EsRejectedExecutionException.class, + () -> policyLocks.lockPolicy(policy2)); + + assertThat(exception2.getMessage(), is(equalTo("Could not obtain lock because policy execution for [policy2]" + + " is already in progress."))); + } + + public void testSafePoint() { + EnrichPolicyLocks policyLocks = new EnrichPolicyLocks(); + String policy = "policy"; + EnrichPolicyLocks.EnrichPolicyExecutionState executionState; + + // Get exec state - should note as safe and revision 1 since nothing has happened yet + executionState = policyLocks.captureExecutionState(); + assertThat(executionState.anyPolicyInFlight, is(false)); + assertThat(executionState.executions, is(0L)); + assertThat(policyLocks.isSameState(executionState), is(true)); + + // Get another exec state - should still note as safe and revision 1 since nothing has happened yet + executionState = policyLocks.captureExecutionState(); + assertThat(executionState.anyPolicyInFlight, is(false)); + assertThat(executionState.executions, is(0L)); + 
assertThat(policyLocks.isSameState(executionState), is(true)); + + // Lock a policy and leave it open (a + policyLocks.lockPolicy(policy); + + // Get a third exec state - should have a new revision and report unsafe since execution is in progress + executionState = policyLocks.captureExecutionState(); + assertThat(executionState.anyPolicyInFlight, is(true)); + assertThat(executionState.executions, is(1L)); + + // Unlock the policy + policyLocks.releasePolicy(policy); + + // Get a fourth exec state - should have the same revision as third, and report no policies in flight since the previous execution + // is complete + executionState = policyLocks.captureExecutionState(); + assertThat(executionState.anyPolicyInFlight, is(false)); + assertThat(executionState.executions, is(1L)); + + // Create a fifth exec state, lock and release a policy, and check if the captured exec state is the same as the current state in + // the lock object + executionState = policyLocks.captureExecutionState(); + assertThat(executionState.anyPolicyInFlight, is(false)); + assertThat(executionState.executions, is(1L)); + policyLocks.lockPolicy(policy); + policyLocks.releasePolicy(policy); + // Should report as not the same as there was a transient "policy execution" between getting the exec state and checking it. 
+ assertThat(policyLocks.isSameState(executionState), is(false)); + } + + public void testReleasePolicy() { + EnrichPolicyLocks policyLocks = new EnrichPolicyLocks(); + String policy1 = "policy1"; + String policy2 = "policy2"; + + // Lock + policyLocks.lockPolicy(policy1); + policyLocks.lockPolicy(policy2); + + // Unlock + policyLocks.releasePolicy(policy1); + policyLocks.releasePolicy(policy2); + + // Ensure locking again after release works + policyLocks.lockPolicy(policy1); + policyLocks.lockPolicy(policy2); + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyMaintenanceServiceTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyMaintenanceServiceTests.java new file mode 100644 index 00000000000..670d4437627 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyMaintenanceServiceTests.java @@ -0,0 +1,197 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Phaser; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiConsumer; +import java.util.function.Consumer; + +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; + +import static org.elasticsearch.xpack.core.enrich.EnrichPolicy.MATCH_TYPE; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; + +public class EnrichPolicyMaintenanceServiceTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return Collections.singletonList(LocalStateEnrich.class); + } + + private int indexNameAutoIncrementingCounter = 0; + + public void testIndexRemoval() throws Exception { + // Create a test enabled maintenance service + EnrichPolicyMaintenanceService maintenanceService = createMaintenanceService(); + + // Add some random policies for the maintenance thread to 
reference + addPolicy("policy1", randomPolicy()); + addPolicy("policy2", randomPolicy()); + + // Create some indices for the policies + Set expectedIndices = new HashSet<>(); + String policy1Index1 = fakeRunPolicy("policy1"); + expectedIndices.add(policy1Index1); + String policy2Index1 = fakeRunPolicy("policy2"); + expectedIndices.add(policy2Index1); + + // Ensure that the expected indices exist + assertEnrichIndicesExist(expectedIndices); + + // Do cleanup - shouldn't find anything to clean up + maintenanceService.cleanUpEnrichIndices(); + + // Ensure that the expected indices still exist + assertEnrichIndicesExist(expectedIndices); + + // Replace a policy index with a new one + String policy1Index2 = fakeRunPolicy("policy1"); + expectedIndices.add(policy1Index2); + + // Ensure all three indices exist + assertEnrichIndicesExist(expectedIndices); + + // Should clean up the first index for the first policy + maintenanceService.cleanUpEnrichIndices(); + + // Ensure only the two most recent indices exist + expectedIndices.remove(policy1Index1); + assertEnrichIndicesExist(expectedIndices); + + // Remove a policy to simulate an abandoned index with a valid alias, but no policy + removePolicy("policy2"); + + // Should cleanup the first index for the second policy + maintenanceService.cleanUpEnrichIndices(); + + // Ensure only the first policy's index is left + expectedIndices.remove(policy2Index1); + assertEnrichIndicesExist(expectedIndices); + + // Clean up the remaining policy indices + removePolicy("policy1"); + maintenanceService.cleanUpEnrichIndices(); + expectedIndices.remove(policy1Index2); + assertEnrichIndicesExist(expectedIndices); + } + + private void assertEnrichIndicesExist(Set activeIndices) { + GetIndexResponse indices = client().admin().indices().getIndex(new GetIndexRequest().indices(".enrich-*")).actionGet(); + assertThat(indices.indices().length, is(equalTo(activeIndices.size()))); + for (String index : indices.indices()) { + 
assertThat(activeIndices.contains(index), is(true)); + } + } + + private EnrichPolicy randomPolicy() { + List enrichKeys = new ArrayList<>(); + for (int i = 0; i < randomIntBetween(1, 3); i++) { + enrichKeys.add(randomAlphaOfLength(10)); + } + return new EnrichPolicy(MATCH_TYPE, null, Collections.singletonList(randomAlphaOfLength(10)), randomAlphaOfLength(10), + enrichKeys); + } + + private void addPolicy(String policyName, EnrichPolicy policy) throws InterruptedException { + doSyncronously((clusterService, exceptionConsumer) -> + EnrichStore.putPolicy(policyName, policy, clusterService, exceptionConsumer)); + } + + private void removePolicy(String policyName) throws InterruptedException { + doSyncronously((clusterService, exceptionConsumer) -> + EnrichStore.deletePolicy(policyName, clusterService, exceptionConsumer)); + } + + private void doSyncronously(BiConsumer> function) throws InterruptedException { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference failure = new AtomicReference<>(null); + Consumer waitingHandler = e -> { + failure.set(e); + latch.countDown(); + }; + function.accept(clusterService, waitingHandler); + latch.await(); + Exception exception = failure.get(); + if (exception != null) { + throw new RuntimeException("Exception while modifying policy", exception); + } + } + + private String fakeRunPolicy(String forPolicy) throws IOException { + String newIndexName = EnrichPolicy.getBaseName(forPolicy) + "-" + indexNameAutoIncrementingCounter++; + CreateIndexRequest request = new CreateIndexRequest(newIndexName) + .mapping( + MapperService.SINGLE_MAPPING_NAME, JsonXContent.contentBuilder() + .startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("_meta") + .field(EnrichPolicyRunner.ENRICH_POLICY_NAME_FIELD_NAME, forPolicy) + .endObject() + .endObject() + .endObject() + ); + 
client().admin().indices().create(request).actionGet(); + promoteFakePolicyIndex(newIndexName, forPolicy); + return newIndexName; + } + + private void promoteFakePolicyIndex(String indexName, String forPolicy) { + String enrichIndexBase = EnrichPolicy.getBaseName(forPolicy); + GetAliasesResponse getAliasesResponse = client().admin().indices().getAliases(new GetAliasesRequest(enrichIndexBase)).actionGet(); + IndicesAliasesRequest aliasToggleRequest = new IndicesAliasesRequest(); + String[] indices = getAliasesResponse.getAliases().keys().toArray(String.class); + if (indices.length > 0) { + aliasToggleRequest.addAliasAction(IndicesAliasesRequest.AliasActions.remove().indices(indices).alias(enrichIndexBase)); + } + aliasToggleRequest.addAliasAction(IndicesAliasesRequest.AliasActions.add().index(indexName).alias(enrichIndexBase)); + client().admin().indices().aliases(aliasToggleRequest).actionGet(); + } + + private EnrichPolicyMaintenanceService createMaintenanceService() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + ThreadPool threadPool = getInstanceFromNode(ThreadPool.class); + // Extend the maintenance service to make the cleanUpEnrichIndices method a blocking method that waits for clean up to complete + return new EnrichPolicyMaintenanceService(Settings.EMPTY, client(), clusterService, threadPool, new EnrichPolicyLocks()) { + final Phaser completionBarrier = new Phaser(2); + + @Override + void cleanUpEnrichIndices() { + super.cleanUpEnrichIndices(); + completionBarrier.arriveAndAwaitAdvance(); + } + + @Override + void concludeMaintenance() { + super.concludeMaintenance(); + completionBarrier.arrive(); + } + }; + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java new file mode 100644 index 00000000000..b2c28da720a --- /dev/null +++ 
b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -0,0 +1,1497 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.enrich; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.admin.indices.segments.IndexSegments; +import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; +import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; +import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequest; +import org.elasticsearch.action.admin.indices.segments.ShardSegments; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import 
org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.smile.SmileXContent; +import org.elasticsearch.index.engine.Segment; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.reindex.ReindexPlugin; +import org.elasticsearch.ingest.common.IngestCommonPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskAwareRequest; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; + +public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return Arrays.asList(ReindexPlugin.class, IngestCommonPlugin.class); + } + + private static ThreadPool testThreadPool; + private static TaskManager testTaskManager; + + @BeforeClass + public static void beforeCLass() { + testThreadPool = new 
TestThreadPool("EnrichPolicyRunnerTests"); + testTaskManager = new TaskManager(Settings.EMPTY, testThreadPool, Collections.emptySet()); + } + + @AfterClass + public static void afterClass() { + ThreadPool.terminate(testThreadPool, 30, TimeUnit.SECONDS); + } + + public void testRunner() throws Exception { + final String sourceIndex = "source-index"; + IndexResponse indexRequest = client().index(new IndexRequest() + .index(sourceIndex) + .id("id") + .source( + "{" + + "\"field1\":\"value1\"," + + "\"field2\":2," + + "\"field3\":\"ignored\"," + + "\"field4\":\"ignored\"," + + "\"field5\":\"value5\"" + + "}", + XContentType.JSON) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + ).actionGet(); + assertEquals(RestStatus.CREATED, indexRequest.status()); + + SearchResponse sourceSearchResponse = client().search( + new SearchRequest(sourceIndex) + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); + assertNotNull(sourceDocMap); + assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); + assertThat(sourceDocMap.get("field2"), is(equalTo(2))); + assertThat(sourceDocMap.get("field3"), is(equalTo("ignored"))); + assertThat(sourceDocMap.get("field4"), is(equalTo("ignored"))); + assertThat(sourceDocMap.get("field5"), is(equalTo("value5"))); + + List enrichFields = new ArrayList<>(); + enrichFields.add("field2"); + enrichFields.add("field5"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList(sourceIndex), + "field1", enrichFields); + String policyName = "test1"; + + final long createTime = randomNonNegativeLong(); + final AtomicReference exception = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = createTestListener(latch, exception::set); + 
EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createTime); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + latch.await(); + if (exception.get() != null) { + throw exception.get(); + } + + // Validate Index definition + String createdEnrichIndex = ".enrich-test1-" + createTime; + GetIndexResponse enrichIndex = client().admin().indices().getIndex(new GetIndexRequest().indices(".enrich-test1")).actionGet(); + assertThat(enrichIndex.getIndices().length, equalTo(1)); + assertThat(enrichIndex.getIndices()[0], equalTo(createdEnrichIndex)); + Settings settings = enrichIndex.getSettings().get(createdEnrichIndex); + assertNotNull(settings); + assertThat(settings.get("index.auto_expand_replicas"), is(equalTo("0-all"))); + + // Validate Mapping + Map mapping = enrichIndex.getMappings().get(createdEnrichIndex).get("_doc").sourceAsMap(); + validateMappingMetadata(mapping, policyName, policy); + assertThat(mapping.get("dynamic"), is("false")); + Map properties = (Map) mapping.get("properties"); + assertNotNull(properties); + assertThat(properties.size(), is(equalTo(1))); + Map field1 = (Map) properties.get("field1"); + assertNotNull(field1); + assertThat(field1.get("type"), is(equalTo("keyword"))); + assertThat(field1.get("doc_values"), is(false)); + + // Validate document structure + SearchResponse enrichSearchResponse = client().search( + new SearchRequest(".enrich-test1") + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + + assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); + assertNotNull(enrichDocument); + assertThat(enrichDocument.size(), is(equalTo(3))); + assertThat(enrichDocument.get("field1"), is(equalTo("value1"))); + assertThat(enrichDocument.get("field2"), is(equalTo(2))); + assertThat(enrichDocument.get("field5"), is(equalTo("value5"))); 
+ + // Validate segments + validateSegments(createdEnrichIndex, 1); + + // Validate Index is read only + ensureEnrichIndexIsReadOnly(createdEnrichIndex); + } + + public void testRunnerGeoMatchType() throws Exception { + final String sourceIndex = "source-index"; + IndexResponse indexRequest = client().index(new IndexRequest() + .index(sourceIndex) + .id("id") + .source( + "{" + + "\"location\":" + + "\"POINT(10.0 10.0)\"," + + "\"zipcode\":90210" + + "}", + XContentType.JSON) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + ).actionGet(); + assertEquals(RestStatus.CREATED, indexRequest.status()); + + SearchResponse sourceSearchResponse = client().search( + new SearchRequest(sourceIndex) + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); + assertNotNull(sourceDocMap); + assertThat(sourceDocMap.get("location"), is(equalTo("POINT(10.0 10.0)"))); + assertThat(sourceDocMap.get("zipcode"), is(equalTo(90210))); + + List enrichFields = Arrays.asList("zipcode"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.GEO_MATCH_TYPE, null, Arrays.asList(sourceIndex), "location", enrichFields); + String policyName = "test1"; + + final long createTime = randomNonNegativeLong(); + final AtomicReference exception = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = createTestListener(latch, exception::set); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createTime); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + latch.await(); + if (exception.get() != null) { + throw exception.get(); + } + + // Validate Index definition + String createdEnrichIndex = ".enrich-test1-" + createTime; + GetIndexResponse enrichIndex = 
client().admin().indices().getIndex(new GetIndexRequest().indices(".enrich-test1")).actionGet(); + assertThat(enrichIndex.getIndices().length, equalTo(1)); + assertThat(enrichIndex.getIndices()[0], equalTo(createdEnrichIndex)); + Settings settings = enrichIndex.getSettings().get(createdEnrichIndex); + assertNotNull(settings); + assertThat(settings.get("index.auto_expand_replicas"), is(equalTo("0-all"))); + + // Validate Mapping + Map mapping = enrichIndex.getMappings().get(createdEnrichIndex).get("_doc").sourceAsMap(); + validateMappingMetadata(mapping, policyName, policy); + assertThat(mapping.get("dynamic"), is("false")); + Map properties = (Map) mapping.get("properties"); + assertNotNull(properties); + assertThat(properties.size(), is(equalTo(1))); + Map field1 = (Map) properties.get("location"); + assertNotNull(field1); + assertThat(field1.get("type"), is(equalTo("geo_shape"))); + assertNull(field1.get("doc_values")); + + // Validate document structure + SearchResponse enrichSearchResponse = client().search( + new SearchRequest(".enrich-test1") + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + + assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); + assertNotNull(enrichDocument); + assertThat(enrichDocument.size(), is(equalTo(2))); + assertThat(enrichDocument.get("location"), is(equalTo("POINT(10.0 10.0)"))); + assertThat(enrichDocument.get("zipcode"), is(equalTo(90210))); + + // Validate segments + validateSegments(createdEnrichIndex, 1); + + // Validate Index is read only + ensureEnrichIndexIsReadOnly(createdEnrichIndex); + } + + public void testRunnerMultiSource() throws Exception { + String baseSourceName = "source-index-"; + int numberOfSourceIndices = 3; + for (int idx = 0; idx < numberOfSourceIndices; idx++) { + final String sourceIndex = baseSourceName + idx; + IndexResponse indexRequest = 
client().index(new IndexRequest() + .index(sourceIndex) + .id(randomAlphaOfLength(10)) + .source( + "{" + + "\"idx\":" + idx + "," + + "\"key\":" + "\"key" + idx + "\"," + + "\"field1\":\"value1\"," + + "\"field2\":2," + + "\"field3\":\"ignored\"," + + "\"field4\":\"ignored\"," + + "\"field5\":\"value5\"" + + "}", + XContentType.JSON) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + ).actionGet(); + assertEquals(RestStatus.CREATED, indexRequest.status()); + + SearchResponse sourceSearchResponse = client().search( + new SearchRequest(sourceIndex) + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); + assertNotNull(sourceDocMap); + assertThat(sourceDocMap.get("idx"), is(equalTo(idx))); + assertThat(sourceDocMap.get("key"), is(equalTo("key" + idx))); + assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); + assertThat(sourceDocMap.get("field2"), is(equalTo(2))); + assertThat(sourceDocMap.get("field3"), is(equalTo("ignored"))); + assertThat(sourceDocMap.get("field4"), is(equalTo("ignored"))); + assertThat(sourceDocMap.get("field5"), is(equalTo("value5"))); + } + + String sourceIndexPattern = baseSourceName + "*"; + List enrichFields = new ArrayList<>(); + enrichFields.add("idx"); + enrichFields.add("field1"); + enrichFields.add("field2"); + enrichFields.add("field5"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList(sourceIndexPattern), + "key", enrichFields); + String policyName = "test1"; + + final long createTime = randomNonNegativeLong(); + final AtomicReference exception = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = createTestListener(latch, exception::set); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, 
policy, listener, createTime); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + latch.await(); + if (exception.get() != null) { + throw exception.get(); + } + + // Validate Index definition + String createdEnrichIndex = ".enrich-test1-" + createTime; + GetIndexResponse enrichIndex = client().admin().indices().getIndex(new GetIndexRequest().indices(".enrich-test1")).actionGet(); + assertThat(enrichIndex.getIndices().length, equalTo(1)); + assertThat(enrichIndex.getIndices()[0], equalTo(createdEnrichIndex)); + Settings settings = enrichIndex.getSettings().get(createdEnrichIndex); + assertNotNull(settings); + assertThat(settings.get("index.auto_expand_replicas"), is(equalTo("0-all"))); + + // Validate Mapping + Map mapping = enrichIndex.getMappings().get(createdEnrichIndex).get("_doc").sourceAsMap(); + validateMappingMetadata(mapping, policyName, policy); + assertThat(mapping.get("dynamic"), is("false")); + Map properties = (Map) mapping.get("properties"); + assertNotNull(properties); + assertThat(properties.size(), is(equalTo(1))); + Map keyfield = (Map) properties.get("key"); + assertNotNull(keyfield); + assertThat(keyfield.get("type"), is(equalTo("keyword"))); + assertThat(keyfield.get("doc_values"), is(false)); + + // Validate document structure + SearchResponse enrichSearchResponse = client().search( + new SearchRequest(".enrich-test1") + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); + assertNotNull(enrichDocument); + assertThat(enrichDocument.size(), is(equalTo(5))); + assertThat(enrichDocument.get("key"), is(equalTo("key0"))); + assertThat(enrichDocument.get("field1"), is(equalTo("value1"))); + assertThat(enrichDocument.get("field2"), is(equalTo(2))); + assertThat(enrichDocument.get("field5"), is(equalTo("value5"))); + + 
// Validate segments + validateSegments(createdEnrichIndex, 3); + + // Validate Index is read only + ensureEnrichIndexIsReadOnly(createdEnrichIndex); + } + + public void testRunnerMultiSourceDocIdCollisions() throws Exception { + String baseSourceName = "source-index-"; + int numberOfSourceIndices = 3; + String collidingDocId = randomAlphaOfLength(10); + for (int idx = 0; idx < numberOfSourceIndices; idx++) { + final String sourceIndex = baseSourceName + idx; + IndexResponse indexRequest = client().index(new IndexRequest() + .index(sourceIndex) + .id(collidingDocId) + .routing(collidingDocId + idx) + .source( + "{" + + "\"idx\":" + idx + "," + + "\"key\":" + "\"key" + idx + "\"," + + "\"field1\":\"value1\"," + + "\"field2\":2," + + "\"field3\":\"ignored\"," + + "\"field4\":\"ignored\"," + + "\"field5\":\"value5\"" + + "}", + XContentType.JSON) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + ).actionGet(); + assertEquals(RestStatus.CREATED, indexRequest.status()); + + SearchResponse sourceSearchResponse = client().search( + new SearchRequest(sourceIndex) + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); + assertNotNull(sourceDocMap); + assertThat(sourceDocMap.get("idx"), is(equalTo(idx))); + assertThat(sourceDocMap.get("key"), is(equalTo("key" + idx))); + assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); + assertThat(sourceDocMap.get("field2"), is(equalTo(2))); + assertThat(sourceDocMap.get("field3"), is(equalTo("ignored"))); + assertThat(sourceDocMap.get("field4"), is(equalTo("ignored"))); + assertThat(sourceDocMap.get("field5"), is(equalTo("value5"))); + + SearchResponse routingSearchResponse = client().search( + new SearchRequest(sourceIndex) + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchQuery("_routing", 
collidingDocId + idx)))).actionGet(); + assertEquals(1L, routingSearchResponse.getHits().getTotalHits().value); + } + + String sourceIndexPattern = baseSourceName + "*"; + List enrichFields = Arrays.asList("idx", "field1", "field2", "field5"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList(sourceIndexPattern), "key", + enrichFields); + String policyName = "test1"; + + final long createTime = randomNonNegativeLong(); + final AtomicReference exception = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = createTestListener(latch, exception::set); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createTime); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + latch.await(); + if (exception.get() != null) { + throw exception.get(); + } + + // Validate Index definition + String createdEnrichIndex = ".enrich-test1-" + createTime; + GetIndexResponse enrichIndex = client().admin().indices().getIndex(new GetIndexRequest().indices(".enrich-test1")).actionGet(); + assertThat(enrichIndex.getIndices().length, equalTo(1)); + assertThat(enrichIndex.getIndices()[0], equalTo(createdEnrichIndex)); + Settings settings = enrichIndex.getSettings().get(createdEnrichIndex); + assertNotNull(settings); + assertThat(settings.get("index.auto_expand_replicas"), is(equalTo("0-all"))); + + // Validate Mapping + Map mapping = enrichIndex.getMappings().get(createdEnrichIndex).get("_doc").sourceAsMap(); + assertThat(mapping.get("dynamic"), is("false")); + Map properties = (Map) mapping.get("properties"); + assertNotNull(properties); + assertThat(properties.size(), is(equalTo(1))); + Map keyfield = (Map) properties.get("key"); + assertNotNull(keyfield); + assertThat(keyfield.get("type"), is(equalTo("keyword"))); + assertThat(keyfield.get("doc_values"), is(false)); + + // Validate document structure + SearchResponse enrichSearchResponse 
= client().search( + new SearchRequest(".enrich-test1") + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); + assertNotNull(enrichDocument); + assertThat(enrichDocument.size(), is(equalTo(5))); + assertThat(enrichDocument.get("key"), is(equalTo("key0"))); + assertThat(enrichDocument.get("field1"), is(equalTo("value1"))); + assertThat(enrichDocument.get("field2"), is(equalTo(2))); + assertThat(enrichDocument.get("field5"), is(equalTo("value5"))); + + // Validate removal of routing values + for (int idx = 0; idx < numberOfSourceIndices; idx++) { + SearchResponse routingSearchResponse = client().search( + new SearchRequest(".enrich-test1") + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchQuery("_routing", collidingDocId + idx)))).actionGet(); + assertEquals(0L, routingSearchResponse.getHits().getTotalHits().value); + } + + // Validate segments + validateSegments(createdEnrichIndex, 3); + + // Validate Index is read only + ensureEnrichIndexIsReadOnly(createdEnrichIndex); + } + + public void testRunnerMultiSourceEnrichKeyCollisions() throws Exception { + String baseSourceName = "source-index-"; + int numberOfSourceIndices = 3; + for (int idx = 0; idx < numberOfSourceIndices; idx++) { + final String sourceIndex = baseSourceName + idx; + IndexResponse indexRequest = client().index(new IndexRequest() + .index(sourceIndex) + .id(randomAlphaOfLength(10)) + .source( + "{" + + "\"idx\":" + idx + "," + + "\"key\":" + "\"key\"," + + "\"field1\":\"value1\"," + + "\"field2\":2," + + "\"field3\":\"ignored\"," + + "\"field4\":\"ignored\"," + + "\"field5\":\"value5\"" + + "}", + XContentType.JSON) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + ).actionGet(); + assertEquals(RestStatus.CREATED, indexRequest.status()); + + 
SearchResponse sourceSearchResponse = client().search( + new SearchRequest(sourceIndex) + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); + assertNotNull(sourceDocMap); + assertThat(sourceDocMap.get("idx"), is(equalTo(idx))); + assertThat(sourceDocMap.get("key"), is(equalTo("key"))); + assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); + assertThat(sourceDocMap.get("field2"), is(equalTo(2))); + assertThat(sourceDocMap.get("field3"), is(equalTo("ignored"))); + assertThat(sourceDocMap.get("field4"), is(equalTo("ignored"))); + assertThat(sourceDocMap.get("field5"), is(equalTo("value5"))); + } + + String sourceIndexPattern = baseSourceName + "*"; + List enrichFields = Arrays.asList("idx", "field1", "field2", "field5"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList(sourceIndexPattern), "key", + enrichFields); + String policyName = "test1"; + + final long createTime = randomNonNegativeLong(); + final AtomicReference exception = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = createTestListener(latch, exception::set); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createTime); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + latch.await(); + if (exception.get() != null) { + throw exception.get(); + } + + // Validate Index definition + String createdEnrichIndex = ".enrich-test1-" + createTime; + GetIndexResponse enrichIndex = client().admin().indices().getIndex(new GetIndexRequest().indices(".enrich-test1")).actionGet(); + assertThat(enrichIndex.getIndices().length, equalTo(1)); + assertThat(enrichIndex.getIndices()[0], equalTo(createdEnrichIndex)); + Settings settings = 
enrichIndex.getSettings().get(createdEnrichIndex); + assertNotNull(settings); + assertThat(settings.get("index.auto_expand_replicas"), is(equalTo("0-all"))); + + // Validate Mapping + Map mapping = enrichIndex.getMappings().get(createdEnrichIndex).get("_doc").sourceAsMap(); + assertThat(mapping.get("dynamic"), is("false")); + Map properties = (Map) mapping.get("properties"); + assertNotNull(properties); + assertThat(properties.size(), is(equalTo(1))); + Map keyfield = (Map) properties.get("key"); + assertNotNull(keyfield); + assertThat(keyfield.get("type"), is(equalTo("keyword"))); + assertThat(keyfield.get("doc_values"), is(false)); + + // Validate document structure + SearchResponse enrichSearchResponse = client().search( + new SearchRequest(".enrich-test1") + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); + assertNotNull(enrichDocument); + assertThat(enrichDocument.size(), is(equalTo(5))); + assertThat(enrichDocument.get("key"), is(equalTo("key"))); + assertThat(enrichDocument.get("field1"), is(equalTo("value1"))); + assertThat(enrichDocument.get("field2"), is(equalTo(2))); + assertThat(enrichDocument.get("field5"), is(equalTo("value5"))); + + // Validate segments + validateSegments(createdEnrichIndex, 3); + + // Validate Index is read only + ensureEnrichIndexIsReadOnly(createdEnrichIndex); + } + + public void testRunnerNoSourceIndex() throws Exception { + final String sourceIndex = "source-index"; + + List enrichFields = new ArrayList<>(); + enrichFields.add("field2"); + enrichFields.add("field5"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList(sourceIndex), "field1", + enrichFields); + String policyName = "test1"; + + final long createTime = randomNonNegativeLong(); + final AtomicReference 
exception = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = createTestListener(latch, exception::set); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createTime); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + latch.await(); + if (exception.get() != null) { + Exception thrown = exception.get(); + assertThat(thrown, instanceOf(IndexNotFoundException.class)); + assertThat(thrown.getMessage(), containsString("no such index [" + sourceIndex + "]")); + } else { + fail("Expected exception but nothing was thrown"); + } + } + + public void testRunnerNoSourceMapping() throws Exception { + final String sourceIndex = "source-index"; + CreateIndexResponse createResponse = client().admin().indices().create(new CreateIndexRequest(sourceIndex)).actionGet(); + assertTrue(createResponse.isAcknowledged()); + + List enrichFields = new ArrayList<>(); + enrichFields.add("field2"); + enrichFields.add("field5"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList(sourceIndex), "field1", + enrichFields); + String policyName = "test1"; + + final long createTime = randomNonNegativeLong(); + final AtomicReference exception = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = createTestListener(latch, exception::set); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createTime); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + latch.await(); + if (exception.get() != null) { + Exception thrown = exception.get(); + assertThat(thrown, instanceOf(ElasticsearchException.class)); + assertThat(thrown.getMessage(), containsString("Enrich policy execution for [" + policyName + + "] failed. 
No mapping available on source [" + sourceIndex + "] included in [[" + sourceIndex + "]]")); + } else { + fail("Expected exception but nothing was thrown"); + } + } + + public void testRunnerKeyNestedSourceMapping() throws Exception { + final String sourceIndex = "source-index"; + XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); + mappingBuilder.startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("properties") + .startObject("nesting") + .field("type", "nested") + .startObject("properties") + .startObject("key") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .startObject("field2") + .field("type", "integer") + .endObject() + .endObject() + .endObject() + .endObject(); + CreateIndexResponse createResponse = client().admin().indices().create(new CreateIndexRequest(sourceIndex) + .mapping(MapperService.SINGLE_MAPPING_NAME, mappingBuilder)).actionGet(); + assertTrue(createResponse.isAcknowledged()); + + String policyName = "test1"; + List enrichFields = Collections.singletonList("field2"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList(sourceIndex), + "nesting.key", enrichFields); + + final long createTime = randomNonNegativeLong(); + final AtomicReference exception = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = createTestListener(latch, exception::set); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createTime); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + latch.await(); + if (exception.get() != null) { + Exception thrown = exception.get(); + assertThat(thrown, instanceOf(ElasticsearchException.class)); + assertThat(thrown.getMessage(), containsString("Enrich policy execution for [" + policyName + + "] failed while validating field mappings for index [" + sourceIndex + "]")); + assertThat(thrown.getCause().getMessage(), 
containsString("Could not traverse mapping to field [nesting.key]. The [nesting" + + "] field must be regular object but was [nested].")); + } else { + fail("Expected exception but nothing was thrown"); + } + } + + public void testRunnerValueNestedSourceMapping() throws Exception { + final String sourceIndex = "source-index"; + XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); + mappingBuilder.startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("properties") + .startObject("key") + .field("type", "keyword") + .endObject() + .startObject("nesting") + .field("type", "nested") + .startObject("properties") + .startObject("field2") + .field("type", "integer") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + CreateIndexResponse createResponse = client().admin().indices().create(new CreateIndexRequest(sourceIndex) + .mapping(MapperService.SINGLE_MAPPING_NAME, mappingBuilder)).actionGet(); + assertTrue(createResponse.isAcknowledged()); + + String policyName = "test1"; + List enrichFields = new ArrayList<>(); + enrichFields.add("nesting.field2"); + enrichFields.add("missingField"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList(sourceIndex), + "key", enrichFields); + + final long createTime = randomNonNegativeLong(); + final AtomicReference exception = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = createTestListener(latch, exception::set); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createTime); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + latch.await(); + if (exception.get() != null) { + Exception thrown = exception.get(); + assertThat(thrown, instanceOf(ElasticsearchException.class)); + assertThat(thrown.getMessage(), containsString("Enrich policy execution for [" + policyName + + "] failed while validating 
field mappings for index [" + sourceIndex + "]")); + assertThat(thrown.getCause().getMessage(), containsString("Could not traverse mapping to field [nesting.field2]. " + + "The [nesting] field must be regular object but was [nested].")); + } else { + fail("Expected exception but nothing was thrown"); + } + } + + public void testRunnerObjectSourceMapping() throws Exception { + final String sourceIndex = "source-index"; + XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); + mappingBuilder.startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("properties") + .startObject("data") + .startObject("properties") + .startObject("field1") + .field("type", "keyword") + .endObject() + .startObject("field2") + .field("type", "integer") + .endObject() + .startObject("field3") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + CreateIndexResponse createResponse = client().admin().indices().create(new CreateIndexRequest(sourceIndex) + .mapping(MapperService.SINGLE_MAPPING_NAME, mappingBuilder)).actionGet(); + assertTrue(createResponse.isAcknowledged()); + + IndexResponse indexRequest = client().index(new IndexRequest() + .index(sourceIndex) + .id("id") + .source( + "{" + + "\"data\":{" + + "\"field1\":\"value1\"," + + "\"field2\":2," + + "\"field3\":\"ignored\"" + + "}" + + "}", + XContentType.JSON) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + ).actionGet(); + assertEquals(RestStatus.CREATED, indexRequest.status()); + + SearchResponse sourceSearchResponse = client().search( + new SearchRequest(sourceIndex) + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); + assertNotNull(sourceDocMap); + Map dataField = ((Map) sourceDocMap.get("data")); + 
assertNotNull(dataField); + assertThat(dataField.get("field1"), is(equalTo("value1"))); + assertThat(dataField.get("field2"), is(equalTo(2))); + assertThat(dataField.get("field3"), is(equalTo("ignored"))); + + String policyName = "test1"; + List enrichFields = new ArrayList<>(); + enrichFields.add("data.field2"); + enrichFields.add("missingField"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList(sourceIndex), + "data.field1", enrichFields); + + final long createTime = randomNonNegativeLong(); + final AtomicReference exception = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = createTestListener(latch, exception::set); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createTime); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + latch.await(); + if (exception.get() != null) { + throw exception.get(); + } + + // Validate Index definition + String createdEnrichIndex = ".enrich-test1-" + createTime; + GetIndexResponse enrichIndex = client().admin().indices().getIndex(new GetIndexRequest().indices(".enrich-test1")).actionGet(); + assertThat(enrichIndex.getIndices().length, equalTo(1)); + assertThat(enrichIndex.getIndices()[0], equalTo(createdEnrichIndex)); + Settings settings = enrichIndex.getSettings().get(createdEnrichIndex); + assertNotNull(settings); + assertThat(settings.get("index.auto_expand_replicas"), is(equalTo("0-all"))); + + // Validate Mapping + Map mapping = enrichIndex.getMappings().get(createdEnrichIndex).get("_doc").sourceAsMap(); + validateMappingMetadata(mapping, policyName, policy); + assertThat(mapping.get("dynamic"), is("false")); + Map properties = (Map) mapping.get("properties"); + assertNotNull(properties); + assertThat(properties.size(), is(equalTo(1))); + Map data = (Map) properties.get("data"); + assertNotNull(data); + assertThat(data.size(), is(equalTo(1))); + Map 
dataProperties = (Map) data.get("properties"); + assertNotNull(dataProperties); + assertThat(dataProperties.size(), is(equalTo(1))); + Map field1 = (Map) dataProperties.get("field1"); + assertNotNull(field1); + assertThat(field1.get("type"), is(equalTo("keyword"))); + assertThat(field1.get("doc_values"), is(false)); + + SearchResponse enrichSearchResponse = client().search( + new SearchRequest(".enrich-test1") + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + + assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); + assertNotNull(enrichDocument); + assertThat(enrichDocument.size(), is(equalTo(1))); + Map resultDataField = ((Map) enrichDocument.get("data")); + assertNotNull(resultDataField); + assertThat(resultDataField.size(), is(equalTo(2))); + assertThat(resultDataField.get("field1"), is(equalTo("value1"))); + assertThat(resultDataField.get("field2"), is(equalTo(2))); + assertNull(resultDataField.get("field3")); + + // Validate segments + validateSegments(createdEnrichIndex, 1); + + // Validate Index is read only + ensureEnrichIndexIsReadOnly(createdEnrichIndex); + } + + public void testRunnerExplicitObjectSourceMapping() throws Exception { + final String sourceIndex = "source-index"; + XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); + mappingBuilder.startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("properties") + .startObject("data") + .field("type", "object") + .startObject("properties") + .startObject("field1") + .field("type", "keyword") + .endObject() + .startObject("field2") + .field("type", "integer") + .endObject() + .startObject("field3") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + CreateIndexResponse createResponse = client().admin().indices().create(new 
CreateIndexRequest(sourceIndex) + .mapping(MapperService.SINGLE_MAPPING_NAME, mappingBuilder)).actionGet(); + assertTrue(createResponse.isAcknowledged()); + + IndexResponse indexRequest = client().index(new IndexRequest() + .index(sourceIndex) + .id("id") + .source( + "{" + + "\"data\":{" + + "\"field1\":\"value1\"," + + "\"field2\":2," + + "\"field3\":\"ignored\"" + + "}" + + "}", + XContentType.JSON) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + ).actionGet(); + assertEquals(RestStatus.CREATED, indexRequest.status()); + + SearchResponse sourceSearchResponse = client().search( + new SearchRequest(sourceIndex) + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); + assertNotNull(sourceDocMap); + Map dataField = ((Map) sourceDocMap.get("data")); + assertNotNull(dataField); + assertThat(dataField.get("field1"), is(equalTo("value1"))); + assertThat(dataField.get("field2"), is(equalTo(2))); + assertThat(dataField.get("field3"), is(equalTo("ignored"))); + + String policyName = "test1"; + List enrichFields = new ArrayList<>(); + enrichFields.add("data.field2"); + enrichFields.add("missingField"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList(sourceIndex), + "data.field1", enrichFields); + + final long createTime = randomNonNegativeLong(); + final AtomicReference exception = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = createTestListener(latch, exception::set); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createTime); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + latch.await(); + if (exception.get() != null) { + throw exception.get(); + } + + // Validate Index definition + 
String createdEnrichIndex = ".enrich-test1-" + createTime; + GetIndexResponse enrichIndex = client().admin().indices().getIndex(new GetIndexRequest().indices(".enrich-test1")).actionGet(); + assertThat(enrichIndex.getIndices().length, equalTo(1)); + assertThat(enrichIndex.getIndices()[0], equalTo(createdEnrichIndex)); + Settings settings = enrichIndex.getSettings().get(createdEnrichIndex); + assertNotNull(settings); + assertThat(settings.get("index.auto_expand_replicas"), is(equalTo("0-all"))); + + // Validate Mapping + Map mapping = enrichIndex.getMappings().get(createdEnrichIndex).get("_doc").sourceAsMap(); + validateMappingMetadata(mapping, policyName, policy); + assertThat(mapping.get("dynamic"), is("false")); + Map properties = (Map) mapping.get("properties"); + assertNotNull(properties); + assertThat(properties.size(), is(equalTo(1))); + Map data = (Map) properties.get("data"); + assertNotNull(data); + assertThat(data.size(), is(equalTo(1))); + Map dataProperties = (Map) data.get("properties"); + assertNotNull(dataProperties); + assertThat(dataProperties.size(), is(equalTo(1))); + Map field1 = (Map) dataProperties.get("field1"); + assertNotNull(field1); + assertThat(field1.get("type"), is(equalTo("keyword"))); + assertThat(field1.get("doc_values"), is(false)); + + SearchResponse enrichSearchResponse = client().search( + new SearchRequest(".enrich-test1") + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + + assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); + assertNotNull(enrichDocument); + assertThat(enrichDocument.size(), is(equalTo(1))); + Map resultDataField = ((Map) enrichDocument.get("data")); + assertNotNull(resultDataField); + assertThat(resultDataField.size(), is(equalTo(2))); + assertThat(resultDataField.get("field1"), is(equalTo("value1"))); + 
assertThat(resultDataField.get("field2"), is(equalTo(2))); + assertNull(resultDataField.get("field3")); + + // Validate segments + validateSegments(createdEnrichIndex, 1); + + // Validate Index is read only + ensureEnrichIndexIsReadOnly(createdEnrichIndex); + } + + public void testRunnerTwoObjectLevelsSourceMapping() throws Exception { + final String sourceIndex = "source-index"; + XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); + mappingBuilder.startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("properties") + .startObject("data") + .startObject("properties") + .startObject("fields") + .startObject("properties") + .startObject("field1") + .field("type", "keyword") + .endObject() + .startObject("field2") + .field("type", "integer") + .endObject() + .startObject("field3") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + CreateIndexResponse createResponse = client().admin().indices().create(new CreateIndexRequest(sourceIndex) + .mapping(MapperService.SINGLE_MAPPING_NAME, mappingBuilder)).actionGet(); + assertTrue(createResponse.isAcknowledged()); + + IndexResponse indexRequest = client().index(new IndexRequest() + .index(sourceIndex) + .id("id") + .source( + "{" + + "\"data\":{" + + "\"fields\":{" + + "\"field1\":\"value1\"," + + "\"field2\":2," + + "\"field3\":\"ignored\"" + + "}" + + "}" + + "}", + XContentType.JSON) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + ).actionGet(); + assertEquals(RestStatus.CREATED, indexRequest.status()); + + SearchResponse sourceSearchResponse = client().search( + new SearchRequest(sourceIndex) + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); + 
assertNotNull(sourceDocMap); + Map dataField = ((Map) sourceDocMap.get("data")); + assertNotNull(dataField); + Map fieldsField = ((Map) dataField.get("fields")); + assertNotNull(fieldsField); + assertThat(fieldsField.get("field1"), is(equalTo("value1"))); + assertThat(fieldsField.get("field2"), is(equalTo(2))); + assertThat(fieldsField.get("field3"), is(equalTo("ignored"))); + + String policyName = "test1"; + List enrichFields = new ArrayList<>(); + enrichFields.add("data.fields.field2"); + enrichFields.add("missingField"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList(sourceIndex), + "data.fields.field1", enrichFields); + + final long createTime = randomNonNegativeLong(); + final AtomicReference exception = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = createTestListener(latch, exception::set); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createTime); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + latch.await(); + if (exception.get() != null) { + throw exception.get(); + } + + // Validate Index definition + String createdEnrichIndex = ".enrich-test1-" + createTime; + GetIndexResponse enrichIndex = client().admin().indices().getIndex(new GetIndexRequest().indices(".enrich-test1")).actionGet(); + assertThat(enrichIndex.getIndices().length, equalTo(1)); + assertThat(enrichIndex.getIndices()[0], equalTo(createdEnrichIndex)); + Settings settings = enrichIndex.getSettings().get(createdEnrichIndex); + assertNotNull(settings); + assertThat(settings.get("index.auto_expand_replicas"), is(equalTo("0-all"))); + + // Validate Mapping + Map mapping = enrichIndex.getMappings().get(createdEnrichIndex).get("_doc").sourceAsMap(); + validateMappingMetadata(mapping, policyName, policy); + assertThat(mapping.get("dynamic"), is("false")); + Map properties = (Map) mapping.get("properties"); + 
assertNotNull(properties); + assertThat(properties.size(), is(equalTo(1))); + Map data = (Map) properties.get("data"); + assertNotNull(data); + assertThat(data.size(), is(equalTo(1))); + Map dataProperties = (Map) data.get("properties"); + assertNotNull(dataProperties); + assertThat(dataProperties.size(), is(equalTo(1))); + Map fields = (Map) dataProperties.get("fields"); + assertNotNull(fields); + assertThat(fields.size(), is(equalTo(1))); + Map fieldsProperties = (Map) fields.get("properties"); + assertNotNull(fieldsProperties); + assertThat(fieldsProperties.size(), is(equalTo(1))); + Map field1 = (Map) fieldsProperties.get("field1"); + assertNotNull(field1); + assertThat(field1.get("type"), is(equalTo("keyword"))); + assertThat(field1.get("doc_values"), is(false)); + + SearchResponse enrichSearchResponse = client().search( + new SearchRequest(".enrich-test1") + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + + assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); + assertNotNull(enrichDocument); + assertThat(enrichDocument.size(), is(equalTo(1))); + Map resultDataField = ((Map) enrichDocument.get("data")); + assertNotNull(resultDataField); + Map resultFieldsField = ((Map) resultDataField.get("fields")); + assertNotNull(resultFieldsField); + assertThat(resultFieldsField.size(), is(equalTo(2))); + assertThat(resultFieldsField.get("field1"), is(equalTo("value1"))); + assertThat(resultFieldsField.get("field2"), is(equalTo(2))); + assertNull(resultFieldsField.get("field3")); + + // Validate segments + validateSegments(createdEnrichIndex, 1); + + // Validate Index is read only + ensureEnrichIndexIsReadOnly(createdEnrichIndex); + } + + public void testRunnerDottedKeyNameSourceMapping() throws Exception { + final String sourceIndex = "source-index"; + XContentBuilder mappingBuilder = 
JsonXContent.contentBuilder(); + mappingBuilder.startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("properties") + .startObject("data.field1") + .field("type", "keyword") + .endObject() + .startObject("data.field2") + .field("type", "integer") + .endObject() + .startObject("data.field3") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject(); + CreateIndexResponse createResponse = client().admin().indices().create(new CreateIndexRequest(sourceIndex) + .mapping(MapperService.SINGLE_MAPPING_NAME, mappingBuilder)).actionGet(); + assertTrue(createResponse.isAcknowledged()); + + IndexResponse indexRequest = client().index(new IndexRequest() + .index(sourceIndex) + .id("id") + .source( + "{" + + "\"data.field1\":\"value1\"," + + "\"data.field2\":2," + + "\"data.field3\":\"ignored\"" + + "}", + XContentType.JSON) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + ).actionGet(); + assertEquals(RestStatus.CREATED, indexRequest.status()); + + SearchResponse sourceSearchResponse = client().search( + new SearchRequest(sourceIndex) + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); + assertNotNull(sourceDocMap); + assertThat(sourceDocMap.get("data.field1"), is(equalTo("value1"))); + assertThat(sourceDocMap.get("data.field2"), is(equalTo(2))); + assertThat(sourceDocMap.get("data.field3"), is(equalTo("ignored"))); + + String policyName = "test1"; + List enrichFields = new ArrayList<>(); + enrichFields.add("data.field2"); + enrichFields.add("missingField"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList(sourceIndex), + "data.field1", enrichFields); + + final long createTime = randomNonNegativeLong(); + final AtomicReference exception = new 
AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = createTestListener(latch, exception::set); + EnrichPolicyRunner enrichPolicyRunner = createPolicyRunner(policyName, policy, listener, createTime); + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + latch.await(); + if (exception.get() != null) { + throw exception.get(); + } + + // Validate Index definition + String createdEnrichIndex = ".enrich-test1-" + createTime; + GetIndexResponse enrichIndex = client().admin().indices().getIndex(new GetIndexRequest().indices(".enrich-test1")).actionGet(); + assertThat(enrichIndex.getIndices().length, equalTo(1)); + assertThat(enrichIndex.getIndices()[0], equalTo(createdEnrichIndex)); + Settings settings = enrichIndex.getSettings().get(createdEnrichIndex); + assertNotNull(settings); + assertThat(settings.get("index.auto_expand_replicas"), is(equalTo("0-all"))); + + // Validate Mapping + Map mapping = enrichIndex.getMappings().get(createdEnrichIndex).get("_doc").sourceAsMap(); + validateMappingMetadata(mapping, policyName, policy); + assertThat(mapping.get("dynamic"), is("false")); + Map properties = (Map) mapping.get("properties"); + assertNotNull(properties); + assertThat(properties.size(), is(equalTo(1))); + Map data = (Map) properties.get("data"); + assertNotNull(data); + assertThat(data.size(), is(equalTo(1))); + Map dataProperties = (Map) data.get("properties"); + assertNotNull(dataProperties); + assertThat(dataProperties.size(), is(equalTo(1))); + Map field1 = (Map) dataProperties.get("field1"); + assertNotNull(field1); + assertThat(field1.get("type"), is(equalTo("keyword"))); + assertThat(field1.get("doc_values"), is(false)); + + SearchResponse enrichSearchResponse = client().search( + new SearchRequest(".enrich-test1") + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + + assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + 
Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); + assertNotNull(enrichDocument); + assertThat(enrichDocument.size(), is(equalTo(2))); + assertThat(enrichDocument.get("data.field1"), is(equalTo("value1"))); + assertThat(enrichDocument.get("data.field2"), is(equalTo(2))); + assertNull(enrichDocument.get("data.field3")); + + // Validate segments + validateSegments(createdEnrichIndex, 1); + + // Validate Index is read only + ensureEnrichIndexIsReadOnly(createdEnrichIndex); + } + + public void testRunnerWithForceMergeRetry() throws Exception { + final String sourceIndex = "source-index"; + IndexResponse indexRequest = client().index(new IndexRequest() + .index(sourceIndex) + .id("id") + .source( + "{" + + "\"field1\":\"value1\"," + + "\"field2\":2," + + "\"field3\":\"ignored\"," + + "\"field4\":\"ignored\"," + + "\"field5\":\"value5\"" + + "}", + XContentType.JSON) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + ).actionGet(); + assertEquals(RestStatus.CREATED, indexRequest.status()); + + SearchResponse sourceSearchResponse = client().search( + new SearchRequest(sourceIndex) + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); + assertNotNull(sourceDocMap); + assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); + assertThat(sourceDocMap.get("field2"), is(equalTo(2))); + assertThat(sourceDocMap.get("field3"), is(equalTo("ignored"))); + assertThat(sourceDocMap.get("field4"), is(equalTo("ignored"))); + assertThat(sourceDocMap.get("field5"), is(equalTo("value5"))); + + List enrichFields = Arrays.asList("field2", "field5"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList(sourceIndex), "field1", + enrichFields); + String policyName = "test1"; + + final long 
createTime = randomNonNegativeLong(); + String createdEnrichIndex = ".enrich-test1-" + createTime; + final AtomicReference exception = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = createTestListener(latch, exception::set); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + IndexNameExpressionResolver resolver = getInstanceFromNode(IndexNameExpressionResolver.class); + Task asyncTask = testTaskManager.register("enrich", "policy_execution", new TaskAwareRequest() { + @Override + public void setParentTask(TaskId taskId) {} + + @Override + public TaskId getParentTask() { + return TaskId.EMPTY_TASK_ID; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new ExecuteEnrichPolicyTask(id, type, action, getDescription(), parentTaskId, headers); + } + + @Override + public String getDescription() { + return policyName; + } + }); + ExecuteEnrichPolicyTask task = ((ExecuteEnrichPolicyTask) asyncTask); + // The executor would wrap the listener in order to clean up the task in the + // task manager, but we're just testing the runner, so we make sure to clean + // up after ourselves. 
+ ActionListener wrappedListener = new ActionListener() { + @Override + public void onResponse(ExecuteEnrichPolicyStatus policyExecutionResult) { + testTaskManager.unregister(task); + listener.onResponse(policyExecutionResult); + } + + @Override + public void onFailure(Exception e) { + testTaskManager.unregister(task); + listener.onFailure(e); + } + }; + AtomicInteger forceMergeAttempts = new AtomicInteger(0); + final XContentBuilder unmergedDocument = SmileXContent.contentBuilder() + .startObject().field("field1", "value1.1").field("field2", 2).field("field5", "value5").endObject(); + EnrichPolicyRunner enrichPolicyRunner = new EnrichPolicyRunner(policyName, policy, task, wrappedListener, clusterService, client(), + resolver, () -> createTime, randomIntBetween(1, 10000), randomIntBetween(3, 10)) { + @Override + protected void ensureSingleSegment(String destinationIndexName, int attempt) { + forceMergeAttempts.incrementAndGet(); + if (attempt == 1) { + // Put and flush a document to increase the number of segments, simulating not + // all segments were merged on the first try. 
+ IndexResponse indexRequest = client().index(new IndexRequest() + .index(createdEnrichIndex) + .source(unmergedDocument) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + ).actionGet(); + assertEquals(RestStatus.CREATED, indexRequest.status()); + } + super.ensureSingleSegment(destinationIndexName, attempt); + } + }; + + logger.info("Starting policy run"); + enrichPolicyRunner.run(); + latch.await(); + if (exception.get() != null) { + throw exception.get(); + } + + // Validate number of force merges + assertThat(forceMergeAttempts.get(), equalTo(2)); + + // Validate Index definition + GetIndexResponse enrichIndex = client().admin().indices().getIndex(new GetIndexRequest().indices(".enrich-test1")).actionGet(); + assertThat(enrichIndex.getIndices().length, equalTo(1)); + assertThat(enrichIndex.getIndices()[0], equalTo(createdEnrichIndex)); + Settings settings = enrichIndex.getSettings().get(createdEnrichIndex); + assertNotNull(settings); + assertThat(settings.get("index.auto_expand_replicas"), is(equalTo("0-all"))); + + // Validate Mapping + Map mapping = enrichIndex.getMappings().get(createdEnrichIndex).get("_doc").sourceAsMap(); + validateMappingMetadata(mapping, policyName, policy); + assertThat(mapping.get("dynamic"), is("false")); + Map properties = (Map) mapping.get("properties"); + assertNotNull(properties); + assertThat(properties.size(), is(equalTo(1))); + Map field1 = (Map) properties.get("field1"); + assertNotNull(field1); + assertThat(field1.get("type"), is(equalTo("keyword"))); + assertThat(field1.get("doc_values"), is(false)); + + // Validate document structure + SearchResponse allEnrichDocs = client().search( + new SearchRequest(".enrich-test1") + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchAllQuery()))).actionGet(); + assertThat(allEnrichDocs.getHits().getTotalHits().value, equalTo(2L)); + for (String keyValue : Arrays.asList("value1", "value1.1")) { + SearchResponse enrichSearchResponse = client().search( + new 
SearchRequest(".enrich-test1") + .source(SearchSourceBuilder.searchSource() + .query(QueryBuilders.matchQuery("field1", keyValue)))).actionGet(); + + assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); + assertNotNull(enrichDocument); + assertThat(enrichDocument.size(), is(equalTo(3))); + assertThat(enrichDocument.get("field1"), is(equalTo(keyValue))); + assertThat(enrichDocument.get("field2"), is(equalTo(2))); + assertThat(enrichDocument.get("field5"), is(equalTo("value5"))); + } + + // Validate segments + validateSegments(createdEnrichIndex, 2); + + // Validate Index is read only + ensureEnrichIndexIsReadOnly(createdEnrichIndex); + } + + private EnrichPolicyRunner createPolicyRunner(String policyName, EnrichPolicy policy, + ActionListener listener, Long createTime) { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + IndexNameExpressionResolver resolver = getInstanceFromNode(IndexNameExpressionResolver.class); + Task asyncTask = testTaskManager.register("enrich", "policy_execution", new TaskAwareRequest() { + @Override + public void setParentTask(TaskId taskId) {} + + @Override + public TaskId getParentTask() { + return TaskId.EMPTY_TASK_ID; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new ExecuteEnrichPolicyTask(id, type, action, getDescription(), parentTaskId, headers); + } + + @Override + public String getDescription() { + return policyName; + } + }); + ExecuteEnrichPolicyTask task = ((ExecuteEnrichPolicyTask) asyncTask); + // The executor would wrap the listener in order to clean up the task in the + // task manager, but we're just testing the runner, so we make sure to clean + // up after ourselves. 
+ ActionListener wrappedListener = new ActionListener() { + @Override + public void onResponse(ExecuteEnrichPolicyStatus policyExecutionResult) { + testTaskManager.unregister(task); + listener.onResponse(policyExecutionResult); + } + + @Override + public void onFailure(Exception e) { + testTaskManager.unregister(task); + listener.onFailure(e); + } + }; + return new EnrichPolicyRunner(policyName, policy, task, wrappedListener, clusterService, client(), resolver, () -> createTime, + randomIntBetween(1, 10000), randomIntBetween(1, 10)); + } + + private ActionListener createTestListener(final CountDownLatch latch, + final Consumer exceptionConsumer) { + return new LatchedActionListener<>(ActionListener.wrap((r) -> logger.info("Run complete"), exceptionConsumer), latch); + } + + private void validateMappingMetadata(Map mapping, String policyName, EnrichPolicy policy) { + Object metadata = mapping.get("_meta"); + assertThat(metadata, is(notNullValue())); + Map metadataMap = (Map) metadata; + assertThat(metadataMap.get(EnrichPolicyRunner.ENRICH_README_FIELD_NAME), equalTo(EnrichPolicyRunner.ENRICH_INDEX_README_TEXT)); + assertThat(metadataMap.get(EnrichPolicyRunner.ENRICH_POLICY_NAME_FIELD_NAME), equalTo(policyName)); + assertThat(metadataMap.get(EnrichPolicyRunner.ENRICH_MATCH_FIELD_NAME), equalTo(policy.getMatchField())); + assertThat(metadataMap.get(EnrichPolicyRunner.ENRICH_POLICY_TYPE_FIELD_NAME), equalTo(policy.getType())); + } + + private void validateSegments(String createdEnrichIndex, int expectedDocs) { + IndicesSegmentResponse indicesSegmentResponse = client().admin().indices() + .segments(new IndicesSegmentsRequest(createdEnrichIndex)).actionGet(); + IndexSegments indexSegments = indicesSegmentResponse.getIndices().get(createdEnrichIndex); + assertNotNull(indexSegments); + assertThat(indexSegments.getShards().size(), is(equalTo(1))); + IndexShardSegments shardSegments = indexSegments.getShards().get(0); + assertNotNull(shardSegments); + 
assertThat(shardSegments.getShards().length, is(equalTo(1))); + ShardSegments shard = shardSegments.getShards()[0]; + assertThat(shard.getSegments().size(), is(equalTo(1))); + Segment segment = shard.getSegments().iterator().next(); + assertThat(segment.getNumDocs(), is(equalTo(expectedDocs))); + } + + private void ensureEnrichIndexIsReadOnly(String createdEnrichIndex) { + ElasticsearchException expected = expectThrows(ElasticsearchException.class, () -> client().index(new IndexRequest() + .index(createdEnrichIndex) + .id(randomAlphaOfLength(10)) + .source(Collections.singletonMap(randomAlphaOfLength(6), randomAlphaOfLength(10)))).actionGet()); + + assertThat(expected.getMessage(), containsString("index [" + createdEnrichIndex + + "] blocked by: [FORBIDDEN/8/index write (api)]")); + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyTests.java new file mode 100644 index 00000000000..3c87867f9df --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyTests.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Arrays; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class EnrichPolicyTests extends AbstractSerializingTestCase { + + @Override + protected EnrichPolicy doParseInstance(XContentParser parser) throws IOException { + return EnrichPolicy.fromXContent(parser); + } + + @Override + protected EnrichPolicy createTestInstance() { + return randomEnrichPolicy(randomFrom(XContentType.values())); + } + + @Override + protected EnrichPolicy createXContextTestInstance(XContentType xContentType) { + return randomEnrichPolicy(xContentType); + } + + public static EnrichPolicy randomEnrichPolicy(XContentType xContentType) { + final QueryBuilder queryBuilder; + if (randomBoolean()) { + queryBuilder = new MatchAllQueryBuilder(); + } else { + queryBuilder = new TermQueryBuilder(randomAlphaOfLength(4), randomAlphaOfLength(4)); + } + + final ByteArrayOutputStream out = new ByteArrayOutputStream(); + try (XContentBuilder xContentBuilder = XContentFactory.contentBuilder(xContentType, out)) { + XContentBuilder content = queryBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); + 
content.flush(); + EnrichPolicy.QuerySource querySource = new EnrichPolicy.QuerySource(new BytesArray(out.toByteArray()), content.contentType()); + return new EnrichPolicy( + randomFrom(EnrichPolicy.SUPPORTED_POLICY_TYPES), + randomBoolean() ? querySource : null, + Arrays.asList(generateRandomStringArray(8, 4, false, false)), + randomAlphaOfLength(4), + Arrays.asList(generateRandomStringArray(8, 4, false, false)) + ); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + + } + + @Override + protected Writeable.Reader instanceReader() { + return EnrichPolicy::new; + } + + @Override + protected void assertEqualInstances(EnrichPolicy expectedInstance, EnrichPolicy newInstance) { + assertNotSame(expectedInstance, newInstance); + assertEqualPolicies(expectedInstance, newInstance); + } + + public static void assertEqualPolicies(EnrichPolicy expectedInstance, EnrichPolicy newInstance) { + assertThat(newInstance.getType(), equalTo(expectedInstance.getType())); + if (newInstance.getQuery() != null) { + // testFromXContent, always shuffles the xcontent and then byte wise the query is different, so we check the parsed version: + assertThat(newInstance.getQuery().getQueryAsMap(), equalTo(expectedInstance.getQuery().getQueryAsMap())); + } else { + assertThat(expectedInstance.getQuery(), nullValue()); + } + assertThat(newInstance.getIndices(), equalTo(expectedInstance.getIndices())); + assertThat(newInstance.getMatchField(), equalTo(expectedInstance.getMatchField())); + assertThat(newInstance.getEnrichFields(), equalTo(expectedInstance.getEnrichFields())); + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyUpdateTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyUpdateTests.java new file mode 100644 index 00000000000..5663b98aee4 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyUpdateTests.java @@ -0,0 +1,62 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.reindex.ReindexPlugin; +import org.elasticsearch.ingest.IngestService; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.common.IngestCommonPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class EnrichPolicyUpdateTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return Arrays.asList(LocalStateEnrich.class, ReindexPlugin.class, IngestCommonPlugin.class); + } + + public void testUpdatePolicyOnly() { + IngestService ingestService = getInstanceFromNode(IngestService.class); + createIndex("index", Settings.EMPTY, "_doc", "key1", "type=keyword", "field1", "type=keyword"); + + EnrichPolicy instance1 = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList("index"), + "key1", Collections.singletonList("field1")); + PutEnrichPolicyAction.Request putPolicyRequest = new 
PutEnrichPolicyAction.Request("my_policy", instance1); + assertAcked(client().execute(PutEnrichPolicyAction.INSTANCE, putPolicyRequest).actionGet()); + assertThat("Execute failed", client().execute(ExecuteEnrichPolicyAction.INSTANCE, + new ExecuteEnrichPolicyAction.Request("my_policy")).actionGet().getStatus().isCompleted(), equalTo(true)); + + String pipelineConfig = + "{\"processors\":[{\"enrich\": {\"policy_name\": \"my_policy\", \"field\": \"key\", \"target_field\": \"target\"}}]}"; + PutPipelineRequest putPipelineRequest = new PutPipelineRequest("1", new BytesArray(pipelineConfig), XContentType.JSON); + assertAcked(client().admin().cluster().putPipeline(putPipelineRequest).actionGet()); + Pipeline pipelineInstance1 = ingestService.getPipeline("1"); + assertThat(pipelineInstance1.getProcessors().get(0), instanceOf(MatchProcessor.class)); + + EnrichPolicy instance2 = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList("index"), + "key2", Collections.singletonList("field2")); + ResourceAlreadyExistsException exc = expectThrows(ResourceAlreadyExistsException.class, () -> + client().execute(PutEnrichPolicyAction.INSTANCE, new PutEnrichPolicyAction.Request("my_policy", instance2)).actionGet()); + assertTrue(exc.getMessage().contains("policy [my_policy] already exists")); + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactoryTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactoryTests.java new file mode 100644 index 00000000000..dc71c80f37c --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactoryTests.java @@ -0,0 +1,231 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +public class EnrichProcessorFactoryTests extends ESTestCase { + + public void testCreateProcessorInstance() throws Exception { + List enrichValues = Arrays.asList("globalRank", "tldRank", "tld"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Collections.singletonList("source_index"), "my_key", + enrichValues); + EnrichProcessorFactory factory = new EnrichProcessorFactory(null); + factory.metaData = createMetaData("majestic", policy); + + Map config = new HashMap<>(); + config.put("policy_name", "majestic"); + config.put("field", "host"); + config.put("target_field", "entry"); + boolean keyIgnoreMissing = randomBoolean(); + if (keyIgnoreMissing || randomBoolean()) { + config.put("ignore_missing", keyIgnoreMissing); + } + + Boolean overrideEnabled = randomBoolean() ? 
null : randomBoolean(); + if (overrideEnabled != null) { + config.put("override", overrideEnabled); + } + + Integer maxMatches = null; + if (randomBoolean()) { + maxMatches = randomIntBetween(1, 128); + config.put("max_matches", maxMatches); + } + + int numRandomValues = randomIntBetween(1, 8); + List> randomValues = new ArrayList<>(numRandomValues); + for (int i = 0; i < numRandomValues; i++) { + randomValues.add(new Tuple<>(randomFrom(enrichValues), randomAlphaOfLength(4))); + } + + MatchProcessor result = (MatchProcessor) factory.create(Collections.emptyMap(), "_tag", config); + assertThat(result, notNullValue()); + assertThat(result.getPolicyName(), equalTo("majestic")); + assertThat(result.getField(), equalTo("host")); + assertThat(result.getTargetField(), equalTo("entry")); + assertThat(result.getMatchField(), equalTo("my_key")); + assertThat(result.isIgnoreMissing(), is(keyIgnoreMissing)); + if (overrideEnabled != null) { + assertThat(result.isOverrideEnabled(), is(overrideEnabled)); + } else { + assertThat(result.isOverrideEnabled(), is(true)); + } + if (maxMatches != null) { + assertThat(result.getMaxMatches(), equalTo(maxMatches)); + } else { + assertThat(result.getMaxMatches(), equalTo(1)); + } + } + + public void testPolicyDoesNotExist() { + List enrichValues = Arrays.asList("globalRank", "tldRank", "tld"); + EnrichProcessorFactory factory = new EnrichProcessorFactory(null); + factory.metaData = MetaData.builder().build(); + + Map config = new HashMap<>(); + config.put("policy_name", "majestic"); + config.put("enrich_key", "host"); + boolean keyIgnoreMissing = randomBoolean(); + if (keyIgnoreMissing || randomBoolean()) { + config.put("ignore_missing", keyIgnoreMissing); + } + + int numRandomValues = randomIntBetween(1, 8); + List> randomValues = new ArrayList<>(numRandomValues); + for (int i = 0; i < numRandomValues; i++) { + randomValues.add(new Tuple<>(randomFrom(enrichValues), randomAlphaOfLength(4))); + } + + List> valuesConfig = new 
ArrayList<>(numRandomValues); + for (Tuple tuple : randomValues) { + Map entry = new HashMap<>(); + entry.put("source", tuple.v1()); + entry.put("target", tuple.v2()); + valuesConfig.add(entry); + } + config.put("set_from", valuesConfig); + + Exception e = expectThrows(IllegalArgumentException.class, () -> factory.create(Collections.emptyMap(), "_tag", config)); + assertThat(e.getMessage(), equalTo("no enrich index exists for policy with name [majestic]")); + } + + public void testPolicyNameMissing() { + List enrichValues = Arrays.asList("globalRank", "tldRank", "tld"); + EnrichProcessorFactory factory = new EnrichProcessorFactory(null); + + Map config = new HashMap<>(); + config.put("enrich_key", "host"); + boolean keyIgnoreMissing = randomBoolean(); + if (keyIgnoreMissing || randomBoolean()) { + config.put("ignore_missing", keyIgnoreMissing); + } + + int numRandomValues = randomIntBetween(1, 8); + List> randomValues = new ArrayList<>(numRandomValues); + for (int i = 0; i < numRandomValues; i++) { + randomValues.add(new Tuple<>(randomFrom(enrichValues), randomAlphaOfLength(4))); + } + + List> valuesConfig = new ArrayList<>(numRandomValues); + for (Tuple tuple : randomValues) { + Map entry = new HashMap<>(); + entry.put("source", tuple.v1()); + entry.put("target", tuple.v2()); + valuesConfig.add(entry); + } + config.put("set_from", valuesConfig); + + Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(Collections.emptyMap(), "_tag", config)); + assertThat(e.getMessage(), equalTo("[policy_name] required property is missing")); + } + + public void testUnsupportedPolicy() throws Exception { + List enrichValues = Arrays.asList("globalRank", "tldRank", "tld"); + EnrichPolicy policy = + new EnrichPolicy("unsupported", null, Collections.singletonList("source_index"), "my_key", enrichValues); + EnrichProcessorFactory factory = new EnrichProcessorFactory(null); + factory.metaData = createMetaData("majestic", policy); + + Map config = new 
HashMap<>(); + config.put("policy_name", "majestic"); + config.put("field", "host"); + config.put("target_field", "entry"); + boolean keyIgnoreMissing = randomBoolean(); + if (keyIgnoreMissing || randomBoolean()) { + config.put("ignore_missing", keyIgnoreMissing); + } + + Exception e = expectThrows(IllegalArgumentException.class, () -> factory.create(Collections.emptyMap(), "_tag", config)); + assertThat(e.getMessage(), equalTo("unsupported policy type [unsupported]")); + } + + public void testCompactEnrichValuesFormat() throws Exception { + List enrichValues = Arrays.asList("globalRank", "tldRank", "tld"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, + Collections.singletonList("source_index"), "host", enrichValues); + EnrichProcessorFactory factory = new EnrichProcessorFactory(null); + factory.metaData = createMetaData("majestic", policy); + + Map config = new HashMap<>(); + config.put("policy_name", "majestic"); + config.put("field", "host"); + config.put("target_field", "entry"); + + MatchProcessor result = (MatchProcessor) factory.create(Collections.emptyMap(), "_tag", config); + assertThat(result, notNullValue()); + assertThat(result.getPolicyName(), equalTo("majestic")); + assertThat(result.getField(), equalTo("host")); + assertThat(result.getTargetField(), equalTo("entry")); + } + + public void testNoTargetField() throws Exception { + List enrichValues = Arrays.asList("globalRank", "tldRank", "tld"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, + Collections.singletonList("source_index"), "host", enrichValues); + EnrichProcessorFactory factory = new EnrichProcessorFactory(null); + factory.metaData = createMetaData("majestic", policy); + + Map config1 = new HashMap<>(); + config1.put("policy_name", "majestic"); + config1.put("field", "host"); + + Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(Collections.emptyMap(), "_tag", config1)); + assertThat(e.getMessage(), 
equalTo("[target_field] required property is missing")); + } + + public void testIllegalMaxMatches() throws Exception { + List enrichValues = Arrays.asList("globalRank", "tldRank", "tld"); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, Arrays.asList("source_index"), "my_key", + enrichValues); + EnrichProcessorFactory factory = new EnrichProcessorFactory(null); + factory.metaData = createMetaData("majestic", policy); + + Map config = new HashMap<>(); + config.put("policy_name", "majestic"); + config.put("field", "host"); + config.put("target_field", "entry"); + config.put("max_matches", randomBoolean() ? between(-2048, 0) : between(129, 2048)); + + Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(Collections.emptyMap(), "_tag", config)); + assertThat(e.getMessage(), equalTo("[max_matches] should be between 1 and 128")); + } + + static MetaData createMetaData(String name, EnrichPolicy policy) throws IOException { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .build(); + IndexMetaData.Builder builder = IndexMetaData.builder(EnrichPolicy.getBaseName(name) + "-1"); + builder.settings(settings); + builder.putMapping("_doc", "{\"_meta\": {\"enrich_match_field\": \"" + policy.getMatchField() + + "\", \"enrich_policy_type\": \"" + policy.getType() + "\"}}"); + builder.putAlias(AliasMetaData.builder(EnrichPolicy.getBaseName(name)).build()); + return MetaData.builder().put(builder).build(); + } + +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichRestartIT.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichRestartIT.java new file mode 100644 index 00000000000..b8c416160b3 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichRestartIT.java @@ -0,0 +1,79 @@ 
+/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.reindex.ReindexPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Optional; + +import static org.elasticsearch.xpack.enrich.EnrichMultiNodeIT.DECORATE_FIELDS; +import static org.elasticsearch.xpack.enrich.EnrichMultiNodeIT.MATCH_FIELD; +import static org.elasticsearch.xpack.enrich.EnrichMultiNodeIT.POLICY_NAME; +import static org.elasticsearch.xpack.enrich.EnrichMultiNodeIT.SOURCE_INDEX_NAME; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) +public class EnrichRestartIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return Arrays.asList(LocalStateEnrich.class, ReindexPlugin.class); + } + + @Override + protected Collection> transportClientPlugins() { + return nodePlugins(); + } + + @Override + protected Settings transportClientSettings() { + return Settings.builder().put(super.transportClientSettings()).put(XPackSettings.SECURITY_ENABLED.getKey(), false).build(); + } + + public void testRestart() throws Exception { + final int numPolicies = randomIntBetween(2, 4); + internalCluster().startNode(); + + EnrichPolicy 
enrichPolicy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, + Collections.singletonList(SOURCE_INDEX_NAME), MATCH_FIELD, Arrays.asList(DECORATE_FIELDS)); + for (int i = 0; i < numPolicies; i++) { + String policyName = POLICY_NAME + i; + PutEnrichPolicyAction.Request request = new PutEnrichPolicyAction.Request(policyName, enrichPolicy); + client().execute(PutEnrichPolicyAction.INSTANCE, request).actionGet(); + } + + verifyPolicies(numPolicies, enrichPolicy); + // After full restart the policies should still exist: + internalCluster().fullRestart(); + verifyPolicies(numPolicies, enrichPolicy); + } + + private static void verifyPolicies(int numPolicies, EnrichPolicy enrichPolicy) { + GetEnrichPolicyAction.Response response = + client().execute(GetEnrichPolicyAction.INSTANCE, new GetEnrichPolicyAction.Request()).actionGet(); + assertThat(response.getPolicies().size(), equalTo(numPolicies)); + for (int i = 0; i < numPolicies; i++) { + String policyName = POLICY_NAME + i; + Optional result = response.getPolicies().stream() + .filter(namedPolicy -> namedPolicy.getName().equals(policyName)) + .findFirst(); + assertThat(result.isPresent(), is(true)); + assertThat(result.get().getPolicy(), equalTo(enrichPolicy)); + } + } + +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichStoreCrudTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichStoreCrudTests.java new file mode 100644 index 00000000000..d69a2c76e49 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichStoreCrudTests.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; + +import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.xpack.enrich.EnrichPolicyTests.randomEnrichPolicy; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class EnrichStoreCrudTests extends AbstractEnrichTestCase { + + public void testCrud() throws Exception { + EnrichPolicy policy = randomEnrichPolicy(XContentType.JSON); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + String name = "my-policy"; + + AtomicReference error = saveEnrichPolicy(name, policy, clusterService); + assertThat(error.get(), nullValue()); + + EnrichPolicy result = EnrichStore.getPolicy(name, clusterService.state()); + assertThat(result, equalTo(policy)); + + Map listPolicies = EnrichStore.getPolicies(clusterService.state()); + assertThat(listPolicies.size(), equalTo(1)); + assertThat(listPolicies.get(name), equalTo(policy)); + + deleteEnrichPolicy(name, clusterService); + result = EnrichStore.getPolicy(name, clusterService.state()); + assertThat(result, nullValue()); + } + + public void testImmutability() throws Exception { + EnrichPolicy policy = randomEnrichPolicy(XContentType.JSON); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + String name = "my-policy"; + + AtomicReference error = saveEnrichPolicy(name, policy, clusterService); + assertThat(error.get(), nullValue()); + + error = saveEnrichPolicy(name, policy, clusterService); + assertTrue(error.get().getMessage().contains("policy [my-policy] already exists"));; + + deleteEnrichPolicy(name, clusterService); + EnrichPolicy result = EnrichStore.getPolicy(name, clusterService.state()); + 
assertThat(result, nullValue()); + } + + public void testPutValidation() throws Exception { + EnrichPolicy policy = randomEnrichPolicy(XContentType.JSON); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + { + String nullOrEmptyName = randomBoolean() ? "" : null; + + IllegalArgumentException error = expectThrows(IllegalArgumentException.class, + () -> saveEnrichPolicy(nullOrEmptyName, policy, clusterService)); + + assertThat(error.getMessage(), equalTo("name is missing or empty")); + } + { + IllegalArgumentException error = expectThrows(IllegalArgumentException.class, + () -> saveEnrichPolicy("my-policy", null, clusterService)); + + assertThat(error.getMessage(), equalTo("policy is missing")); + } + { + IllegalArgumentException error = + expectThrows(IllegalArgumentException.class, () -> saveEnrichPolicy("my#policy", policy, clusterService)); + assertThat(error.getMessage(), equalTo("Invalid policy name [my#policy], must not contain '#'")); + } + { + IllegalArgumentException error = + expectThrows(IllegalArgumentException.class, () -> saveEnrichPolicy("..", policy, clusterService)); + assertThat(error.getMessage(), equalTo("Invalid policy name [..], must not be '.' or '..'")); + } + { + IllegalArgumentException error = + expectThrows(IllegalArgumentException.class, () -> saveEnrichPolicy("myPolicy", policy, clusterService)); + assertThat(error.getMessage(), equalTo("Invalid policy name [myPolicy], must be lowercase")); + } + } + + public void testDeleteValidation() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + { + String nullOrEmptyName = randomBoolean() ? 
"" : null; + + IllegalArgumentException error = expectThrows(IllegalArgumentException.class, + () -> deleteEnrichPolicy(nullOrEmptyName, clusterService)); + + assertThat(error.getMessage(), equalTo("name is missing or empty")); + } + { + ResourceNotFoundException error = expectThrows(ResourceNotFoundException.class, + () -> deleteEnrichPolicy("my-policy", clusterService)); + + assertThat(error.getMessage(), equalTo("policy [my-policy] not found")); + } + } + + public void testGetValidation() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + String nullOrEmptyName = randomBoolean() ? "" : null; + + IllegalArgumentException error = expectThrows(IllegalArgumentException.class, + () -> EnrichStore.getPolicy(nullOrEmptyName, clusterService.state())); + + assertThat(error.getMessage(), equalTo("name is missing or empty")); + + EnrichPolicy policy = EnrichStore.getPolicy("null-policy", clusterService.state()); + assertNull(policy); + } + + public void testListValidation() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + Map policies = EnrichStore.getPolicies(clusterService.state()); + assertTrue(policies.isEmpty()); + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java new file mode 100644 index 00000000000..7d1933d56b7 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java @@ -0,0 +1,173 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich; + +import org.apache.lucene.search.TotalHits; +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchResponseSections; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.cluster.routing.Preference; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.MultiPoint; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.ConstantScoreQueryBuilder; +import org.elasticsearch.index.query.GeoShapeQueryBuilder; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.test.ESTestCase; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; + +import static org.elasticsearch.xpack.enrich.MatchProcessorTests.mapOf; +import static org.hamcrest.Matchers.emptyArray; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class GeoMatchProcessorTests extends ESTestCase { + + public void testBasics() { + Point 
expectedPoint = new Point(-122.084110, 37.386637); + testBasicsForFieldValue(mapOf("lat", 37.386637, "lon", -122.084110), expectedPoint); + testBasicsForFieldValue("37.386637, -122.084110", expectedPoint); + testBasicsForFieldValue("POINT (-122.084110 37.386637)", expectedPoint); + testBasicsForFieldValue(Arrays.asList(-122.084110, 37.386637), expectedPoint); + testBasicsForFieldValue(Arrays.asList(Arrays.asList(-122.084110, 37.386637), + "37.386637, -122.084110", "POINT (-122.084110 37.386637)"), + new MultiPoint(Arrays.asList(expectedPoint, expectedPoint, expectedPoint))); + + testBasicsForFieldValue("not a point", null); + } + + private void testBasicsForFieldValue(Object fieldValue, Geometry expectedGeometry) { + int maxMatches = randomIntBetween(1, 8); + MockSearchFunction mockSearch = mockedSearchFunction(mapOf("key", mapOf("shape", "object", "zipcode",94040))); + GeoMatchProcessor processor = new GeoMatchProcessor("_tag", mockSearch, "_name", "location", "entry", + false, false, "shape", maxMatches, ShapeRelation.INTERSECTS); + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", "_routing", 1L, VersionType.INTERNAL, + mapOf("location", fieldValue)); + // Run + IngestDocument[] holder = new IngestDocument[1]; + processor.execute(ingestDocument, (result, e) -> holder[0] = result); + if (expectedGeometry == null) { + assertThat(holder[0], nullValue()); + return; + } else { + assertThat(holder[0], notNullValue()); + } + // Check request + SearchRequest request = mockSearch.getCapturedRequest(); + assertThat(request.indices().length, equalTo(1)); + assertThat(request.indices()[0], equalTo(".enrich-_name")); + assertThat(request.preference(), equalTo(Preference.LOCAL.type())); + assertThat(request.source().size(), equalTo(maxMatches)); + assertThat(request.source().trackScores(), equalTo(false)); + assertThat(request.source().fetchSource().fetchSource(), equalTo(true)); + assertThat(request.source().fetchSource().excludes(), emptyArray()); 
+ assertThat(request.source().fetchSource().includes(), emptyArray()); + assertThat(request.source().query(), instanceOf(ConstantScoreQueryBuilder.class)); + assertThat(((ConstantScoreQueryBuilder) request.source().query()).innerQuery(), instanceOf(GeoShapeQueryBuilder.class)); + GeoShapeQueryBuilder shapeQueryBuilder = (GeoShapeQueryBuilder) ((ConstantScoreQueryBuilder) request.source().query()).innerQuery(); + assertThat(shapeQueryBuilder.fieldName(), equalTo("shape")); + assertThat(shapeQueryBuilder.shape(), equalTo(expectedGeometry)); + + // Check result + Map entry; + if (maxMatches == 1) { + entry = ingestDocument.getFieldValue("entry", Map.class); + } else { + List entries = ingestDocument.getFieldValue("entry", List.class); + entry = (Map) entries.get(0); + } + assertThat(entry.size(), equalTo(2)); + assertThat(entry.get("zipcode"), equalTo(94040)); + + } + + private static final class MockSearchFunction implements BiConsumer> { + private final SearchResponse mockResponse; + private final SetOnce capturedRequest; + private final Exception exception; + + MockSearchFunction(SearchResponse mockResponse) { + this.mockResponse = mockResponse; + this.exception = null; + this.capturedRequest = new SetOnce<>(); + } + + MockSearchFunction(Exception exception) { + this.mockResponse = null; + this.exception = exception; + this.capturedRequest = new SetOnce<>(); + } + + @Override + public void accept(SearchRequest request, BiConsumer handler) { + capturedRequest.set(request); + if (exception != null) { + handler.accept(null, exception); + } else { + handler.accept(mockResponse, null); + } + } + + SearchRequest getCapturedRequest() { + return capturedRequest.get(); + } + } + + public MockSearchFunction mockedSearchFunction() { + return new MockSearchFunction(mockResponse(Collections.emptyMap())); + } + + public MockSearchFunction mockedSearchFunction(Exception exception) { + return new MockSearchFunction(exception); + } + + public MockSearchFunction 
mockedSearchFunction(Map> documents) { + return new MockSearchFunction(mockResponse(documents)); + } + + public SearchResponse mockResponse(Map> documents) { + SearchHit[] searchHits = documents.entrySet().stream().map(e -> { + SearchHit searchHit = new SearchHit(randomInt(100), e.getKey(), new Text(MapperService.SINGLE_MAPPING_NAME), + Collections.emptyMap()); + try (XContentBuilder builder = XContentBuilder.builder(XContentType.SMILE.xContent())) { + builder.map(e.getValue()); + builder.flush(); + ByteArrayOutputStream outputStream = (ByteArrayOutputStream) builder.getOutputStream(); + searchHit.sourceRef(new BytesArray(outputStream.toByteArray())); + } catch (IOException ex) { + throw new UncheckedIOException(ex); + } + return searchHit; + }).toArray(SearchHit[]::new); + return new SearchResponse(new SearchResponseSections( + new SearchHits(searchHits, new TotalHits(documents.size(), TotalHits.Relation.EQUAL_TO), 1.0f), + new Aggregations(Collections.emptyList()), new Suggest(Collections.emptyList()), + false, false, null, 1), null, 1, 1, 0, 1, ShardSearchFailure.EMPTY_ARRAY, new SearchResponse.Clusters(1, 1, 0)); + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/LocalStateEnrich.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/LocalStateEnrich.java new file mode 100644 index 00000000000..ca8c4299355 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/LocalStateEnrich.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+package org.elasticsearch.xpack.enrich;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin;
+
+import java.nio.file.Path;
+
+public class LocalStateEnrich extends LocalStateCompositeXPackPlugin {
+
+    public LocalStateEnrich(final Settings settings, final Path configPath) throws Exception {
+        super(settings, configPath);
+
+        plugins.add(new EnrichPlugin(settings) {
+            @Override
+            protected XPackLicenseState getLicenseState() {
+                return LocalStateEnrich.this.getLicenseState();
+            }
+        });
+    }
+}
diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java
new file mode 100644
index 00000000000..904eefc8398
--- /dev/null
+++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java
@@ -0,0 +1,372 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.enrich;
+
+import org.apache.lucene.search.TotalHits;
+import org.apache.lucene.util.SetOnce;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.search.SearchResponseSections;
+import org.elasticsearch.action.search.ShardSearchFailure;
+import org.elasticsearch.cluster.routing.Preference;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.text.Text;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.IndexNotFoundException;
+import org.elasticsearch.index.VersionType;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
+import org.elasticsearch.index.query.TermQueryBuilder;
+import org.elasticsearch.index.query.TermsQueryBuilder;
+import org.elasticsearch.ingest.IngestDocument;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.aggregations.Aggregations;
+import org.elasticsearch.search.suggest.Suggest;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.BiConsumer;
+
+import static org.hamcrest.Matchers.emptyArray;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+
+public class MatchProcessorTests extends ESTestCase {
+
+    public void testBasics() throws Exception {
+        int maxMatches = randomIntBetween(1, 8);
+        MockSearchFunction mockSearch = mockedSearchFunction(mapOf("elastic.co", mapOf("globalRank", 451, "tldRank",23, "tld", "co")));
+        MatchProcessor processor = new MatchProcessor("_tag", mockSearch, "_name", "domain", "entry", true, false, "domain", maxMatches);
+        IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", "_routing", 1L, VersionType.INTERNAL,
+            Collections.singletonMap("domain", "elastic.co"));
+        // Run
+        IngestDocument[] holder = new IngestDocument[1];
+        processor.execute(ingestDocument, (result, e) -> holder[0] = result);
+        assertThat(holder[0], notNullValue());
+        // Check request
+        SearchRequest request = mockSearch.getCapturedRequest();
+        assertThat(request.indices().length, equalTo(1));
+        assertThat(request.indices()[0], equalTo(".enrich-_name"));
+        assertThat(request.preference(), equalTo(Preference.LOCAL.type()));
+        assertThat(request.source().size(), equalTo(maxMatches));
+        assertThat(request.source().trackScores(), equalTo(false));
+        assertThat(request.source().fetchSource().fetchSource(), equalTo(true));
+        assertThat(request.source().fetchSource().excludes(), emptyArray());
+        assertThat(request.source().fetchSource().includes(), emptyArray());
+        assertThat(request.source().query(), instanceOf(ConstantScoreQueryBuilder.class));
+        assertThat(((ConstantScoreQueryBuilder) request.source().query()).innerQuery(), instanceOf(TermQueryBuilder.class));
+        TermQueryBuilder termQueryBuilder = (TermQueryBuilder) ((ConstantScoreQueryBuilder) request.source().query()).innerQuery();
+        assertThat(termQueryBuilder.fieldName(), equalTo("domain"));
+        assertThat(termQueryBuilder.value(), equalTo("elastic.co"));
+        // Check result
+        Map<?, ?> entry;
+        if (maxMatches == 1) {
+            entry = ingestDocument.getFieldValue("entry", Map.class);
+        } else {
+            List<?> entries = ingestDocument.getFieldValue("entry", List.class);
+            entry = (Map<?, ?>) entries.get(0);
+        }
+        assertThat(entry.size(), equalTo(3));
+        assertThat(entry.get("globalRank"), equalTo(451));
+        assertThat(entry.get("tldRank"), equalTo(23));
+        assertThat(entry.get("tld"), equalTo("co"));
+    }
+
+    public void testNoMatch() throws Exception {
+        MockSearchFunction mockSearch = mockedSearchFunction();
+        MatchProcessor processor = new MatchProcessor("_tag", mockSearch, "_name", "domain", "entry", true, false, "domain", 1);
+        IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", "_routing", 1L, VersionType.INTERNAL,
+            Collections.singletonMap("domain", "elastic.com"));
+        int numProperties = ingestDocument.getSourceAndMetadata().size();
+        // Run
+        IngestDocument[] holder = new IngestDocument[1];
+        processor.execute(ingestDocument, (result, e) -> holder[0] = result);
+        assertThat(holder[0], notNullValue());
+        // Check request
+        SearchRequest request = mockSearch.getCapturedRequest();
+        assertThat(request.indices().length, equalTo(1));
+        assertThat(request.indices()[0], equalTo(".enrich-_name"));
+        assertThat(request.preference(), equalTo(Preference.LOCAL.type()));
+        assertThat(request.source().size(), equalTo(1));
+        assertThat(request.source().trackScores(), equalTo(false));
+        assertThat(request.source().fetchSource().fetchSource(), equalTo(true));
+        assertThat(request.source().fetchSource().includes(), emptyArray());
+        assertThat(request.source().fetchSource().excludes(), emptyArray());
+        assertThat(request.source().query(), instanceOf(ConstantScoreQueryBuilder.class));
+        assertThat(((ConstantScoreQueryBuilder) request.source().query()).innerQuery(), instanceOf(TermQueryBuilder.class));
+        TermQueryBuilder termQueryBuilder = (TermQueryBuilder) ((ConstantScoreQueryBuilder) request.source().query()).innerQuery();
+        assertThat(termQueryBuilder.fieldName(), equalTo("domain"));
+        assertThat(termQueryBuilder.value(), equalTo("elastic.com"));
+        // Check result
+        assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(numProperties));
+    }
+
+    public void testSearchFailure() throws Exception {
+        String indexName = ".enrich-_name";
+        MockSearchFunction mockSearch = mockedSearchFunction(new IndexNotFoundException(indexName));
+        MatchProcessor processor = new MatchProcessor("_tag", mockSearch, "_name", "domain", "entry", true, false, "domain", 1);
+        IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", "_routing", 1L, VersionType.INTERNAL,
+            Collections.singletonMap("domain", "elastic.com"));
+        // Run
+        IngestDocument[] resultHolder = new IngestDocument[1];
+        Exception[] exceptionHolder = new Exception[1];
+        processor.execute(ingestDocument, (result, e) -> {
+            resultHolder[0] = result;
+            exceptionHolder[0] = e;
+        });
+        assertThat(resultHolder[0], nullValue());
+        assertThat(exceptionHolder[0], notNullValue());
+        assertThat(exceptionHolder[0], instanceOf(IndexNotFoundException.class));
+        // Check request
+        SearchRequest request = mockSearch.getCapturedRequest();
+        assertThat(request.indices().length, equalTo(1));
+        assertThat(request.indices()[0], equalTo(".enrich-_name"));
+        assertThat(request.preference(), equalTo(Preference.LOCAL.type()));
+        assertThat(request.source().size(), equalTo(1));
+        assertThat(request.source().trackScores(), equalTo(false));
+        assertThat(request.source().fetchSource().fetchSource(), equalTo(true));
+        assertThat(request.source().fetchSource().includes(), emptyArray());
+        assertThat(request.source().fetchSource().excludes(), emptyArray());
+        assertThat(request.source().query(), instanceOf(ConstantScoreQueryBuilder.class));
+        assertThat(((ConstantScoreQueryBuilder) request.source().query()).innerQuery(), instanceOf(TermQueryBuilder.class));
+        TermQueryBuilder termQueryBuilder = (TermQueryBuilder) ((ConstantScoreQueryBuilder) request.source().query()).innerQuery();
+        assertThat(termQueryBuilder.fieldName(), equalTo("domain"));
+        assertThat(termQueryBuilder.value(), equalTo("elastic.com"));
+        // Check result
+        assertThat(exceptionHolder[0].getMessage(), equalTo("no such index [" + indexName + "]"));
+    }
+
+    public void testIgnoreKeyMissing() throws Exception {
+        {
+            MatchProcessor processor =
+                new MatchProcessor("_tag", mockedSearchFunction(), "_name", "domain", "entry", true, true, "domain", 1);
+            IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", "_routing", 1L, VersionType.INTERNAL, mapOf());
+
+            assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(6));
+            IngestDocument[] holder = new IngestDocument[1];
+            processor.execute(ingestDocument, (result, e) -> holder[0] = result);
+            assertThat(holder[0], notNullValue());
+            assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(6));
+        }
+        {
+            MatchProcessor processor =
+                new MatchProcessor("_tag", mockedSearchFunction(), "_name", "domain", "entry", true, false, "domain", 1);
+            IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", "_routing", 1L, VersionType.INTERNAL, mapOf());
+            IngestDocument[] resultHolder = new IngestDocument[1];
+            Exception[] exceptionHolder = new Exception[1];
+            processor.execute(ingestDocument, (result, e) -> {
+                resultHolder[0] = result;
+                exceptionHolder[0] = e;
+            });
+            assertThat(resultHolder[0], nullValue());
+            assertThat(exceptionHolder[0], notNullValue());
+            assertThat(exceptionHolder[0], instanceOf(IllegalArgumentException.class));
+        }
+    }
+
+    public void testExistingFieldWithOverrideDisabled() throws Exception {
+        MockSearchFunction mockSearch = mockedSearchFunction(mapOf("elastic.co", mapOf("globalRank", 451, "tldRank",23, "tld", "co")));
+        MatchProcessor processor = new MatchProcessor("_tag", mockSearch, "_name", "domain", "entry", false, false, "domain", 1);
+
+        IngestDocument ingestDocument = new IngestDocument(new HashMap<>(mapOf("domain", "elastic.co", "tld", "tld")), mapOf());
+        IngestDocument[] resultHolder = new IngestDocument[1];
+        Exception[] exceptionHolder = new Exception[1];
+        processor.execute(ingestDocument, (result, e) -> {
+            resultHolder[0] = result;
+            exceptionHolder[0] = e;
+        });
+        assertThat(exceptionHolder[0], nullValue());
+        assertThat(resultHolder[0].hasField("tld"), equalTo(true));
+        assertThat(resultHolder[0].getFieldValue("tld", Object.class), equalTo("tld"));
+    }
+
+    public void testExistingNullFieldWithOverrideDisabled() throws Exception {
+        MockSearchFunction mockSearch = mockedSearchFunction(mapOf("elastic.co", mapOf("globalRank", 451, "tldRank",23, "tld", "co")));
+        MatchProcessor processor = new MatchProcessor("_tag", mockSearch, "_name", "domain", "entry", false, false, "domain", 1);
+
+        Map<String, Object> source = new HashMap<>();
+        source.put("domain", "elastic.co");
+        source.put("tld", null);
+        IngestDocument ingestDocument = new IngestDocument(source, mapOf());
+        IngestDocument[] resultHolder = new IngestDocument[1];
+        Exception[] exceptionHolder = new Exception[1];
+        processor.execute(ingestDocument, (result, e) -> {
+            resultHolder[0] = result;
+            exceptionHolder[0] = e;
+        });
+        assertThat(exceptionHolder[0], nullValue());
+        assertThat(resultHolder[0].hasField("tld"), equalTo(true));
+        assertThat(resultHolder[0].getFieldValue("tld", Object.class), equalTo(null));
+    }
+
+    public void testNumericValue() {
+        MockSearchFunction mockSearch = mockedSearchFunction(mapOf(2, mapOf("globalRank", 451, "tldRank", 23, "tld", "co")));
+        MatchProcessor processor =
+            new MatchProcessor("_tag", mockSearch, "_name", "domain", "entry", false, true, "domain", 1);
+        IngestDocument ingestDocument =
+            new IngestDocument("_index", "_type", "_id", "_routing", 1L, VersionType.INTERNAL, mapOf("domain", 2));
+
+        // Execute
+        IngestDocument[] holder = new IngestDocument[1];
+        processor.execute(ingestDocument, (result, e) -> holder[0] = result);
+        assertThat(holder[0], notNullValue());
+
+        // Check request
+        SearchRequest request = mockSearch.getCapturedRequest();
+        assertThat(request.source().query(), instanceOf(ConstantScoreQueryBuilder.class));
+        assertThat(((ConstantScoreQueryBuilder) request.source().query()).innerQuery(), instanceOf(TermQueryBuilder.class));
+        TermQueryBuilder termQueryBuilder = (TermQueryBuilder) ((ConstantScoreQueryBuilder) request.source().query()).innerQuery();
+        assertThat(termQueryBuilder.fieldName(), equalTo("domain"));
+        assertThat(termQueryBuilder.value(), equalTo(2));
+
+        // Check result
+        Map<?, ?> entry = ingestDocument.getFieldValue("entry", Map.class);
+        assertThat(entry.size(), equalTo(3));
+        assertThat(entry.get("globalRank"), equalTo(451));
+        assertThat(entry.get("tldRank"), equalTo(23));
+        assertThat(entry.get("tld"), equalTo("co"));
+    }
+
+    public void testArray() {
+        MockSearchFunction mockSearch =
+            mockedSearchFunction(mapOf(Arrays.asList("1", "2"), mapOf("globalRank", 451, "tldRank", 23, "tld", "co")));
+        MatchProcessor processor =
+            new MatchProcessor("_tag", mockSearch, "_name", "domain", "entry", false, true, "domain", 1);
+        IngestDocument ingestDocument =
+            new IngestDocument("_index", "_type", "_id", "_routing", 1L, VersionType.INTERNAL, mapOf("domain", Arrays.asList("1", "2")));
+
+        // Execute
+        IngestDocument[] holder = new IngestDocument[1];
+        processor.execute(ingestDocument, (result, e) -> holder[0] = result);
+        assertThat(holder[0], notNullValue());
+
+        // Check request
+        SearchRequest request = mockSearch.getCapturedRequest();
+        assertThat(request.source().query(), instanceOf(ConstantScoreQueryBuilder.class));
+        assertThat(((ConstantScoreQueryBuilder) request.source().query()).innerQuery(), instanceOf(TermsQueryBuilder.class));
+        TermsQueryBuilder termQueryBuilder = (TermsQueryBuilder) ((ConstantScoreQueryBuilder) request.source().query()).innerQuery();
+        assertThat(termQueryBuilder.fieldName(), equalTo("domain"));
+        assertThat(termQueryBuilder.values().size(), equalTo(2));
+        assertThat(termQueryBuilder.values().get(0), equalTo("1"));
+        assertThat(termQueryBuilder.values().get(1), equalTo("2"));
+
+        // Check result
+        Map<?, ?> entry = ingestDocument.getFieldValue("entry", Map.class);
+        assertThat(entry.size(), equalTo(3));
+        assertThat(entry.get("globalRank"), equalTo(451));
+        assertThat(entry.get("tldRank"), equalTo(23));
+        assertThat(entry.get("tld"), equalTo("co"));
+    }
+
+    private static final class MockSearchFunction implements BiConsumer<SearchRequest, BiConsumer<SearchResponse, Exception>> {
+        private final SearchResponse mockResponse;
+        private final SetOnce<SearchRequest> capturedRequest;
+        private final Exception exception;
+
+        MockSearchFunction(SearchResponse mockResponse) {
+            this.mockResponse = mockResponse;
+            this.exception = null;
+            this.capturedRequest = new SetOnce<>();
+        }
+
+        MockSearchFunction(Exception exception) {
+            this.mockResponse = null;
+            this.exception = exception;
+            this.capturedRequest = new SetOnce<>();
+        }
+
+        @Override
+        public void accept(SearchRequest request, BiConsumer<SearchResponse, Exception> handler) {
+            capturedRequest.set(request);
+            if (exception != null) {
+                handler.accept(null, exception);
+            } else {
+                handler.accept(mockResponse, null);
+            }
+        }
+
+        SearchRequest getCapturedRequest() {
+            return capturedRequest.get();
+        }
+    }
+
+    public MockSearchFunction mockedSearchFunction() {
+        return new MockSearchFunction(mockResponse(Collections.emptyMap()));
+    }
+
+    public MockSearchFunction mockedSearchFunction(Exception exception) {
+        return new MockSearchFunction(exception);
+    }
+
+    public MockSearchFunction mockedSearchFunction(Map<?, Map<String, ?>> documents) {
+        return new MockSearchFunction(mockResponse(documents));
+    }
+
+    public SearchResponse mockResponse(Map<?, Map<String, ?>> documents) {
+        SearchHit[] searchHits = documents.entrySet().stream().map(e -> {
+            SearchHit searchHit = new SearchHit(randomInt(100), e.getKey().toString(), new Text(MapperService.SINGLE_MAPPING_NAME),
+                Collections.emptyMap());
+            try (XContentBuilder builder = XContentBuilder.builder(XContentType.SMILE.xContent())) {
+                builder.map(e.getValue());
+                builder.flush();
+                ByteArrayOutputStream outputStream = (ByteArrayOutputStream) builder.getOutputStream();
+                searchHit.sourceRef(new BytesArray(outputStream.toByteArray()));
+            } catch (IOException ex) {
+                throw new UncheckedIOException(ex);
+            }
+            return searchHit;
+        }).toArray(SearchHit[]::new);
+        return new SearchResponse(new SearchResponseSections(
+            new SearchHits(searchHits, new TotalHits(documents.size(), TotalHits.Relation.EQUAL_TO), 1.0f),
+            new Aggregations(Collections.emptyList()), new Suggest(Collections.emptyList()),
+            false, false, null, 1), null, 1, 1, 0, 1, ShardSearchFailure.EMPTY_ARRAY, new SearchResponse.Clusters(1, 1, 0));
+    }
+
+    static <K, V> Map<K, V> mapOf() {
+        return Collections.emptyMap();
+    }
+
+    static <K, V> Map<K, V> mapOf(K key1, V value1) {
+        Map<K, V> map = new HashMap<>();
+        map.put(key1, value1);
+        return map;
+    }
+
+    static <K, V> Map<K, V> mapOf(K key1, V value1, K key2, V value2) {
+        Map<K, V> map = new HashMap<>();
+        map.put(key1, value1);
+        map.put(key2, value2);
+        return map;
+    }
+
+    static Map<String, Object> mapOf(String key1, Object value1, String key2, Object value2, String key3, Object value3) {
+        Map<String, Object> map = new HashMap<>();
+        map.put(key1, value1);
+        map.put(key2, value2);
+        map.put(key3, value3);
+        return map;
+    }
+
+    static <K, V> Map<K, V> mapOf(K key1, V value1, K key2, V value2, K key3, V value3, K key4, V value4) {
+        Map<K, V> map = new HashMap<>();
+        map.put(key1, value1);
+        map.put(key2, value2);
+        map.put(key3, value3);
+        map.put(key4, value4);
+        return map;
+    }
+}
diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java
new file mode 100644
index 00000000000..19d3df511b4
--- /dev/null
+++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java
@@ -0,0 +1,312 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */ +package org.elasticsearch.xpack.enrich.action; + +import org.apache.logging.log4j.util.BiConsumer; +import org.apache.lucene.search.TotalHits; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.MultiSearchResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.mockito.Mockito; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.elasticsearch.xpack.enrich.action.EnrichCoordinatorProxyAction.Coordinator; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; + +public class CoordinatorTests extends ESTestCase { + + public void testCoordinateLookups() { + MockLookupFunction lookupFunction = new MockLookupFunction(); + Coordinator coordinator = new Coordinator(lookupFunction, 5, 1, 100); + + List> searchActionListeners = new ArrayList<>(); + for (int 
i = 0; i < 9; i++) { + SearchRequest searchRequest = new SearchRequest("my-index"); + searchRequest.source().query(new MatchQueryBuilder("my_field", String.valueOf(i))); + @SuppressWarnings("unchecked") + ActionListener actionListener = Mockito.mock(ActionListener.class); + searchActionListeners.add(actionListener); + coordinator.queue.add(new Coordinator.Slot(searchRequest, actionListener)); + } + + SearchRequest searchRequest = new SearchRequest("my-index"); + searchRequest.source().query(new MatchQueryBuilder("my_field", String.valueOf(10))); + @SuppressWarnings("unchecked") + ActionListener actionListener = Mockito.mock(ActionListener.class); + searchActionListeners.add(actionListener); + coordinator.schedule(searchRequest, actionListener); + + // First batch of search requests have been sent off: + // (However still 5 should remain in the queue) + assertThat(coordinator.queue.size(), equalTo(5)); + assertThat(coordinator.remoteRequestsCurrent.get(), equalTo(1)); + assertThat(lookupFunction.capturedRequests.size(), equalTo(1)); + assertThat(lookupFunction.capturedRequests.get(0).requests().size(), equalTo(5)); + + // Nothing should happen now, because there is an outstanding request and max number of requests has been set to 1: + coordinator.coordinateLookups(); + assertThat(coordinator.queue.size(), equalTo(5)); + assertThat(coordinator.remoteRequestsCurrent.get(), equalTo(1)); + assertThat(lookupFunction.capturedRequests.size(), equalTo(1)); + + SearchResponse emptyResponse = emptySearchResponse(); + // Replying a response and that should trigger another coordination round + MultiSearchResponse.Item[] responseItems = new MultiSearchResponse.Item[5]; + for (int i = 0; i < 5; i++) { + responseItems[i] = new MultiSearchResponse.Item(emptyResponse, null); + } + lookupFunction.capturedConsumers.get(0).accept(new MultiSearchResponse(responseItems, 1L), null); + assertThat(coordinator.queue.size(), equalTo(0)); + assertThat(coordinator.remoteRequestsCurrent.get(), 
equalTo(1)); + assertThat(lookupFunction.capturedRequests.size(), equalTo(2)); + + // Replying last response, resulting in an empty queue and no outstanding requests. + responseItems = new MultiSearchResponse.Item[5]; + for (int i = 0; i < 5; i++) { + responseItems[i] = new MultiSearchResponse.Item(emptyResponse, null); + } + lookupFunction.capturedConsumers.get(1).accept(new MultiSearchResponse(responseItems, 1L), null); + assertThat(coordinator.queue.size(), equalTo(0)); + assertThat(coordinator.remoteRequestsCurrent.get(), equalTo(0)); + assertThat(lookupFunction.capturedRequests.size(), equalTo(2)); + + // All individual action listeners for the search requests should have been invoked: + for (ActionListener searchActionListener : searchActionListeners) { + Mockito.verify(searchActionListener).onResponse(Mockito.eq(emptyResponse)); + } + } + + public void testCoordinateLookupsMultiSearchError() { + MockLookupFunction lookupFunction = new MockLookupFunction(); + Coordinator coordinator = new Coordinator(lookupFunction, 5, 1, 100); + + List> searchActionListeners = new ArrayList<>(); + for (int i = 0; i < 4; i++) { + SearchRequest searchRequest = new SearchRequest("my-index"); + searchRequest.source().query(new MatchQueryBuilder("my_field", String.valueOf(i))); + @SuppressWarnings("unchecked") + ActionListener actionListener = Mockito.mock(ActionListener.class); + searchActionListeners.add(actionListener); + coordinator.queue.add(new Coordinator.Slot(searchRequest, actionListener)); + } + + SearchRequest searchRequest = new SearchRequest("my-index"); + searchRequest.source().query(new MatchQueryBuilder("my_field", String.valueOf(5))); + @SuppressWarnings("unchecked") + ActionListener actionListener = Mockito.mock(ActionListener.class); + searchActionListeners.add(actionListener); + coordinator.schedule(searchRequest, actionListener); + + // First batch of search requests have been sent off: + // (However still 5 should remain in the queue) + 
assertThat(coordinator.queue.size(), equalTo(0)); + assertThat(coordinator.remoteRequestsCurrent.get(), equalTo(1)); + assertThat(lookupFunction.capturedRequests.size(), equalTo(1)); + assertThat(lookupFunction.capturedRequests.get(0).requests().size(), equalTo(5)); + + RuntimeException e = new RuntimeException(); + lookupFunction.capturedConsumers.get(0).accept(null, e); + assertThat(coordinator.queue.size(), equalTo(0)); + assertThat(coordinator.remoteRequestsCurrent.get(), equalTo(0)); + assertThat(lookupFunction.capturedRequests.size(), equalTo(1)); + + // All individual action listeners for the search requests should have been invoked: + for (ActionListener searchActionListener : searchActionListeners) { + Mockito.verify(searchActionListener).onFailure(Mockito.eq(e)); + } + } + + public void testCoordinateLookupsMultiSearchItemError() { + MockLookupFunction lookupFunction = new MockLookupFunction(); + Coordinator coordinator = new Coordinator(lookupFunction, 5, 1, 100); + + List> searchActionListeners = new ArrayList<>(); + for (int i = 0; i < 4; i++) { + SearchRequest searchRequest = new SearchRequest("my-index"); + searchRequest.source().query(new MatchQueryBuilder("my_field", String.valueOf(i))); + @SuppressWarnings("unchecked") + ActionListener actionListener = Mockito.mock(ActionListener.class); + searchActionListeners.add(actionListener); + coordinator.queue.add(new Coordinator.Slot(searchRequest, actionListener)); + } + + SearchRequest searchRequest = new SearchRequest("my-index"); + searchRequest.source().query(new MatchQueryBuilder("my_field", String.valueOf(5))); + @SuppressWarnings("unchecked") + ActionListener actionListener = Mockito.mock(ActionListener.class); + searchActionListeners.add(actionListener); + coordinator.schedule(searchRequest, actionListener); + + // First batch of search requests have been sent off: + // (However still 5 should remain in the queue) + assertThat(coordinator.queue.size(), equalTo(0)); + 
assertThat(coordinator.remoteRequestsCurrent.get(), equalTo(1)); + assertThat(lookupFunction.capturedRequests.size(), equalTo(1)); + assertThat(lookupFunction.capturedRequests.get(0).requests().size(), equalTo(5)); + + RuntimeException e = new RuntimeException(); + // Replying a response and that should trigger another coordination round + MultiSearchResponse.Item[] responseItems = new MultiSearchResponse.Item[5]; + for (int i = 0; i < 5; i++) { + responseItems[i] = new MultiSearchResponse.Item(null, e); + } + lookupFunction.capturedConsumers.get(0).accept(new MultiSearchResponse(responseItems, 1L), null); + assertThat(coordinator.queue.size(), equalTo(0)); + assertThat(coordinator.remoteRequestsCurrent.get(), equalTo(0)); + assertThat(lookupFunction.capturedRequests.size(), equalTo(1)); + + // All individual action listeners for the search requests should have been invoked: + for (ActionListener searchActionListener : searchActionListeners) { + Mockito.verify(searchActionListener).onFailure(Mockito.eq(e)); + } + } + + public void testQueueing() throws Exception { + MockLookupFunction lookupFunction = new MockLookupFunction(); + Coordinator coordinator = new Coordinator(lookupFunction, 1, 1, 1); + coordinator.queue.add(new Coordinator.Slot(new SearchRequest(), ActionListener.wrap(() -> {}))); + + AtomicBoolean completed = new AtomicBoolean(false); + SearchRequest searchRequest = new SearchRequest(); + Thread t = new Thread(() -> { + coordinator.schedule(searchRequest, ActionListener.wrap(() -> {})); + completed.set(true); + }); + t.start(); + assertBusy(() -> { + assertThat(t.getState(), equalTo(Thread.State.WAITING)); + assertThat(completed.get(), is(false)); + }); + + coordinator.coordinateLookups(); + assertBusy(() -> { + assertThat(completed.get(), is(true)); + }); + + lookupFunction.capturedConsumers.get(0).accept( + new MultiSearchResponse(new MultiSearchResponse.Item[]{new MultiSearchResponse.Item(emptySearchResponse(), null)}, 1L), null); + 
assertThat(coordinator.queue.size(), equalTo(0)); + assertThat(lookupFunction.capturedRequests.size(), equalTo(2)); + assertThat(lookupFunction.capturedRequests.get(1).requests().get(0), sameInstance(searchRequest)); + } + + public void testLookupFunction() { + MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); + List indices = Arrays.asList("index1", "index2", "index3"); + for (String index : indices) { + multiSearchRequest.add(new SearchRequest(index)); + multiSearchRequest.add(new SearchRequest(index)); + } + + List requests = new ArrayList<>(); + ElasticsearchClient client = new ElasticsearchClient() { + + @Override + public ActionFuture execute( + ActionType action, Request request) { + throw new UnsupportedOperationException(); + } + + @Override + public void execute(ActionType action, + Request request, + ActionListener listener) { + requests.add((EnrichShardMultiSearchAction.Request) request); + } + + @Override + public ThreadPool threadPool() { + throw new UnsupportedOperationException(); + } + }; + BiConsumer> consumer = Coordinator.lookupFunction(client); + consumer.accept(multiSearchRequest, null); + + assertThat(requests.size(), equalTo(indices.size())); + requests.sort(Comparator.comparing(SingleShardRequest::index)); + for (int i = 0; i < indices.size(); i++) { + String index = indices.get(i); + assertThat(requests.get(i).index(), equalTo(index)); + assertThat(requests.get(i).getMultiSearchRequest().requests().size(), equalTo(2)); + assertThat(requests.get(i).getMultiSearchRequest().requests().get(0).indices().length, equalTo(1)); + assertThat(requests.get(i).getMultiSearchRequest().requests().get(0).indices()[0], equalTo(index)); + } + } + + public void testReduce() { + Map>> itemsPerIndex = new HashMap<>(); + Map> shardResponses = new HashMap<>(); + + MultiSearchResponse.Item item1 = new MultiSearchResponse.Item(emptySearchResponse(), null); + itemsPerIndex.put("index1", Arrays.asList(new Tuple<>(0, null), new Tuple<>(1, null), new 
Tuple<>(2, null))); + shardResponses.put("index1", new Tuple<>(new MultiSearchResponse(new MultiSearchResponse.Item[]{item1, item1, item1}, 1), null)); + + Exception failure = new RuntimeException(); + itemsPerIndex.put("index2", Arrays.asList(new Tuple<>(3, null), new Tuple<>(4, null), new Tuple<>(5, null))); + shardResponses.put("index2", new Tuple<>(null, failure)); + + MultiSearchResponse.Item item2 = new MultiSearchResponse.Item(emptySearchResponse(), null); + itemsPerIndex.put("index3", Arrays.asList(new Tuple<>(6, null), new Tuple<>(7, null), new Tuple<>(8, null))); + shardResponses.put("index3", new Tuple<>(new MultiSearchResponse(new MultiSearchResponse.Item[]{item2, item2, item2}, 1), null)); + + MultiSearchResponse result = Coordinator.reduce(9, itemsPerIndex, shardResponses); + assertThat(result.getResponses().length, equalTo(9)); + assertThat(result.getResponses()[0], sameInstance(item1)); + assertThat(result.getResponses()[1], sameInstance(item1)); + assertThat(result.getResponses()[2], sameInstance(item1)); + assertThat(result.getResponses()[3].getFailure(), sameInstance(failure)); + assertThat(result.getResponses()[4].getFailure(), sameInstance(failure)); + assertThat(result.getResponses()[5].getFailure(), sameInstance(failure)); + assertThat(result.getResponses()[6], sameInstance(item2)); + assertThat(result.getResponses()[7], sameInstance(item2)); + assertThat(result.getResponses()[8], sameInstance(item2)); + } + + private static SearchResponse emptySearchResponse() { + InternalSearchResponse response = new InternalSearchResponse(new SearchHits(new SearchHit[0], + new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN), InternalAggregations.EMPTY, null, null, false, null, 1); + return new SearchResponse(response, null, 1, 1, 0, 100, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + } + + private class MockLookupFunction implements BiConsumer> { + + private final List capturedRequests = new ArrayList<>(); + private final List> 
capturedConsumers = new ArrayList<>(); + + @Override + public void accept(MultiSearchRequest multiSearchRequest, BiConsumer consumer) { + capturedRequests.add(multiSearchRequest); + capturedConsumers.add(consumer); + } + } + +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/DeleteEnrichPolicyActionRequestTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/DeleteEnrichPolicyActionRequestTests.java new file mode 100644 index 00000000000..e5a32919198 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/DeleteEnrichPolicyActionRequestTests.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.enrich.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; + +public class DeleteEnrichPolicyActionRequestTests extends AbstractWireSerializingTestCase { + @Override + protected DeleteEnrichPolicyAction.Request createTestInstance() { + return new DeleteEnrichPolicyAction.Request(randomAlphaOfLength(4)); + } + + @Override + protected Writeable.Reader instanceReader() { + return DeleteEnrichPolicyAction.Request::new; + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java new file mode 100644 index 00000000000..5d1bb6e98c4 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java @@ -0,0 +1,97 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.enrich.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.MultiSearchResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.enrich.LocalStateEnrich; + +import java.util.Collection; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class EnrichShardMultiSearchActionTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return Collections.singletonList(LocalStateEnrich.class); + } + + public void testExecute() throws Exception { + XContentBuilder source = XContentBuilder.builder(XContentType.SMILE.xContent()); + source.startObject(); + source.field("key1", "value1"); + source.field("key2", "value2"); + source.endObject(); + + String indexName = EnrichPolicy.ENRICH_INDEX_NAME_BASE + "1"; + IndexRequest indexRequest = new IndexRequest(indexName); + indexRequest.source(source); + client().index(indexRequest).actionGet(); + client().admin().indices().refresh(new RefreshRequest(indexName)).actionGet(); + + int numSearches = 
randomIntBetween(2, 32); + MultiSearchRequest request = new MultiSearchRequest(); + for (int i = 0; i < numSearches; i++) { + SearchRequest searchRequest = new SearchRequest(indexName); + searchRequest.source().from(0); + searchRequest.source().size(1); + searchRequest.source().query(new MatchAllQueryBuilder()); + searchRequest.source().fetchSource("key1", null); + request.add(searchRequest); + } + + MultiSearchResponse result = + client().execute(EnrichShardMultiSearchAction.INSTANCE, new EnrichShardMultiSearchAction.Request(request)).actionGet(); + assertThat(result.getResponses().length, equalTo(numSearches)); + for (int i = 0; i < numSearches; i++) { + assertThat(result.getResponses()[i].isFailure(), is(false)); + assertThat(result.getResponses()[i].getResponse().getHits().getTotalHits().value, equalTo(1L)); + assertThat(result.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().size(), equalTo(1)); + assertThat(result.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().get("key1"), equalTo("value1")); + } + } + + public void testNonEnrichIndex() throws Exception { + createIndex("index"); + MultiSearchRequest request = new MultiSearchRequest(); + request.add(new SearchRequest("index")); + Exception e = expectThrows(ActionRequestValidationException.class, + () -> client().execute(EnrichShardMultiSearchAction.INSTANCE, new EnrichShardMultiSearchAction.Request(request)).actionGet()); + assertThat(e.getMessage(), equalTo("Validation Failed: 1: index [index] is not an enrich index;")); + } + + public void testMultipleShards() throws Exception { + String indexName = EnrichPolicy.ENRICH_INDEX_NAME_BASE + "1"; + createIndex(indexName, Settings.builder().put("index.number_of_shards", 2).build()); + MultiSearchRequest request = new MultiSearchRequest(); + request.add(new SearchRequest(indexName)); + Exception e = expectThrows(IllegalStateException.class, + () -> client().execute(EnrichShardMultiSearchAction.INSTANCE, new 
EnrichShardMultiSearchAction.Request(request)).actionGet()); + assertThat(e.getMessage(), equalTo("index [.enrich-1] should have 1 shard, but has 2 shards")); + } + + public void testMultipleIndices() throws Exception { + MultiSearchRequest request = new MultiSearchRequest(); + request.add(new SearchRequest("index1")); + request.add(new SearchRequest("index2")); + expectThrows(AssertionError.class, () -> new EnrichShardMultiSearchAction.Request(request)); + } + +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichStatsResponseTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichStatsResponseTests.java new file mode 100644 index 00000000000..47b2a3396ae --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichStatsResponseTests.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.CoordinatorStats; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.ExecutingPolicy; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +public class EnrichStatsResponseTests extends AbstractWireSerializingTestCase { + + @Override + protected EnrichStatsAction.Response createTestInstance() { + int numExecutingPolicies = randomIntBetween(0, 16); + List executingPolicies = new ArrayList<>(numExecutingPolicies); + for (int i = 0; i < numExecutingPolicies; i++) { + TaskInfo taskInfo = randomTaskInfo(); + executingPolicies.add(new ExecutingPolicy(randomAlphaOfLength(4), taskInfo)); + } + int numCoordinatingStats = randomIntBetween(0, 16); + List coordinatorStats = new ArrayList<>(numCoordinatingStats); + for (int i = 0; i < numCoordinatingStats; i++) { + CoordinatorStats stats = new CoordinatorStats(randomAlphaOfLength(4), randomIntBetween(0, 8096), + randomIntBetween(0, 8096), randomNonNegativeLong(), randomNonNegativeLong()); + coordinatorStats.add(stats); + } + return new EnrichStatsAction.Response(executingPolicies, coordinatorStats); + } + + @Override + protected Writeable.Reader instanceReader() { + return EnrichStatsAction.Response::new; + } + + public static TaskInfo randomTaskInfo() { + TaskId taskId = new TaskId(randomAlphaOfLength(5), randomLong()); + String type = randomAlphaOfLength(5); + String action = randomAlphaOfLength(5); + String description = randomAlphaOfLength(5); + long startTime = randomLong(); + long runningTimeNanos = randomLong(); + boolean cancellable = 
randomBoolean(); + TaskId parentTaskId = TaskId.EMPTY_TASK_ID; + Map headers = randomBoolean() ? + Collections.emptyMap() : + Collections.singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(5)); + return new TaskInfo(taskId, type, action, description, null, startTime, runningTimeNanos, cancellable, parentTaskId, headers); + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/ExecuteEnrichPolicyActionRequestTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/ExecuteEnrichPolicyActionRequestTests.java new file mode 100644 index 00000000000..5e7fb69a129 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/ExecuteEnrichPolicyActionRequestTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; + +public class ExecuteEnrichPolicyActionRequestTests extends AbstractWireSerializingTestCase { + + @Override + protected ExecuteEnrichPolicyAction.Request createTestInstance() { + return new ExecuteEnrichPolicyAction.Request(randomAlphaOfLength(3)); + } + + @Override + protected Writeable.Reader instanceReader() { + return ExecuteEnrichPolicyAction.Request::new; + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionRequestTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionRequestTests.java new file mode 100644 index 00000000000..9223423b4e8 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionRequestTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; + +public class GetEnrichPolicyActionRequestTests extends AbstractWireSerializingTestCase { + + @Override + protected GetEnrichPolicyAction.Request createTestInstance() { + return new GetEnrichPolicyAction.Request(generateRandomStringArray(0, 4, false)); + } + + @Override + protected Writeable.Reader instanceReader() { + return GetEnrichPolicyAction.Request::new; + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionResponseTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionResponseTests.java new file mode 100644 index 00000000000..e9181abec66 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/GetEnrichPolicyActionResponseTests.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.xpack.enrich.EnrichPolicyTests.assertEqualPolicies; +import static org.elasticsearch.xpack.enrich.EnrichPolicyTests.randomEnrichPolicy; +import static org.hamcrest.core.IsEqual.equalTo; + +public class GetEnrichPolicyActionResponseTests extends AbstractSerializingTestCase { + + @Override + protected GetEnrichPolicyAction.Response doParseInstance(XContentParser parser) throws IOException { + Map policies = new HashMap<>(); + assert parser.nextToken() == XContentParser.Token.START_OBJECT; + assert parser.nextToken() == XContentParser.Token.FIELD_NAME; + assert parser.currentName().equals("policies"); + assert parser.nextToken() == XContentParser.Token.START_ARRAY; + + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + assert token == XContentParser.Token.START_OBJECT; + assert parser.nextToken() == XContentParser.Token.FIELD_NAME; + assert parser.currentName().equals("config"); + assert parser.nextToken() == XContentParser.Token.START_OBJECT; + EnrichPolicy.NamedPolicy policy = EnrichPolicy.NamedPolicy.fromXContent(parser); + policies.put(policy.getName(), policy.getPolicy()); + assert parser.nextToken() == XContentParser.Token.END_OBJECT; + } + + return new GetEnrichPolicyAction.Response(policies); + } + + @Override + protected GetEnrichPolicyAction.Response createTestInstance() { + Map items = new HashMap<>(); + for (int i = 0; i < randomIntBetween(0, 3); i++) { + EnrichPolicy policy = 
randomEnrichPolicy(XContentType.JSON); + items.put(randomAlphaOfLength(3), policy); + } + return new GetEnrichPolicyAction.Response(items); + } + + @Override + protected Writeable.Reader instanceReader() { + return GetEnrichPolicyAction.Response::new; + } + + @Override + protected void assertEqualInstances(GetEnrichPolicyAction.Response expectedInstance, GetEnrichPolicyAction.Response newInstance) { + assertNotSame(expectedInstance, newInstance); + // the tests shuffle around the policy query source xcontent type, so this is needed here + assertThat(expectedInstance.getPolicies().size(), equalTo(newInstance.getPolicies().size())); + // since the backing store is a treemap the list will be sorted so we can just check each + // instance is the same + for (int i = 0; i < expectedInstance.getPolicies().size(); i++) { + EnrichPolicy.NamedPolicy expected = expectedInstance.getPolicies().get(i); + EnrichPolicy.NamedPolicy newed = newInstance.getPolicies().get(i); + assertThat(expected.getName(), equalTo(newed.getName())); + assertEqualPolicies(expected.getPolicy(), newed.getPolicy()); + } + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/PutEnrichPolicyActionRequestTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/PutEnrichPolicyActionRequestTests.java new file mode 100644 index 00000000000..540356f7edd --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/PutEnrichPolicyActionRequestTests.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.enrich.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; + +import static org.elasticsearch.xpack.enrich.EnrichPolicyTests.randomEnrichPolicy; + +public class PutEnrichPolicyActionRequestTests extends AbstractWireSerializingTestCase { + + @Override + protected PutEnrichPolicyAction.Request createTestInstance() { + final EnrichPolicy policy = randomEnrichPolicy(XContentType.JSON); + return new PutEnrichPolicyAction.Request(randomAlphaOfLength(3), policy); + } + + @Override + protected Writeable.Reader instanceReader() { + return PutEnrichPolicyAction.Request::new; + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyActionTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyActionTests.java new file mode 100644 index 00000000000..9d03fa1b8e4 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyActionTests.java @@ -0,0 +1,227 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.enrich.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; +import org.elasticsearch.xpack.enrich.AbstractEnrichTestCase; +import org.elasticsearch.xpack.enrich.EnrichPolicyLocks; +import org.elasticsearch.xpack.enrich.EnrichStore; +import org.junit.After; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.xpack.enrich.EnrichPolicyTests.randomEnrichPolicy; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.core.IsInstanceOf.instanceOf; + +public class TransportDeleteEnrichPolicyActionTests extends AbstractEnrichTestCase { + + @After + private void cleanupPolicy() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + String name = "my-policy"; + + try { + deleteEnrichPolicy(name, clusterService); + } catch (Exception e) { + // if the enrich policy does not exist, then just keep going + } + + // fail if the state of this is left locked + EnrichPolicyLocks enrichPolicyLocks = getInstanceFromNode(EnrichPolicyLocks.class); + assertFalse(enrichPolicyLocks.captureExecutionState().isAnyPolicyInFlight()); + } + + public void testDeletePolicyDoesNotExistUnlocksPolicy() throws InterruptedException { + String fakeId = "fake-id"; + createIndex(EnrichPolicy.getBaseName(fakeId) + "-foo1"); + createIndex(EnrichPolicy.getBaseName(fakeId) + "-foo2"); + + final 
CountDownLatch latch = new CountDownLatch(1); + final AtomicReference reference = new AtomicReference<>(); + final TransportDeleteEnrichPolicyAction transportAction = node().injector().getInstance(TransportDeleteEnrichPolicyAction.class); + transportAction.execute(null, + new DeleteEnrichPolicyAction.Request(fakeId), + new ActionListener() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + fail(); + } + + public void onFailure(final Exception e) { + reference.set(e); + latch.countDown(); + } + }); + latch.await(); + assertNotNull(reference.get()); + assertThat(reference.get(), instanceOf(ResourceNotFoundException.class)); + assertThat(reference.get().getMessage(), equalTo("policy [fake-id] not found")); + + // fail if the state of this is left locked + EnrichPolicyLocks enrichPolicyLocks = getInstanceFromNode(EnrichPolicyLocks.class); + assertFalse(enrichPolicyLocks.captureExecutionState().isAnyPolicyInFlight()); + } + + public void testDeleteWithoutIndex() throws Exception { + EnrichPolicy policy = randomEnrichPolicy(XContentType.JSON); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + String name = "my-policy"; + + AtomicReference error = saveEnrichPolicy(name, policy, clusterService); + assertThat(error.get(), nullValue()); + + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference reference = new AtomicReference<>(); + final TransportDeleteEnrichPolicyAction transportAction = node().injector().getInstance(TransportDeleteEnrichPolicyAction.class); + transportAction.execute(null, + new DeleteEnrichPolicyAction.Request(name), + new ActionListener() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + reference.set(acknowledgedResponse); + latch.countDown(); + } + + public void onFailure(final Exception e) { + fail(); + } + }); + latch.await(); + assertNotNull(reference.get()); + assertTrue(reference.get().isAcknowledged()); + + EnrichPolicyLocks 
enrichPolicyLocks = getInstanceFromNode(EnrichPolicyLocks.class); + assertFalse(enrichPolicyLocks.captureExecutionState().isAnyPolicyInFlight()); + + assertNull(EnrichStore.getPolicy(name, clusterService.state())); + } + + public void testDeleteIsNotLocked() throws Exception { + EnrichPolicy policy = randomEnrichPolicy(XContentType.JSON); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + String name = "my-policy"; + + AtomicReference error = saveEnrichPolicy(name, policy, clusterService); + assertThat(error.get(), nullValue()); + + createIndex(EnrichPolicy.getBaseName(name) + "-foo1"); + createIndex(EnrichPolicy.getBaseName(name) + "-foo2"); + + client().admin().indices().prepareGetIndex().setIndices( + EnrichPolicy.getBaseName(name) + "-foo1", + EnrichPolicy.getBaseName(name) + "-foo2").get(); + + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference reference = new AtomicReference<>(); + final TransportDeleteEnrichPolicyAction transportAction = node().injector().getInstance(TransportDeleteEnrichPolicyAction.class); + transportAction.execute(null, + new DeleteEnrichPolicyAction.Request(name), + new ActionListener() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + reference.set(acknowledgedResponse); + latch.countDown(); + } + + public void onFailure(final Exception e) { + fail(); + } + }); + latch.await(); + assertNotNull(reference.get()); + assertTrue(reference.get().isAcknowledged()); + + expectThrows(IndexNotFoundException.class, () -> client().admin().indices().prepareGetIndex().setIndices( + EnrichPolicy.getBaseName(name) + "-foo1", + EnrichPolicy.getBaseName(name) + "-foo2").get()); + + EnrichPolicyLocks enrichPolicyLocks = getInstanceFromNode(EnrichPolicyLocks.class); + assertFalse(enrichPolicyLocks.captureExecutionState().isAnyPolicyInFlight()); + + assertNull(EnrichStore.getPolicy(name, clusterService.state())); + } + + public void testDeleteLocked() throws 
InterruptedException { + EnrichPolicy policy = randomEnrichPolicy(XContentType.JSON); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + String name = "my-policy"; + final TransportDeleteEnrichPolicyAction transportAction = node().injector().getInstance(TransportDeleteEnrichPolicyAction.class); + AtomicReference error = saveEnrichPolicy(name, policy, clusterService); + assertThat(error.get(), nullValue()); + + createIndex(EnrichPolicy.getBaseName(name) + "-foo1"); + createIndex(EnrichPolicy.getBaseName(name) + "-foo2"); + + EnrichPolicyLocks enrichPolicyLocks = getInstanceFromNode(EnrichPolicyLocks.class); + assertFalse(enrichPolicyLocks.captureExecutionState().isAnyPolicyInFlight()); + + enrichPolicyLocks.lockPolicy(name); + assertTrue(enrichPolicyLocks.captureExecutionState().isAnyPolicyInFlight()); + + { + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference reference = new AtomicReference<>(); + transportAction.execute(null, + new DeleteEnrichPolicyAction.Request(name), + new ActionListener() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + fail(); + } + + public void onFailure(final Exception e) { + reference.set(e); + latch.countDown(); + } + }); + latch.await(); + assertNotNull(reference.get()); + assertThat(reference.get(), instanceOf(EsRejectedExecutionException.class)); + assertThat(reference.get().getMessage(), + equalTo("Could not obtain lock because policy execution for [my-policy] is already in progress.")); + } + { + enrichPolicyLocks.releasePolicy(name); + assertFalse(enrichPolicyLocks.captureExecutionState().isAnyPolicyInFlight()); + + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference reference = new AtomicReference<>(); + + transportAction.execute(null, + new DeleteEnrichPolicyAction.Request(name), + new ActionListener() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + 
reference.set(acknowledgedResponse); + latch.countDown(); + } + + public void onFailure(final Exception e) { + fail(); + } + }); + latch.await(); + assertNotNull(reference.get()); + assertTrue(reference.get().isAcknowledged()); + + assertFalse(enrichPolicyLocks.captureExecutionState().isAnyPolicyInFlight()); + + assertNull(EnrichStore.getPolicy(name, clusterService.state())); + } + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyActionTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyActionTests.java new file mode 100644 index 00000000000..62ac1bf3766 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/TransportGetEnrichPolicyActionTests.java @@ -0,0 +1,234 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.enrich.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; +import org.elasticsearch.xpack.enrich.AbstractEnrichTestCase; +import org.elasticsearch.xpack.enrich.EnrichPolicyLocks; +import org.junit.After; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.xpack.enrich.EnrichPolicyTests.assertEqualPolicies; +import static org.elasticsearch.xpack.enrich.EnrichPolicyTests.randomEnrichPolicy; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class TransportGetEnrichPolicyActionTests extends AbstractEnrichTestCase { + + @After + private void cleanupPolicies() throws InterruptedException { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference reference = new AtomicReference<>(); + final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); + transportAction.execute(null, + new GetEnrichPolicyAction.Request(), + new ActionListener() { + @Override + public void onResponse(GetEnrichPolicyAction.Response response) { + reference.set(response); + latch.countDown(); + + } + + public void onFailure(final Exception e) { + fail(); + } + }); + latch.await(); + assertNotNull(reference.get()); + GetEnrichPolicyAction.Response response = reference.get(); + + for (EnrichPolicy.NamedPolicy policy: response.getPolicies()) { + try { + deleteEnrichPolicy(policy.getName(), clusterService); + } catch (Exception e) { + // if the enrich policy does not exist, then just keep going + } + } + + // fail if the 
state of this is left locked + EnrichPolicyLocks enrichPolicyLocks = getInstanceFromNode(EnrichPolicyLocks.class); + assertFalse(enrichPolicyLocks.captureExecutionState().isAnyPolicyInFlight()); + } + + public void testListPolicies() throws InterruptedException { + EnrichPolicy policy = randomEnrichPolicy(XContentType.JSON); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + String name = "my-policy"; + + AtomicReference error = saveEnrichPolicy(name, policy, clusterService); + assertThat(error.get(), nullValue()); + + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference reference = new AtomicReference<>(); + final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); + transportAction.execute(null, + // empty or null should return the same + randomBoolean() ? new GetEnrichPolicyAction.Request() : + new GetEnrichPolicyAction.Request(new String[]{}), + new ActionListener() { + @Override + public void onResponse(GetEnrichPolicyAction.Response response) { + reference.set(response); + latch.countDown(); + } + + public void onFailure(final Exception e) { + fail(); + } + }); + latch.await(); + assertNotNull(reference.get()); + GetEnrichPolicyAction.Response response = reference.get(); + + assertThat(response.getPolicies().size(), equalTo(1)); + + EnrichPolicy.NamedPolicy actualPolicy = response.getPolicies().get(0); + assertThat(name, equalTo(actualPolicy.getName())); + assertEqualPolicies(policy, actualPolicy.getPolicy()); + } + + public void testListEmptyPolicies() throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference reference = new AtomicReference<>(); + final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); + transportAction.execute(null, + new GetEnrichPolicyAction.Request(), + new ActionListener() { + @Override + public void 
onResponse(GetEnrichPolicyAction.Response response) { + reference.set(response); + latch.countDown(); + + } + + public void onFailure(final Exception e) { + fail(); + } + }); + latch.await(); + assertNotNull(reference.get()); + GetEnrichPolicyAction.Response response = reference.get(); + + assertThat(response.getPolicies().size(), equalTo(0)); + } + + public void testGetPolicy() throws InterruptedException { + EnrichPolicy policy = randomEnrichPolicy(XContentType.JSON); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + String name = "my-policy"; + + AtomicReference error = saveEnrichPolicy(name, policy, clusterService); + assertThat(error.get(), nullValue()); + + // save a second one to verify the count below on GET + error = saveEnrichPolicy("something-else", randomEnrichPolicy(XContentType.JSON), clusterService); + assertThat(error.get(), nullValue()); + + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference reference = new AtomicReference<>(); + final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); + transportAction.execute(null, + new GetEnrichPolicyAction.Request(new String[]{name}), + new ActionListener() { + @Override + public void onResponse(GetEnrichPolicyAction.Response response) { + reference.set(response); + latch.countDown(); + } + + public void onFailure(final Exception e) { + fail(); + } + }); + latch.await(); + assertNotNull(reference.get()); + GetEnrichPolicyAction.Response response = reference.get(); + + assertThat(response.getPolicies().size(), equalTo(1)); + + EnrichPolicy.NamedPolicy actualPolicy = response.getPolicies().get(0); + assertThat(name, equalTo(actualPolicy.getName())); + assertEqualPolicies(policy, actualPolicy.getPolicy()); + } + + public void testGetMultiplePolicies() throws InterruptedException { + EnrichPolicy policy = randomEnrichPolicy(XContentType.JSON); + ClusterService clusterService = 
getInstanceFromNode(ClusterService.class); + String name = "my-policy"; + String anotherName = "my-other-policy"; + + AtomicReference error = saveEnrichPolicy(name, policy, clusterService); + assertThat(error.get(), nullValue()); + + error = saveEnrichPolicy(anotherName, policy, clusterService); + assertThat(error.get(), nullValue()); + + // save a second one to verify the count below on GET + error = saveEnrichPolicy("something-else", randomEnrichPolicy(XContentType.JSON), clusterService); + assertThat(error.get(), nullValue()); + + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference reference = new AtomicReference<>(); + final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); + transportAction.execute(null, + new GetEnrichPolicyAction.Request(new String[]{name, anotherName}), + new ActionListener() { + @Override + public void onResponse(GetEnrichPolicyAction.Response response) { + reference.set(response); + latch.countDown(); + } + + public void onFailure(final Exception e) { + fail(); + } + }); + latch.await(); + assertNotNull(reference.get()); + GetEnrichPolicyAction.Response response = reference.get(); + + assertThat(response.getPolicies().size(), equalTo(2)); + } + + public void testGetPolicyThrowsError() throws InterruptedException { + EnrichPolicy policy = randomEnrichPolicy(XContentType.JSON); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + String name = "my-policy"; + + AtomicReference error = saveEnrichPolicy(name, policy, clusterService); + assertThat(error.get(), nullValue()); + + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference reference = new AtomicReference<>(); + final TransportGetEnrichPolicyAction transportAction = node().injector().getInstance(TransportGetEnrichPolicyAction.class); + transportAction.execute(null, + new GetEnrichPolicyAction.Request(new String[]{"non-exists"}), + new 
ActionListener() { + @Override + public void onResponse(GetEnrichPolicyAction.Response response) { + reference.set(response); + latch.countDown(); + } + + public void onFailure(final Exception e) { + fail(); + } + }); + latch.await(); + assertNotNull(reference.get()); + assertThat(reference.get().getPolicies().size(), equalTo(0)); + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichCoordinatorDocTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichCoordinatorDocTests.java new file mode 100644 index 00000000000..fc9d98e5159 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichCoordinatorDocTests.java @@ -0,0 +1,135 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.monitoring.collector.enrich; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.CoordinatorStats; +import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; +import org.elasticsearch.xpack.monitoring.exporter.BaseMonitoringDocTestCase; + +import java.io.IOException; +import java.time.ZoneOffset; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class EnrichCoordinatorDocTests extends BaseMonitoringDocTestCase { + + static final DateFormatter DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_time").withZone(ZoneOffset.UTC); + + private CoordinatorStats stats; + + @Override + public void setUp() throws Exception { + super.setUp(); + stats = new CoordinatorStats( + randomAlphaOfLength(4), + randomIntBetween(0, Integer.MAX_VALUE), + randomIntBetween(0, Integer.MAX_VALUE), + randomNonNegativeLong(), + randomNonNegativeLong() + ); + } + + @Override + protected EnrichCoordinatorDoc createMonitoringDoc(String cluster, + long timestamp, + long interval, + MonitoringDoc.Node node, + MonitoredSystem system, + String type, + String id) { + + return new 
EnrichCoordinatorDoc(cluster, timestamp, interval, node, stats); + } + + @Override + protected void assertMonitoringDoc(EnrichCoordinatorDoc document) { + assertThat(document.getSystem(), is(MonitoredSystem.ES)); + assertThat(document.getType(), is(EnrichCoordinatorDoc.TYPE)); + assertThat(document.getId(), nullValue()); + assertThat(document.getCoordinatorStats(), equalTo(stats)); + } + + @Override + public void testToXContent() throws IOException { + final long timestamp = System.currentTimeMillis(); + final long intervalMillis = System.currentTimeMillis(); + final long nodeTimestamp = System.currentTimeMillis(); + final MonitoringDoc.Node node = new MonitoringDoc.Node("_uuid", "_host", "_addr", "_ip", "_name", nodeTimestamp); + + final EnrichCoordinatorDoc document = new EnrichCoordinatorDoc("_cluster", timestamp, intervalMillis, node, stats); + final BytesReference xContent = XContentHelper.toXContent(document, XContentType.JSON, false); + assertThat(xContent.utf8ToString(), equalTo( + "{" + + "\"cluster_uuid\":\"_cluster\"," + + "\"timestamp\":\"" + DATE_TIME_FORMATTER.formatMillis(timestamp) + "\"," + + "\"interval_ms\":" + intervalMillis + "," + + "\"type\":\"enrich_coordinator_stats\"," + + "\"source_node\":{" + + "\"uuid\":\"_uuid\"," + + "\"host\":\"_host\"," + + "\"transport_address\":\"_addr\"," + + "\"ip\":\"_ip\"," + + "\"name\":\"_name\"," + + "\"timestamp\":\"" + DATE_TIME_FORMATTER.formatMillis(nodeTimestamp) + "\"" + + "}," + + "\"enrich_coordinator_stats\":{" + + "\"node_id\":\"" + stats.getNodeId() + "\"," + + "\"queue_size\":" + stats.getQueueSize() + "," + + "\"remote_requests_current\":" + stats.getRemoteRequestsCurrent() + "," + + "\"remote_requests_total\":" + stats.getRemoteRequestsTotal() + "," + + "\"executed_searches_total\":" + stats.getExecutedSearchesTotal() + + "}" + + "}" + )); + } + + public void testEnrichCoordinatorStatsFieldsMapped() throws IOException { + XContentBuilder builder = jsonBuilder(); + builder.startObject(); + 
builder.value(stats); + builder.endObject(); + Map serializedStatus = XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(builder), false); + + Map template = + XContentHelper.convertToMap(XContentType.JSON.xContent(), MonitoringTemplateUtils.loadTemplate("es"), false); + Map followStatsMapping = (Map) XContentMapValues + .extractValue("mappings._doc.properties.enrich_coordinator_stats.properties", template); + assertThat(serializedStatus.size(), equalTo(followStatsMapping.size())); + for (Map.Entry entry : serializedStatus.entrySet()) { + String fieldName = entry.getKey(); + Map fieldMapping = (Map) followStatsMapping.get(fieldName); + assertThat("no field mapping for field [" + fieldName + "]", fieldMapping, notNullValue()); + + Object fieldValue = entry.getValue(); + String fieldType = (String) fieldMapping.get("type"); + if (fieldValue instanceof Long || fieldValue instanceof Integer) { + assertThat("expected long field type for field [" + fieldName + "]", fieldType, + anyOf(equalTo("long"), equalTo("integer"))); + } else if (fieldValue instanceof String) { + assertThat("expected keyword field type for field [" + fieldName + "]", fieldType, + anyOf(equalTo("keyword"), equalTo("text"))); + } else { + // Manual test specific object fields and if not just fail: + fail("unexpected field value type [" + fieldValue.getClass() + "] for field [" + fieldName + "]"); + } + } + } +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichStatsCollectorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichStatsCollectorTests.java new file mode 100644 index 00000000000..c18372f3367 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichStatsCollectorTests.java @@ -0,0 +1,216 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.collector.enrich; + +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.CoordinatorStats; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.ExecutingPolicy; +import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; +import org.elasticsearch.xpack.monitoring.BaseCollectorTestCase; + +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.xpack.enrich.action.EnrichStatsResponseTests.randomTaskInfo; +import static org.elasticsearch.xpack.monitoring.MonitoringTestUtils.randomMonitoringNode; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class EnrichStatsCollectorTests extends BaseCollectorTestCase { + + public void testShouldCollectReturnsFalseIfMonitoringNotAllowed() { + final Settings settings = 
randomFrom(enrichEnabledSettings(), enrichDisabledSettings()); + final boolean enrichAllowed = randomBoolean(); + final boolean isElectedMaster = randomBoolean(); + whenLocalNodeElectedMaster(isElectedMaster); + + // this controls the blockage + when(licenseState.isMonitoringAllowed()).thenReturn(false); + when(licenseState.isEnrichAllowed()).thenReturn(enrichAllowed); + + final EnrichStatsCollector collector = createCollector(settings, clusterService, licenseState, client); + + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + if (isElectedMaster) { + verify(licenseState).isMonitoringAllowed(); + } + } + + public void testShouldCollectReturnsFalseIfNotMaster() { + // regardless of enrich being enabled + final Settings settings = randomFrom(enrichEnabledSettings(), enrichDisabledSettings()); + + when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); + when(licenseState.isEnrichAllowed()).thenReturn(randomBoolean()); + // this controls the blockage + final boolean isElectedMaster = false; + + final EnrichStatsCollector collector = createCollector(settings, clusterService, licenseState, client); + + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + } + + public void testShouldCollectReturnsFalseIfEnrichIsDisabled() { + // this is controls the blockage + final Settings settings = enrichDisabledSettings(); + + when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); + when(licenseState.isEnrichAllowed()).thenReturn(randomBoolean()); + + final boolean isElectedMaster = randomBoolean(); + whenLocalNodeElectedMaster(isElectedMaster); + + final EnrichStatsCollector collector = createCollector(settings, clusterService, licenseState, client); + + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + + if (isElectedMaster) { + verify(licenseState).isMonitoringAllowed(); + } + } + + public void testShouldCollectReturnsFalseIfEnrichIsNotAllowed() { + final Settings settings = 
randomFrom(enrichEnabledSettings(), enrichDisabledSettings()); + + when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); + // this is controls the blockage + when(licenseState.isEnrichAllowed()).thenReturn(false); + final boolean isElectedMaster = randomBoolean(); + whenLocalNodeElectedMaster(isElectedMaster); + + final EnrichStatsCollector collector = createCollector(settings, clusterService, licenseState, client); + + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + + if (isElectedMaster) { + verify(licenseState).isMonitoringAllowed(); + } + } + + public void testShouldCollectReturnsTrue() { + final Settings settings = enrichEnabledSettings(); + + when(licenseState.isMonitoringAllowed()).thenReturn(true); + when(licenseState.isEnrichAllowed()).thenReturn(true); + final boolean isElectedMaster = true; + + final EnrichStatsCollector collector = createCollector(settings, clusterService, licenseState, client); + + assertThat(collector.shouldCollect(isElectedMaster), is(true)); + + verify(licenseState).isMonitoringAllowed(); + } + + public void testDoCollect() throws Exception { + final String clusterUuid = randomAlphaOfLength(5); + whenClusterStateWithUUID(clusterUuid); + + final MonitoringDoc.Node node = randomMonitoringNode(random()); + final Client client = mock(Client.class); + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + + final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(1, 120)); + withCollectionTimeout(EnrichStatsCollector.STATS_TIMEOUT, timeout); + + int numExecutingPolicies = randomIntBetween(0, 8); + List executingPolicies = new ArrayList<>(numExecutingPolicies); + for (int i = 0; i < numExecutingPolicies; i++) { + executingPolicies.add(new ExecutingPolicy(randomAlphaOfLength(4), randomTaskInfo())); + } + int numCoordinatorStats = randomIntBetween(0, 8); + List coordinatorStats = new ArrayList<>(numCoordinatorStats); + for (int i = 0; i < numCoordinatorStats; i++) { + 
coordinatorStats.add(new CoordinatorStats( + randomAlphaOfLength(4), + randomIntBetween(0, Integer.MAX_VALUE), + randomIntBetween(0, Integer.MAX_VALUE), + randomNonNegativeLong(), + randomNonNegativeLong() + )); + } + + + @SuppressWarnings("unchecked") + final ActionFuture future = (ActionFuture) mock(ActionFuture.class); + final EnrichStatsAction.Response response = new EnrichStatsAction.Response(executingPolicies, coordinatorStats); + + when(client.execute(eq(EnrichStatsAction.INSTANCE), any(EnrichStatsAction.Request.class))).thenReturn(future); + when(future.actionGet(timeout)).thenReturn(response); + + final EnrichStatsCollector collector = + new EnrichStatsCollector(clusterService, licenseState, client, threadContext, settings); + assertEquals(timeout, collector.getCollectionTimeout()); + + final long interval = randomNonNegativeLong(); + final List documents = new ArrayList<>(collector.doCollect(node, interval, clusterState)); + verify(clusterState).metaData(); + verify(metaData).clusterUUID(); + + assertThat(documents, hasSize(executingPolicies.size() + coordinatorStats.size())); + + for (int i = 0; i < coordinatorStats.size(); i++) { + final EnrichCoordinatorDoc actual = (EnrichCoordinatorDoc) documents.get(i); + final CoordinatorStats expected = coordinatorStats.get(i); + + assertThat(actual.getCluster(), is(clusterUuid)); + assertThat(actual.getTimestamp(), greaterThan(0L)); + assertThat(actual.getIntervalMillis(), equalTo(interval)); + assertThat(actual.getNode(), equalTo(node)); + assertThat(actual.getSystem(), is(MonitoredSystem.ES)); + assertThat(actual.getType(), is(EnrichCoordinatorDoc.TYPE)); + assertThat(actual.getId(), nullValue()); + assertThat(actual.getCoordinatorStats(), equalTo(expected)); + } + + for (int i = coordinatorStats.size(); i < documents.size(); i++) { + final ExecutingPolicyDoc actual = (ExecutingPolicyDoc) documents.get(i); + final ExecutingPolicy expected = executingPolicies.get(i - coordinatorStats.size()); + + 
assertThat(actual.getCluster(), is(clusterUuid)); + assertThat(actual.getTimestamp(), greaterThan(0L)); + assertThat(actual.getIntervalMillis(), equalTo(interval)); + assertThat(actual.getNode(), equalTo(node)); + assertThat(actual.getSystem(), is(MonitoredSystem.ES)); + assertThat(actual.getType(), is(ExecutingPolicyDoc.TYPE)); + assertThat(actual.getId(), nullValue()); + assertThat(actual.getExecutingPolicy(), equalTo(expected)); + } + } + + private EnrichStatsCollector createCollector(Settings settings, + ClusterService clusterService, + XPackLicenseState licenseState, + Client client) { + return new EnrichStatsCollector(clusterService, licenseState, client, settings); + } + + private Settings enrichEnabledSettings() { + // since it's the default, we want to ensure we test both with/without it + return randomBoolean() ? Settings.EMPTY : Settings.builder().put(XPackSettings.ENRICH_ENABLED_SETTING.getKey(), true).build(); + } + + private Settings enrichDisabledSettings() { + return Settings.builder().put(XPackSettings.ENRICH_ENABLED_SETTING.getKey(), false).build(); + } + +} diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/ExecutingPolicyDocTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/ExecutingPolicyDocTests.java new file mode 100644 index 00000000000..a36c2d8af92 --- /dev/null +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/ExecutingPolicyDocTests.java @@ -0,0 +1,155 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.monitoring.collector.enrich; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.ExecutingPolicy; +import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; +import org.elasticsearch.xpack.monitoring.exporter.BaseMonitoringDocTestCase; + +import java.io.IOException; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.enrich.action.EnrichStatsResponseTests.randomTaskInfo; +import static org.elasticsearch.xpack.monitoring.collector.enrich.EnrichCoordinatorDocTests.DATE_TIME_FORMATTER; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class ExecutingPolicyDocTests extends BaseMonitoringDocTestCase { + + private ExecutingPolicy executingPolicy; + + @Override + public void setUp() throws Exception { + super.setUp(); + executingPolicy = new ExecutingPolicy( + randomAlphaOfLength(4), + randomTaskInfo() + ); + } + + @Override + protected ExecutingPolicyDoc createMonitoringDoc(String cluster, + long timestamp, + long interval, + MonitoringDoc.Node node, + MonitoredSystem system, + String type, + String id) { + + return new ExecutingPolicyDoc(cluster, timestamp, interval, node, 
executingPolicy); + } + + @Override + protected void assertMonitoringDoc(ExecutingPolicyDoc document) { + assertThat(document.getSystem(), is(MonitoredSystem.ES)); + assertThat(document.getType(), is(ExecutingPolicyDoc.TYPE)); + assertThat(document.getId(), nullValue()); + assertThat(document.getExecutingPolicy(), equalTo(executingPolicy)); + } + + @Override + public void testToXContent() throws IOException { + final long timestamp = System.currentTimeMillis(); + final long intervalMillis = System.currentTimeMillis(); + final long nodeTimestamp = System.currentTimeMillis(); + final MonitoringDoc.Node node = new MonitoringDoc.Node("_uuid", "_host", "_addr", "_ip", "_name", nodeTimestamp); + + final ExecutingPolicyDoc document = new ExecutingPolicyDoc("_cluster", timestamp, intervalMillis, node, executingPolicy); + final BytesReference xContent = XContentHelper.toXContent(document, XContentType.JSON, false); + Optional> header = + executingPolicy.getTaskInfo().getHeaders().entrySet().stream().findAny(); + assertThat(xContent.utf8ToString(), equalTo( + "{" + + "\"cluster_uuid\":\"_cluster\"," + + "\"timestamp\":\"" + DATE_TIME_FORMATTER.formatMillis(timestamp) + "\"," + + "\"interval_ms\":" + intervalMillis + "," + + "\"type\":\"enrich_executing_policy_stats\"," + + "\"source_node\":{" + + "\"uuid\":\"_uuid\"," + + "\"host\":\"_host\"," + + "\"transport_address\":\"_addr\"," + + "\"ip\":\"_ip\"," + + "\"name\":\"_name\"," + + "\"timestamp\":\"" + DATE_TIME_FORMATTER.formatMillis(nodeTimestamp) + "\"" + + "}," + + "\"enrich_executing_policy_stats\":{" + + "\"name\":\"" + executingPolicy.getName() + "\"," + + "\"task\":{" + + "\"node\":\"" + executingPolicy.getTaskInfo().getTaskId().getNodeId() + "\"," + + "\"id\":" + executingPolicy.getTaskInfo().getTaskId().getId() + "," + + "\"type\":\"" + executingPolicy.getTaskInfo().getType() + "\"," + + "\"action\":\"" + executingPolicy.getTaskInfo().getAction() + "\"," + + "\"description\":\"" + 
executingPolicy.getTaskInfo().getDescription() + "\"," + + "\"start_time_in_millis\":" + executingPolicy.getTaskInfo().getStartTime() + "," + + "\"running_time_in_nanos\":" + executingPolicy.getTaskInfo().getRunningTimeNanos() + "," + + "\"cancellable\":" + executingPolicy.getTaskInfo().isCancellable() + "," + + header + .map(entry -> String.format(Locale.ROOT, "\"headers\":{\"%s\":\"%s\"}", entry.getKey(), entry.getValue())) + .orElse("\"headers\":{}") + + "}" + + "}" + + "}" + )); + } + + public void testEnrichCoordinatorStatsFieldsMapped() throws IOException { + XContentBuilder builder = jsonBuilder(); + builder.startObject(); + builder.value(executingPolicy); + builder.endObject(); + Map serializedStatus = XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(builder), false); + + Map template = + XContentHelper.convertToMap(XContentType.JSON.xContent(), MonitoringTemplateUtils.loadTemplate("es"), false); + Map followStatsMapping = (Map) XContentMapValues + .extractValue("mappings._doc.properties.enrich_executing_policy_stats.properties", template); + assertThat(serializedStatus.size(), equalTo(followStatsMapping.size())); + for (Map.Entry entry : serializedStatus.entrySet()) { + String fieldName = entry.getKey(); + Map fieldMapping = (Map) followStatsMapping.get(fieldName); + assertThat("no field mapping for field [" + fieldName + "]", fieldMapping, notNullValue()); + + Object fieldValue = entry.getValue(); + String fieldType = (String) fieldMapping.get("type"); + if (fieldValue instanceof Long || fieldValue instanceof Integer) { + assertThat("expected long field type for field [" + fieldName + "]", fieldType, + anyOf(equalTo("long"), equalTo("integer"))); + } else if (fieldValue instanceof String) { + assertThat("expected keyword field type for field [" + fieldName + "]", fieldType, + anyOf(equalTo("keyword"), equalTo("text"))); + } else { + if (fieldName.equals("task")) { + assertThat(fieldType, equalTo("object")); + assertThat(((Map) 
fieldMapping.get("properties")).size(), equalTo(8)); + assertThat(XContentMapValues.extractValue("properties.node.type", fieldMapping), equalTo("keyword")); + assertThat(XContentMapValues.extractValue("properties.id.type", fieldMapping), equalTo("long")); + assertThat(XContentMapValues.extractValue("properties.type.type", fieldMapping), equalTo("keyword")); + assertThat(XContentMapValues.extractValue("properties.action.type", fieldMapping), equalTo("keyword")); + assertThat(XContentMapValues.extractValue("properties.description.type", fieldMapping), equalTo("keyword")); + assertThat(XContentMapValues.extractValue("properties.start_time_in_millis.type", fieldMapping), equalTo("date")); + assertThat(XContentMapValues.extractValue("properties.cancellable.type", fieldMapping), equalTo("boolean")); + } else { + // Manual test specific object fields and if not just fail: + fail("unexpected field value type [" + fieldValue.getClass() + "] for field [" + fieldName + "]"); + } + } + } + } +} diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java index 5d03a066d75..29fd28c9561 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java @@ -41,6 +41,7 @@ import org.elasticsearch.xpack.monitoring.cleaner.CleanerService; import org.elasticsearch.xpack.monitoring.collector.Collector; import org.elasticsearch.xpack.monitoring.collector.ccr.StatsCollector; import org.elasticsearch.xpack.monitoring.collector.cluster.ClusterStatsCollector; +import org.elasticsearch.xpack.monitoring.collector.enrich.EnrichStatsCollector; import org.elasticsearch.xpack.monitoring.collector.indices.IndexRecoveryCollector; import org.elasticsearch.xpack.monitoring.collector.indices.IndexStatsCollector; import 
org.elasticsearch.xpack.monitoring.collector.ml.JobStatsCollector; @@ -144,6 +145,7 @@ public class Monitoring extends Plugin implements ActionPlugin { collectors.add(new IndexRecoveryCollector(clusterService, getLicenseState(), client)); collectors.add(new JobStatsCollector(settings, clusterService, getLicenseState(), client)); collectors.add(new StatsCollector(settings, clusterService, getLicenseState(), client)); + collectors.add(new EnrichStatsCollector(clusterService, getLicenseState(), client, settings)); final MonitoringService monitoringService = new MonitoringService(settings, clusterService, threadPool, collectors, exporters); @@ -184,6 +186,7 @@ public class Monitoring extends Plugin implements ActionPlugin { settings.add(JobStatsCollector.JOB_STATS_TIMEOUT); settings.add(StatsCollector.CCR_STATS_TIMEOUT); settings.add(NodeStatsCollector.NODE_STATS_TIMEOUT); + settings.add(EnrichStatsCollector.STATS_TIMEOUT); settings.addAll(Exporters.getSettings()); return Collections.unmodifiableList(settings); } diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichCoordinatorDoc.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichCoordinatorDoc.java new file mode 100644 index 00000000000..f8d48512656 --- /dev/null +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichCoordinatorDoc.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.monitoring.collector.enrich; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.CoordinatorStats; +import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; + +import java.io.IOException; +import java.util.Objects; + +public final class EnrichCoordinatorDoc extends MonitoringDoc { + + public static final String TYPE = "enrich_coordinator_stats"; + + private final CoordinatorStats coordinatorStats; + + public EnrichCoordinatorDoc(String cluster, + long timestamp, + long intervalMillis, + MonitoringDoc.Node node, + CoordinatorStats coordinatorStats) { + super(cluster, timestamp, intervalMillis, node, MonitoredSystem.ES, TYPE, null); + this.coordinatorStats = Objects.requireNonNull(coordinatorStats, "stats"); + } + + public CoordinatorStats getCoordinatorStats() { + return coordinatorStats; + } + + @Override + protected void innerToXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(TYPE); + { + coordinatorStats.toXContent(builder, params); + } + builder.endObject(); + } +} diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichStatsCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichStatsCollector.java new file mode 100644 index 00000000000..c2162c289df --- /dev/null +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichStatsCollector.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.monitoring.collector.enrich; + +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; +import org.elasticsearch.xpack.monitoring.collector.Collector; + +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.core.ClientHelper.MONITORING_ORIGIN; + +public final class EnrichStatsCollector extends Collector { + + public static final Setting STATS_TIMEOUT = collectionTimeoutSetting("enrich.stats.timeout"); + + private final Client client; + private final Settings settings; + private final ThreadContext threadContext; + + public EnrichStatsCollector(ClusterService clusterService, + XPackLicenseState licenseState, + Client client, + Settings settings) { + this(clusterService, licenseState, client, client.threadPool().getThreadContext(), settings); + } + + EnrichStatsCollector(ClusterService clusterService, + XPackLicenseState licenseState, + Client client, + ThreadContext threadContext, + Settings settings) { + super(EnrichCoordinatorDoc.TYPE, clusterService, STATS_TIMEOUT, licenseState); + this.client = client; + this.settings = settings; + this.threadContext = threadContext; + } + + @Override + protected boolean shouldCollect(final boolean isElectedMaster) { + return isElectedMaster + && super.shouldCollect(isElectedMaster) + && XPackSettings.ENRICH_ENABLED_SETTING.get(settings) + && 
licenseState.isEnrichAllowed(); + } + + @Override + protected Collection doCollect(MonitoringDoc.Node node, long interval, ClusterState clusterState) throws Exception { + try (ThreadContext.StoredContext ignore = threadContext.stashWithOrigin(MONITORING_ORIGIN)) { + final long timestamp = timestamp(); + final String clusterUuid = clusterUuid(clusterState); + + final EnrichStatsAction.Request request = new EnrichStatsAction.Request(); + final EnrichStatsAction.Response response = + client.execute(EnrichStatsAction.INSTANCE, request).actionGet(getCollectionTimeout()); + + final List docs = response.getCoordinatorStats().stream() + .map(stats -> new EnrichCoordinatorDoc(clusterUuid, timestamp, interval, node, stats)) + .collect(Collectors.toList()); + + response.getExecutingPolicies().stream() + .map(stats -> new ExecutingPolicyDoc(clusterUuid, timestamp, interval, node, stats)) + .forEach(docs::add); + + return docs; + } + } +} diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/enrich/ExecutingPolicyDoc.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/enrich/ExecutingPolicyDoc.java new file mode 100644 index 00000000000..724e9f8d9a2 --- /dev/null +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/enrich/ExecutingPolicyDoc.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.monitoring.collector.enrich; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.ExecutingPolicy; +import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; + +import java.io.IOException; +import java.util.Objects; + +public final class ExecutingPolicyDoc extends MonitoringDoc { + + public static final String TYPE = "enrich_executing_policy_stats"; + + private final ExecutingPolicy executingPolicy; + + public ExecutingPolicyDoc(String cluster, + long timestamp, + long intervalMillis, + Node node, + ExecutingPolicy coordinatorStats) { + super(cluster, timestamp, intervalMillis, node, MonitoredSystem.ES, TYPE, null); + this.executingPolicy = Objects.requireNonNull(coordinatorStats, "stats"); + } + + public ExecutingPolicy getExecutingPolicy() { + return executingPolicy; + } + + @Override + protected void innerToXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(TYPE); + { + executingPolicy.toXContent(builder, params); + } + builder.endObject(); + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/enrich.delete_policy.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/enrich.delete_policy.json new file mode 100644 index 00000000000..a775ca28798 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/enrich.delete_policy.json @@ -0,0 +1,20 @@ +{ + "enrich.delete_policy": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-delete-policy.html", + "stability" : "stable", + "url": { + "paths": [ + { + "path": "/_enrich/policy/{name}", + "methods": [ "DELETE" ], + "parts": { + "name": { + "type" : "string", + "description" : "The name of the enrich policy" + } + } + } + ] + } + } +} diff --git 
a/x-pack/plugin/src/test/resources/rest-api-spec/api/enrich.execute_policy.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/enrich.execute_policy.json new file mode 100644 index 00000000000..b49486a0620 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/enrich.execute_policy.json @@ -0,0 +1,27 @@ +{ + "enrich.execute_policy": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-execute-policy.html", + "stability" : "stable", + "url": { + "paths": [ + { + "path": "/_enrich/policy/{name}/_execute", + "methods": [ "PUT" ], + "parts": { + "name": { + "type" : "string", + "description" : "The name of the enrich policy" + } + } + } + ] + }, + "params":{ + "wait_for_completion":{ + "type":"boolean", + "default":true, + "description":"Should the request block until the execution is complete." + } + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/enrich.get_policy.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/enrich.get_policy.json new file mode 100644 index 00000000000..685c24ea228 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/enrich.get_policy.json @@ -0,0 +1,24 @@ +{ + "enrich.get_policy": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-get-policy.html", + "stability" : "stable", + "url": { + "paths": [ + { + "path": "/_enrich/policy/{name}", + "methods": [ "GET" ], + "parts": { + "name": { + "type" : "string", + "description" : "The name of the enrich policy" + } + } + }, + { + "path": "/_enrich/policy/", + "methods": [ "GET" ] + } + ] + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/enrich.put_policy.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/enrich.put_policy.json new file mode 100644 index 00000000000..7a1c02f92b9 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/enrich.put_policy.json @@ -0,0 +1,24 @@ +{ + 
"enrich.put_policy": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-put-policy.html", + "stability" : "stable", + "url": { + "paths": [ + { + "path": "/_enrich/policy/{name}", + "methods": [ "PUT" ], + "parts": { + "name": { + "type" : "string", + "description" : "The name of the enrich policy" + } + } + } + ] + }, + "body": { + "description": "The enrich policy to register", + "required": true + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/enrich.stats.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/enrich.stats.json new file mode 100644 index 00000000000..0dda96d81fa --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/enrich.stats.json @@ -0,0 +1,14 @@ +{ + "enrich.stats": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-stats.html", + "stability" : "stable", + "url": { + "paths": [ + { + "path": "/_enrich/_stats", + "methods": [ "GET" ] + } + ] + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/11_builtin.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/11_builtin.yml index 836977b0a0c..c7130faf277 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/11_builtin.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/11_builtin.yml @@ -15,5 +15,5 @@ setup: # This is fragile - it needs to be updated every time we add a new cluster/index privilege # I would much prefer we could just check that specific entries are in the array, but we don't have # an assertion for that - - length: { "cluster" : 32 } + - length: { "cluster" : 33 } - length: { "index" : 17 } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml index 4fd755d5fed..7293e361e10 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml +++ 
b/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml @@ -16,6 +16,7 @@ - contains: { nodes.$master.modules: { name: x-pack-ccr } } - contains: { nodes.$master.modules: { name: x-pack-core } } - contains: { nodes.$master.modules: { name: x-pack-deprecation } } + - contains: { nodes.$master.modules: { name: x-pack-enrich } } - contains: { nodes.$master.modules: { name: x-pack-graph } } - contains: { nodes.$master.modules: { name: x-pack-ilm } } - contains: { nodes.$master.modules: { name: x-pack-logstash } } diff --git a/x-pack/qa/transport-client-tests/build.gradle b/x-pack/qa/transport-client-tests/build.gradle index 8e52ff5783f..ae4722a0474 100644 --- a/x-pack/qa/transport-client-tests/build.gradle +++ b/x-pack/qa/transport-client-tests/build.gradle @@ -17,7 +17,7 @@ testingConventions { naming.clear() naming { IT { - baseClass 'org.elasticsearch.xpack.ml.client.ESXPackSmokeClientTestCase' + baseClass 'org.elasticsearch.xpack.ESXPackSmokeClientTestCase' } } } diff --git a/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/ESXPackSmokeClientTestCase.java b/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ESXPackSmokeClientTestCase.java similarity index 99% rename from x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/ESXPackSmokeClientTestCase.java rename to x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ESXPackSmokeClientTestCase.java index 28267614dd3..e8d886330ae 100644 --- a/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/ESXPackSmokeClientTestCase.java +++ b/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ESXPackSmokeClientTestCase.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.client; +package org.elasticsearch.xpack; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; diff --git a/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/enrich/client/EnrichTransportClientIT.java b/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/enrich/client/EnrichTransportClientIT.java new file mode 100644 index 00000000000..34db7e84009 --- /dev/null +++ b/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/enrich/client/EnrichTransportClientIT.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.enrich.client; + +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.xpack.ESXPackSmokeClientTestCase; +import org.elasticsearch.xpack.core.XPackClient; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.client.EnrichClient; + +import java.io.IOException; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class EnrichTransportClientIT extends ESXPackSmokeClientTestCase { + + private static void assertEqualPolicies(EnrichPolicy expectedInstance, EnrichPolicy newInstance) { + assertThat(newInstance.getType(), equalTo(expectedInstance.getType())); + if (newInstance.getQuery() != null) { + // testFromXContent, always shuffles the xcontent and then byte 
wise the query is different, so we check the parsed version: + assertThat(newInstance.getQuery().getQueryAsMap(), equalTo(expectedInstance.getQuery().getQueryAsMap())); + } else { + assertThat(expectedInstance.getQuery(), nullValue()); + } + assertThat(newInstance.getIndices(), equalTo(expectedInstance.getIndices())); + assertThat(newInstance.getMatchField(), equalTo(expectedInstance.getMatchField())); + assertThat(newInstance.getEnrichFields(), equalTo(expectedInstance.getEnrichFields())); + } + + public void testEnrichCrud() throws IOException { + Client client = getClient(); + XPackClient xPackClient = new XPackClient(client); + EnrichClient enrichClient = xPackClient.enrichClient(); + + EnrichPolicy policy = new EnrichPolicy("exact_match", null, Collections.emptyList(), "test", Collections.emptyList()); + String policyName = "my-policy"; + + AcknowledgedResponse acknowledgedResponse = enrichClient.putEnrichPolicy( + new PutEnrichPolicyAction.Request(policyName, + policy)).actionGet(); + + assertTrue(acknowledgedResponse.isAcknowledged()); + + GetEnrichPolicyAction.Response getResponse = enrichClient.getEnrichPolicy( + new GetEnrichPolicyAction.Request(new String[] {policyName})).actionGet(); + + assertThat(getResponse.getPolicies().size(), equalTo(1)); + assertThat(policyName, equalTo(getResponse.getPolicies().get(0).getName())); + assertEqualPolicies(policy, getResponse.getPolicies().get(0).getPolicy()); + + acknowledgedResponse = enrichClient.deleteEnrichPolicy(new DeleteEnrichPolicyAction.Request(policyName)).actionGet(); + assertTrue(acknowledgedResponse.isAcknowledged()); + } +} diff --git a/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/MLTransportClientIT.java b/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/MLTransportClientIT.java index 1a4959c0be8..41a0c27c5d7 100644 --- a/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/MLTransportClientIT.java +++ 
b/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/MLTransportClientIT.java @@ -10,6 +10,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.xpack.ESXPackSmokeClientTestCase; import org.elasticsearch.xpack.core.XPackClient; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; import org.elasticsearch.xpack.core.ml.action.DeleteJobAction;