This commit merges (#48040) the enrich-7.x feature branch,
which is a backport merge and adds a new ingest processor, named the enrich processor, that allows documents being ingested to be enriched with data from other indices. Besides a new enrich processor, this PR adds several APIs to manage an enrich policy. An enrich policy is in charge of making the data from other indices available to the enrich processor in an efficient manner. Related to #32789
This commit is contained in:
commit
aff0c9babc
|
@ -0,0 +1,274 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.client.core.AcknowledgedResponse;
|
||||
import org.elasticsearch.client.enrich.DeletePolicyRequest;
|
||||
import org.elasticsearch.client.enrich.ExecutePolicyRequest;
|
||||
import org.elasticsearch.client.enrich.ExecutePolicyResponse;
|
||||
import org.elasticsearch.client.enrich.GetPolicyRequest;
|
||||
import org.elasticsearch.client.enrich.GetPolicyResponse;
|
||||
import org.elasticsearch.client.enrich.PutPolicyRequest;
|
||||
import org.elasticsearch.client.enrich.StatsRequest;
|
||||
import org.elasticsearch.client.enrich.StatsResponse;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
|
||||
/**
|
||||
* A wrapper for the {@link RestHighLevelClient} that provides methods for
|
||||
* accessing the Elastic enrich related methods
|
||||
* <p>
|
||||
* See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-processor.html#enrich-policy-apis">
|
||||
* X-Pack Enrich Policy APIs on elastic.co</a> for more information.
|
||||
*/
|
||||
public final class EnrichClient {

    private final RestHighLevelClient restHighLevelClient;

    EnrichClient(RestHighLevelClient restHighLevelClient) {
        this.restHighLevelClient = restHighLevelClient;
    }

    /**
     * Executes the put policy api, which stores an enrich policy.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-policy-apis.html#put-policy-api">
     * the docs</a> for more.
     *
     * @param request the {@link PutPolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public AcknowledgedResponse putPolicy(PutPolicyRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(
            request,
            EnrichRequestConverters::putPolicy,
            options,
            AcknowledgedResponse::fromXContent,
            Collections.emptySet()
        );
    }

    /**
     * Asynchronously executes the put policy api, which stores an enrich policy.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-policy-apis.html#put-policy-api">
     * the docs</a> for more.
     *
     * @param request the {@link PutPolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     * @return cancellable that may be used to cancel the request
     */
    public Cancellable putPolicyAsync(PutPolicyRequest request,
                                      RequestOptions options,
                                      ActionListener<AcknowledgedResponse> listener) {
        return restHighLevelClient.performRequestAsyncAndParseEntity(
            request,
            EnrichRequestConverters::putPolicy,
            options,
            AcknowledgedResponse::fromXContent,
            listener,
            Collections.emptySet()
        );
    }

    /**
     * Executes the delete policy api, which deletes an enrich policy.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-policy-apis.html#delete-policy-api">
     * the docs</a> for more.
     *
     * @param request the {@link DeletePolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public AcknowledgedResponse deletePolicy(DeletePolicyRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(
            request,
            EnrichRequestConverters::deletePolicy,
            options,
            AcknowledgedResponse::fromXContent,
            Collections.emptySet()
        );
    }

    /**
     * Asynchronously executes the delete policy api, which deletes an enrich policy.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-policy-apis.html#delete-policy-api">
     * the docs</a> for more.
     *
     * @param request the {@link DeletePolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     * @return cancellable that may be used to cancel the request
     */
    public Cancellable deletePolicyAsync(DeletePolicyRequest request,
                                         RequestOptions options,
                                         ActionListener<AcknowledgedResponse> listener) {
        return restHighLevelClient.performRequestAsyncAndParseEntity(
            request,
            EnrichRequestConverters::deletePolicy,
            options,
            AcknowledgedResponse::fromXContent,
            listener,
            Collections.emptySet()
        );
    }

    /**
     * Executes the get policy api, which retrieves an enrich policy.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-policy-apis.html#get-policy-api">
     * the docs</a> for more.
     *
     * @param request the {@link GetPolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public GetPolicyResponse getPolicy(GetPolicyRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(
            request,
            EnrichRequestConverters::getPolicy,
            options,
            GetPolicyResponse::fromXContent,
            Collections.emptySet()
        );
    }

    /**
     * Asynchronously executes the get policy api, which retrieves an enrich policy.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-policy-apis.html#get-policy-api">
     * the docs</a> for more.
     *
     * @param request the {@link GetPolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     * @return cancellable that may be used to cancel the request
     */
    public Cancellable getPolicyAsync(GetPolicyRequest request,
                                      RequestOptions options,
                                      ActionListener<GetPolicyResponse> listener) {
        return restHighLevelClient.performRequestAsyncAndParseEntity(
            request,
            EnrichRequestConverters::getPolicy,
            options,
            GetPolicyResponse::fromXContent,
            listener,
            Collections.emptySet()
        );
    }

    /**
     * Executes the enrich stats api, which retrieves enrich related stats.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-policy-apis.html#stats-api">
     * the docs</a> for more.
     *
     * @param request the {@link StatsRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public StatsResponse stats(StatsRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(
            request,
            EnrichRequestConverters::stats,
            options,
            StatsResponse::fromXContent,
            Collections.emptySet()
        );
    }

    /**
     * Asynchronously executes the enrich stats api, which retrieves enrich related stats.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-policy-apis.html#stats-api">
     * the docs</a> for more.
     *
     * @param request the {@link StatsRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     * @return cancellable that may be used to cancel the request
     */
    public Cancellable statsAsync(StatsRequest request,
                                  RequestOptions options,
                                  ActionListener<StatsResponse> listener) {
        return restHighLevelClient.performRequestAsyncAndParseEntity(
            request,
            EnrichRequestConverters::stats,
            options,
            StatsResponse::fromXContent,
            listener,
            Collections.emptySet()
        );
    }

    /**
     * Executes the execute policy api, which executes an enrich policy.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-policy-apis.html#execute-policy">
     * the docs</a> for more.
     *
     * @param request the {@link ExecutePolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public ExecutePolicyResponse executePolicy(ExecutePolicyRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(
            request,
            EnrichRequestConverters::executePolicy,
            options,
            ExecutePolicyResponse::fromXContent,
            Collections.emptySet()
        );
    }

    /**
     * Asynchronously executes the execute policy api, which executes an enrich policy.
     *
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-policy-apis.html#execute-policy">
     * the docs</a> for more.
     *
     * @param request the {@link ExecutePolicyRequest}
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     * @return cancellable that may be used to cancel the request
     */
    public Cancellable executePolicyAsync(ExecutePolicyRequest request,
                                          RequestOptions options,
                                          ActionListener<ExecutePolicyResponse> listener) {
        return restHighLevelClient.performRequestAsyncAndParseEntity(
            request,
            EnrichRequestConverters::executePolicy,
            options,
            ExecutePolicyResponse::fromXContent,
            listener,
            Collections.emptySet()
        );
    }
}
|
|
@ -0,0 +1,84 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.client.methods.HttpDelete;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.elasticsearch.client.enrich.DeletePolicyRequest;
|
||||
import org.elasticsearch.client.enrich.ExecutePolicyRequest;
|
||||
import org.elasticsearch.client.enrich.GetPolicyRequest;
|
||||
import org.elasticsearch.client.enrich.PutPolicyRequest;
|
||||
import org.elasticsearch.client.enrich.StatsRequest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
|
||||
import static org.elasticsearch.client.RequestConverters.createEntity;
|
||||
|
||||
final class EnrichRequestConverters {
|
||||
|
||||
static Request putPolicy(PutPolicyRequest putPolicyRequest) throws IOException {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_enrich", "policy")
|
||||
.addPathPart(putPolicyRequest.getName())
|
||||
.build();
|
||||
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
|
||||
request.setEntity(createEntity(putPolicyRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request deletePolicy(DeletePolicyRequest deletePolicyRequest) {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_enrich", "policy")
|
||||
.addPathPart(deletePolicyRequest.getName())
|
||||
.build();
|
||||
return new Request(HttpDelete.METHOD_NAME, endpoint);
|
||||
}
|
||||
|
||||
static Request getPolicy(GetPolicyRequest getPolicyRequest) {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_enrich", "policy")
|
||||
.addCommaSeparatedPathParts(getPolicyRequest.getNames())
|
||||
.build();
|
||||
return new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
}
|
||||
|
||||
static Request stats(StatsRequest statsRequest) {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_enrich", "_stats")
|
||||
.build();
|
||||
return new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
}
|
||||
|
||||
static Request executePolicy(ExecutePolicyRequest executePolicyRequest) {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_enrich", "policy")
|
||||
.addPathPart(executePolicyRequest.getName())
|
||||
.addPathPartAsIs("_execute")
|
||||
.build();
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
if (executePolicyRequest.getWaitForCompletion() != null) {
|
||||
request.addParameter("wait_for_completion", executePolicyRequest.getWaitForCompletion().toString());
|
||||
}
|
||||
return request;
|
||||
}
|
||||
|
||||
}
|
|
@ -257,6 +257,7 @@ public class RestHighLevelClient implements Closeable {
|
|||
private final RollupClient rollupClient = new RollupClient(this);
|
||||
private final CcrClient ccrClient = new CcrClient(this);
|
||||
private final TransformClient transformClient = new TransformClient(this);
|
||||
private final EnrichClient enrichClient = new EnrichClient(this);
|
||||
|
||||
/**
|
||||
* Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the
|
||||
|
@ -481,6 +482,10 @@ public class RestHighLevelClient implements Closeable {
|
|||
return transformClient;
|
||||
}
|
||||
|
||||
    /**
     * Provides an {@link EnrichClient} that can be used to access the
     * Enrich APIs (manage and execute enrich policies).
     *
     * @return the enrich client wrapper around this high level client
     */
    public EnrichClient enrich() {
        return enrichClient;
    }
|
||||
|
||||
/**
|
||||
* Executes a bulk request using the Bulk API.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html">Bulk API on elastic.co</a>
|
||||
|
|
|
@ -0,0 +1,38 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.enrich;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
|
||||
public final class DeletePolicyRequest implements Validatable {
|
||||
|
||||
private final String name;
|
||||
|
||||
public DeletePolicyRequest(String name) {
|
||||
if (Strings.hasLength(name) == false) {
|
||||
throw new IllegalArgumentException("name must be a non-null and non-empty string");
|
||||
}
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,43 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.enrich;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
|
||||
public final class ExecutePolicyRequest implements Validatable {
|
||||
|
||||
private final String name;
|
||||
private Boolean waitForCompletion;
|
||||
|
||||
public ExecutePolicyRequest(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public Boolean getWaitForCompletion() {
|
||||
return waitForCompletion;
|
||||
}
|
||||
|
||||
public void setWaitForCompletion(boolean waitForCompletion) {
|
||||
this.waitForCompletion = waitForCompletion;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,85 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.enrich;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
/**
 * Response to the execute policy api. Both fields are parsed as optional:
 * presumably "task" is returned when execution continues in the background
 * and "status" when the call waited for completion — TODO confirm against
 * the server-side response rendering.
 */
public final class ExecutePolicyResponse {

    private static final ParseField TASK_FIELD = new ParseField("task");
    private static final ParseField STATUS_FIELD = new ParseField("status");

    // Lenient parser (second arg true): unknown fields are ignored for
    // forward compatibility.
    private static final ConstructingObjectParser<ExecutePolicyResponse, Void> PARSER = new ConstructingObjectParser<>(
        "execute_policy_response",
        true,
        args -> new ExecutePolicyResponse((String) args[0], (ExecutionStatus) args[1])
    );

    static {
        // Both constructor args are optional, so either may come back null.
        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TASK_FIELD);
        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ExecutionStatus.PARSER, STATUS_FIELD);
    }

    /** Parses a response body into an {@link ExecutePolicyResponse}. */
    public static ExecutePolicyResponse fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    private final String taskId;
    private final ExecutionStatus executionStatus;

    ExecutePolicyResponse(String taskId, ExecutionStatus executionStatus) {
        this.taskId = taskId;
        this.executionStatus = executionStatus;
    }

    /** Returns the task id from the response, or null if none was returned. */
    public String getTaskId() {
        return taskId;
    }

    /** Returns the execution status from the response, or null if none was returned. */
    public ExecutionStatus getExecutionStatus() {
        return executionStatus;
    }

    /** The status of a policy execution, currently carrying only the phase name. */
    public static final class ExecutionStatus {

        private static final ParseField PHASE_FIELD = new ParseField("phase");

        private static final ConstructingObjectParser<ExecutionStatus, Void> PARSER = new ConstructingObjectParser<>(
            "execution_status",
            true,
            args -> new ExecutionStatus((String) args[0])
        );

        static {
            // "phase" is mandatory here, unlike the outer response's fields.
            PARSER.declareString(ConstructingObjectParser.constructorArg(), PHASE_FIELD);
        }

        private final String phase;

        ExecutionStatus(String phase) {
            this.phase = phase;
        }

        /** Returns the phase the policy execution reported. */
        public String getPhase() {
            return phase;
        }
    }
}
|
|
@ -0,0 +1,46 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.enrich;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
public final class GetPolicyRequest implements Validatable {
|
||||
|
||||
private final List<String> names;
|
||||
|
||||
public GetPolicyRequest() {
|
||||
this(Collections.emptyList());
|
||||
}
|
||||
|
||||
public GetPolicyRequest(String... names) {
|
||||
this(Arrays.asList(names));
|
||||
}
|
||||
|
||||
public GetPolicyRequest(List<String> names) {
|
||||
this.names = names;
|
||||
}
|
||||
|
||||
public List<String> getNames() {
|
||||
return names;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,64 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.enrich;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Response to the get policy api, holding the policies returned for the
 * requested names.
 */
public final class GetPolicyResponse {

    // Lenient top-level parser: expects a "policies" array and ignores
    // unknown fields for forward compatibility.
    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<GetPolicyResponse, Void> PARSER = new ConstructingObjectParser<>(
        "get_policy_response",
        true,
        args -> new GetPolicyResponse((List<NamedPolicy>) args[0])
    );

    // Each element of the "policies" array is an object wrapping the actual
    // policy under a "config" key; this parser unwraps that one level and
    // passes the inner object to NamedPolicy.fromXContent.
    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<NamedPolicy, Void> CONFIG_PARSER = new ConstructingObjectParser<>(
        "config",
        true,
        args -> (NamedPolicy) args[0]
    );

    static {
        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(),
            CONFIG_PARSER::apply, new ParseField("policies"));
        CONFIG_PARSER.declareObject(ConstructingObjectParser.constructorArg(),
            (p, c) -> NamedPolicy.fromXContent(p), new ParseField("config"));
    }

    private final List<NamedPolicy> policies;

    /** Parses a response body into a {@link GetPolicyResponse}. */
    public static GetPolicyResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.apply(parser, null);
    }

    public GetPolicyResponse(List<NamedPolicy> policies) {
        this.policies = policies;
    }

    /** Returns the policies contained in the response. */
    public List<NamedPolicy> getPolicies() {
        return policies;
    }
}
|
|
@ -0,0 +1,130 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.enrich;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
/**
 * A named enrich policy as returned by the get policy api. The policy type
 * (e.g. the match kind) is taken from the wrapping field name in the response
 * body rather than from a field inside the object — see {@link #fromXContent}.
 */
public final class NamedPolicy {

    static final ParseField NAME_FIELD = new ParseField("name");
    static final ParseField QUERY_FIELD = new ParseField("query");
    static final ParseField INDICES_FIELD = new ParseField("indices");
    static final ParseField MATCH_FIELD_FIELD = new ParseField("match_field");
    static final ParseField ENRICH_FIELDS_FIELD = new ParseField("enrich_fields");

    // The parser context (String) carries the policy type extracted from the
    // wrapping field name; it becomes the first constructor argument.
    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<NamedPolicy, String> PARSER = new ConstructingObjectParser<>(
        "policy",
        true,
        (args, policyType) -> new NamedPolicy(
            policyType,
            (String) args[0],
            (BytesReference) args[1],
            (List<String>) args[2],
            (String) args[3],
            (List<String>) args[4]
        )
    );

    static {
        declareParserOptions(PARSER);
    }

    private static void declareParserOptions(ConstructingObjectParser<?, ?> parser) {
        parser.declareString(ConstructingObjectParser.constructorArg(), NAME_FIELD);
        // The query is kept as raw bytes: the structure under "query" is copied
        // verbatim into a builder instead of being parsed into query objects.
        parser.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> {
            XContentBuilder builder = XContentBuilder.builder(p.contentType().xContent());
            builder.copyCurrentStructure(p);
            return BytesArray.bytes(builder);
        }, QUERY_FIELD);
        parser.declareStringArray(ConstructingObjectParser.constructorArg(), INDICES_FIELD);
        parser.declareString(ConstructingObjectParser.constructorArg(), MATCH_FIELD_FIELD);
        parser.declareStringArray(ConstructingObjectParser.constructorArg(), ENRICH_FIELDS_FIELD);
    }

    /**
     * Parses a policy from an object of the shape {@code {"<type>": {...policy fields...}}}:
     * the single wrapping field's name is taken as the policy type and its
     * value is parsed as the policy body.
     *
     * @throws ParsingException if the wrapping object structure is not as expected
     */
    public static NamedPolicy fromXContent(XContentParser parser) throws IOException {
        XContentParser.Token token = parser.currentToken();
        // Advance to the object start if the parser hasn't consumed it yet.
        if (token != XContentParser.Token.START_OBJECT) {
            token = parser.nextToken();
        }
        if (token != XContentParser.Token.START_OBJECT) {
            throw new ParsingException(parser.getTokenLocation(), "unexpected token");
        }
        token = parser.nextToken();
        // The first (and only expected) field's name is the policy type.
        if (token != XContentParser.Token.FIELD_NAME) {
            throw new ParsingException(parser.getTokenLocation(), "unexpected token");
        }
        String policyType = parser.currentName();
        NamedPolicy policy = PARSER.parse(parser, policyType);
        token = parser.nextToken();
        // The wrapping object must close immediately after the policy body.
        if (token != XContentParser.Token.END_OBJECT) {
            throw new ParsingException(parser.getTokenLocation(), "unexpected token");
        }
        return policy;
    }

    private final String type;
    private final String name;
    // Raw bytes of the policy's query, or null if the policy has none.
    private final BytesReference query;
    private final List<String> indices;
    private final String matchField;
    private final List<String> enrichFields;

    NamedPolicy(String type, String name, BytesReference query, List<String> indices, String matchField, List<String> enrichFields) {
        this.type = type;
        this.name = name;
        this.query = query;
        this.indices = indices;
        this.matchField = matchField;
        this.enrichFields = enrichFields;
    }

    /** Returns the policy type taken from the wrapping field name. */
    public String getType() {
        return type;
    }

    /** Returns the policy name. */
    public String getName() {
        return name;
    }

    /** Returns the raw query bytes, or null if the policy has no query. */
    public BytesReference getQuery() {
        return query;
    }

    /** Returns the source indices the policy reads from. */
    public List<String> getIndices() {
        return indices;
    }

    /** Returns the field used to match documents against the source indices. */
    public String getMatchField() {
        return matchField;
    }

    /** Returns the fields copied into enriched documents. */
    public List<String> getEnrichFields() {
        return enrichFields;
    }
}
|
|
@ -0,0 +1,148 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.enrich;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
public final class PutPolicyRequest implements Validatable, ToXContentObject {
|
||||
|
||||
private final String name;
|
||||
private final String type;
|
||||
private BytesReference query;
|
||||
private final List<String> indices;
|
||||
private final String matchField;
|
||||
private final List<String> enrichFields;
|
||||
|
||||
public PutPolicyRequest(String name, String type, List<String> indices, String matchField, List<String> enrichFields) {
|
||||
if (Strings.hasLength(name) == false) {
|
||||
throw new IllegalArgumentException("name must be a non-null and non-empty string");
|
||||
}
|
||||
if (Strings.hasLength(type) == false) {
|
||||
throw new IllegalArgumentException("type must be a non-null and non-empty string");
|
||||
}
|
||||
if (indices == null || indices.isEmpty()) {
|
||||
throw new IllegalArgumentException("indices must be specified");
|
||||
}
|
||||
if (Strings.hasLength(matchField) == false) {
|
||||
throw new IllegalArgumentException("matchField must be a non-null and non-empty string");
|
||||
}
|
||||
if (enrichFields == null || enrichFields.isEmpty()) {
|
||||
throw new IllegalArgumentException("enrichFields must be specified");
|
||||
}
|
||||
|
||||
this.name = name;
|
||||
this.type = type;
|
||||
this.indices = indices;
|
||||
this.matchField = matchField;
|
||||
this.enrichFields = enrichFields;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public String getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public BytesReference getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
public void setQuery(BytesReference query) {
|
||||
this.query = query;
|
||||
}
|
||||
|
||||
public void setQuery(QueryBuilder query) throws IOException {
|
||||
setQuery(xContentToBytes(query));
|
||||
}
|
||||
|
||||
public List<String> getIndices() {
|
||||
return indices;
|
||||
}
|
||||
|
||||
public String getMatchField() {
|
||||
return matchField;
|
||||
}
|
||||
|
||||
public List<String> getEnrichFields() {
|
||||
return enrichFields;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.startObject(type);
|
||||
{
|
||||
builder.field(NamedPolicy.INDICES_FIELD.getPreferredName(), indices);
|
||||
if (query != null) {
|
||||
builder.field(NamedPolicy.QUERY_FIELD.getPreferredName(), asMap(query, builder.contentType()));
|
||||
}
|
||||
builder.field(NamedPolicy.MATCH_FIELD_FIELD.getPreferredName(), matchField);
|
||||
builder.field(NamedPolicy.ENRICH_FIELDS_FIELD.getPreferredName(), enrichFields);
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
PutPolicyRequest that = (PutPolicyRequest) o;
|
||||
return Objects.equals(name, that.name) &&
|
||||
Objects.equals(type, that.type) &&
|
||||
Objects.equals(query, that.query) &&
|
||||
Objects.equals(indices, that.indices) &&
|
||||
Objects.equals(matchField, that.matchField) &&
|
||||
Objects.equals(enrichFields, that.enrichFields);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(name, type, query, indices, matchField, enrichFields);
|
||||
}
|
||||
|
||||
private static BytesReference xContentToBytes(ToXContentObject object) throws IOException {
|
||||
try (XContentBuilder builder = JsonXContent.contentBuilder()) {
|
||||
object.toXContent(builder, ToXContentObject.EMPTY_PARAMS);
|
||||
return BytesReference.bytes(builder);
|
||||
}
|
||||
}
|
||||
|
||||
static Map<String, Object> asMap(BytesReference bytesReference, XContentType xContentType) {
|
||||
return bytesReference == null ? null : XContentHelper.convertToMap(bytesReference, true, xContentType).v2();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,24 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.enrich;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
|
||||
/**
 * Request for the enrich stats API. The API takes no parameters, so this is
 * a marker request; the response carries coordinator statistics and the list
 * of currently executing policies.
 */
public final class StatsRequest implements Validatable {
}
|
|
@ -0,0 +1,191 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.enrich;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.tasks.TaskInfo;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public final class StatsResponse {
|
||||
|
||||
private static ParseField EXECUTING_POLICIES_FIELD = new ParseField("executing_policies");
|
||||
private static ParseField COORDINATOR_STATS_FIELD = new ParseField("coordinator_stats");
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static final ConstructingObjectParser<StatsResponse, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"stats_response",
|
||||
true,
|
||||
args -> new StatsResponse((List<ExecutingPolicy>) args[0], (List<CoordinatorStats>) args[1])
|
||||
);
|
||||
|
||||
static {
|
||||
PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), ExecutingPolicy.PARSER::apply, EXECUTING_POLICIES_FIELD);
|
||||
PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), CoordinatorStats.PARSER::apply, COORDINATOR_STATS_FIELD);
|
||||
}
|
||||
|
||||
public static StatsResponse fromXContent(XContentParser parser) {
|
||||
return PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
private final List<ExecutingPolicy> executingPolicies;
|
||||
private final List<CoordinatorStats> coordinatorStats;
|
||||
|
||||
public StatsResponse(List<ExecutingPolicy> executingPolicies, List<CoordinatorStats> coordinatorStats) {
|
||||
this.executingPolicies = executingPolicies;
|
||||
this.coordinatorStats = coordinatorStats;
|
||||
}
|
||||
|
||||
public List<ExecutingPolicy> getExecutingPolicies() {
|
||||
return executingPolicies;
|
||||
}
|
||||
|
||||
public List<CoordinatorStats> getCoordinatorStats() {
|
||||
return coordinatorStats;
|
||||
}
|
||||
|
||||
public static final class CoordinatorStats {
|
||||
|
||||
static ParseField NODE_ID_FIELD = new ParseField("node_id");
|
||||
static ParseField QUEUE_SIZE_FIELD = new ParseField("queue_size");
|
||||
static ParseField REMOTE_REQUESTS_CONCURRENT_FIELD = new ParseField("remote_requests_current");
|
||||
static ParseField REMOTE_REQUESTS_TOTAL_FIELD = new ParseField("remote_requests_total");
|
||||
static ParseField EXECUTED_SEARCHES_FIELD = new ParseField("executed_searches_total");
|
||||
|
||||
private static final ConstructingObjectParser<CoordinatorStats, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"coordinator_stats_item",
|
||||
true,
|
||||
args -> new CoordinatorStats((String) args[0], (int) args[1], (int) args[2], (long) args[3], (long) args[4])
|
||||
);
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), NODE_ID_FIELD);
|
||||
PARSER.declareInt(ConstructingObjectParser.constructorArg(), QUEUE_SIZE_FIELD);
|
||||
PARSER.declareInt(ConstructingObjectParser.constructorArg(), REMOTE_REQUESTS_CONCURRENT_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), REMOTE_REQUESTS_TOTAL_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), EXECUTED_SEARCHES_FIELD);
|
||||
}
|
||||
|
||||
private final String nodeId;
|
||||
private final int queueSize;
|
||||
private final int remoteRequestsCurrent;
|
||||
private final long remoteRequestsTotal;
|
||||
private final long executedSearchesTotal;
|
||||
|
||||
public CoordinatorStats(String nodeId,
|
||||
int queueSize,
|
||||
int remoteRequestsCurrent,
|
||||
long remoteRequestsTotal,
|
||||
long executedSearchesTotal) {
|
||||
this.nodeId = nodeId;
|
||||
this.queueSize = queueSize;
|
||||
this.remoteRequestsCurrent = remoteRequestsCurrent;
|
||||
this.remoteRequestsTotal = remoteRequestsTotal;
|
||||
this.executedSearchesTotal = executedSearchesTotal;
|
||||
}
|
||||
|
||||
public String getNodeId() {
|
||||
return nodeId;
|
||||
}
|
||||
|
||||
public int getQueueSize() {
|
||||
return queueSize;
|
||||
}
|
||||
|
||||
public int getRemoteRequestsCurrent() {
|
||||
return remoteRequestsCurrent;
|
||||
}
|
||||
|
||||
public long getRemoteRequestsTotal() {
|
||||
return remoteRequestsTotal;
|
||||
}
|
||||
|
||||
public long getExecutedSearchesTotal() {
|
||||
return executedSearchesTotal;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
CoordinatorStats stats = (CoordinatorStats) o;
|
||||
return Objects.equals(nodeId, stats.nodeId) &&
|
||||
queueSize == stats.queueSize &&
|
||||
remoteRequestsCurrent == stats.remoteRequestsCurrent &&
|
||||
remoteRequestsTotal == stats.remoteRequestsTotal &&
|
||||
executedSearchesTotal == stats.executedSearchesTotal;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(nodeId, queueSize, remoteRequestsCurrent, remoteRequestsTotal, executedSearchesTotal);
|
||||
}
|
||||
}
|
||||
|
||||
public static class ExecutingPolicy {
|
||||
|
||||
static ParseField NAME_FIELD = new ParseField("name");
|
||||
static ParseField TASK_FIELD = new ParseField("task");
|
||||
|
||||
private static final ConstructingObjectParser<ExecutingPolicy, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"executing_policy_item",
|
||||
true,
|
||||
args -> new ExecutingPolicy((String) args[0], (TaskInfo) args[1])
|
||||
);
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME_FIELD);
|
||||
PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> TaskInfo.fromXContent(p), TASK_FIELD);
|
||||
}
|
||||
|
||||
private final String name;
|
||||
private final TaskInfo taskInfo;
|
||||
|
||||
public ExecutingPolicy(String name, TaskInfo taskInfo) {
|
||||
this.name = name;
|
||||
this.taskInfo = taskInfo;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public TaskInfo getTaskInfo() {
|
||||
return taskInfo;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
ExecutingPolicy that = (ExecutingPolicy) o;
|
||||
return name.equals(that.name) &&
|
||||
taskInfo.equals(that.taskInfo);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(name, taskInfo);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -322,10 +322,12 @@ public final class Role {
|
|||
public static final String READ_CCR = "read_ccr";
|
||||
public static final String MANAGE_ILM = "manage_ilm";
|
||||
public static final String READ_ILM = "read_ilm";
|
||||
public static final String MANAGE_ENRICH = "manage_enrich";
|
||||
public static final String[] ALL_ARRAY = new String[] { NONE, ALL, MONITOR, MONITOR_TRANSFORM_DEPRECATED, MONITOR_TRANSFORM,
|
||||
MONITOR_ML, MONITOR_WATCHER, MONITOR_ROLLUP, MANAGE, MANAGE_TRANSFORM_DEPRECATED, MANAGE_TRANSFORM,
|
||||
MANAGE_ML, MANAGE_WATCHER, MANAGE_ROLLUP, MANAGE_INDEX_TEMPLATES, MANAGE_INGEST_PIPELINES, TRANSPORT_CLIENT,
|
||||
MANAGE_SECURITY, MANAGE_SAML, MANAGE_OIDC, MANAGE_TOKEN, MANAGE_PIPELINE, MANAGE_CCR, READ_CCR, MANAGE_ILM, READ_ILM};
|
||||
MANAGE_SECURITY, MANAGE_SAML, MANAGE_OIDC, MANAGE_TOKEN, MANAGE_PIPELINE, MANAGE_CCR, READ_CCR, MANAGE_ILM, READ_ILM,
|
||||
MANAGE_ENRICH };
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -0,0 +1,86 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.elasticsearch.client.core.AcknowledgedResponse;
|
||||
import org.elasticsearch.client.enrich.DeletePolicyRequest;
|
||||
import org.elasticsearch.client.enrich.ExecutePolicyRequest;
|
||||
import org.elasticsearch.client.enrich.ExecutePolicyResponse;
|
||||
import org.elasticsearch.client.enrich.GetPolicyRequest;
|
||||
import org.elasticsearch.client.enrich.GetPolicyResponse;
|
||||
import org.elasticsearch.client.enrich.PutPolicyRequest;
|
||||
import org.elasticsearch.client.enrich.StatsRequest;
|
||||
import org.elasticsearch.client.enrich.StatsResponse;
|
||||
import org.elasticsearch.client.indices.CreateIndexRequest;
|
||||
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
/**
 * Integration test that exercises the full lifecycle of an enrich policy
 * through the high level REST client: put, get, stats, execute and delete.
 */
public class EnrichIT extends ESRestHighLevelClientTestCase {

    public void testCRUD() throws Exception {
        // Create a source index with a keyword field for the policy to match on.
        CreateIndexRequest createIndexRequest = new CreateIndexRequest("my-index")
            .mapping(Collections.singletonMap("properties", Collections.singletonMap("enrich_key",
                Collections.singletonMap("type", "keyword"))));
        highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);

        final EnrichClient enrichClient = highLevelClient().enrich();
        PutPolicyRequest putPolicyRequest = new PutPolicyRequest("my-policy", "match",
            Collections.singletonList("my-index"), "enrich_key", Collections.singletonList("enrich_value"));
        AcknowledgedResponse putPolicyResponse = execute(putPolicyRequest, enrichClient::putPolicy, enrichClient::putPolicyAsync);
        assertThat(putPolicyResponse.isAcknowledged(), is(true));

        // Fetching by name or fetching all policies should both return the single stored policy.
        GetPolicyRequest getPolicyRequest = randomBoolean() ? new GetPolicyRequest("my-policy") : new GetPolicyRequest();
        GetPolicyResponse getPolicyResponse = execute(getPolicyRequest, enrichClient::getPolicy, enrichClient::getPolicyAsync);
        assertThat(getPolicyResponse.getPolicies().size(), equalTo(1));
        assertThat(getPolicyResponse.getPolicies().get(0).getType(), equalTo(putPolicyRequest.getType()));
        assertThat(getPolicyResponse.getPolicies().get(0).getIndices(), equalTo(putPolicyRequest.getIndices()));
        assertThat(getPolicyResponse.getPolicies().get(0).getMatchField(), equalTo(putPolicyRequest.getMatchField()));
        assertThat(getPolicyResponse.getPolicies().get(0).getEnrichFields(), equalTo(putPolicyRequest.getEnrichFields()));

        // Nothing is executing yet; coordinator stats for the single node should have sane values.
        StatsRequest statsRequest = new StatsRequest();
        StatsResponse statsResponse = execute(statsRequest, enrichClient::stats, enrichClient::statsAsync);
        assertThat(statsResponse.getExecutingPolicies().size(), equalTo(0));
        assertThat(statsResponse.getCoordinatorStats().size(), equalTo(1));
        assertThat(statsResponse.getCoordinatorStats().get(0).getNodeId(), notNullValue());
        assertThat(statsResponse.getCoordinatorStats().get(0).getQueueSize(), greaterThanOrEqualTo(0));
        assertThat(statsResponse.getCoordinatorStats().get(0).getRemoteRequestsCurrent(), greaterThanOrEqualTo(0));
        assertThat(statsResponse.getCoordinatorStats().get(0).getRemoteRequestsTotal(), greaterThanOrEqualTo(0L));
        assertThat(statsResponse.getCoordinatorStats().get(0).getExecutedSearchesTotal(), greaterThanOrEqualTo(0L));

        // Execute the policy synchronously and verify the execution completed.
        ExecutePolicyRequest executePolicyRequest = new ExecutePolicyRequest("my-policy");
        ExecutePolicyResponse executePolicyResponse =
            execute(executePolicyRequest, enrichClient::executePolicy, enrichClient::executePolicyAsync);
        assertThat(executePolicyResponse.getExecutionStatus().getPhase(), equalTo("COMPLETE"));

        DeletePolicyRequest deletePolicyRequest = new DeletePolicyRequest("my-policy");
        AcknowledgedResponse deletePolicyResponse =
            execute(deletePolicyRequest, enrichClient::deletePolicy, enrichClient::deletePolicyAsync);
        assertThat(deletePolicyResponse.isAcknowledged(), is(true));

        // After deletion no policies should remain.
        getPolicyRequest = new GetPolicyRequest();
        getPolicyResponse = execute(getPolicyRequest, enrichClient::getPolicy, enrichClient::getPolicyAsync);
        assertThat(getPolicyResponse.getPolicies().size(), equalTo(0));
    }

}
|
|
@ -0,0 +1,114 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.client.methods.HttpDelete;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.elasticsearch.client.enrich.DeletePolicyRequest;
|
||||
import org.elasticsearch.client.enrich.ExecutePolicyRequest;
|
||||
import org.elasticsearch.client.enrich.GetPolicyRequest;
|
||||
import org.elasticsearch.client.enrich.PutPolicyRequest;
|
||||
import org.elasticsearch.client.enrich.PutPolicyRequestTests;
|
||||
import org.elasticsearch.client.enrich.StatsRequest;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
/**
 * Unit tests verifying that EnrichRequestConverters translates each enrich
 * request object into the expected HTTP method, endpoint, parameters and body.
 */
public class EnrichRequestConvertersTests extends ESTestCase {

    public void testPutPolicy() throws Exception {
        PutPolicyRequest request = PutPolicyRequestTests.createTestInstance();
        Request result = EnrichRequestConverters.putPolicy(request);

        assertThat(result.getMethod(), equalTo(HttpPut.METHOD_NAME));
        assertThat(result.getEndpoint(), equalTo("/_enrich/policy/" + request.getName()));
        assertThat(result.getParameters().size(), equalTo(0));
        // The policy definition is sent as the request body.
        RequestConvertersTests.assertToXContentBody(request, result.getEntity());
    }

    public void testDeletePolicy() {
        DeletePolicyRequest request = new DeletePolicyRequest(randomAlphaOfLength(4));
        Request result = EnrichRequestConverters.deletePolicy(request);

        assertThat(result.getMethod(), equalTo(HttpDelete.METHOD_NAME));
        assertThat(result.getEndpoint(), equalTo("/_enrich/policy/" + request.getName()));
        assertThat(result.getParameters().size(), equalTo(0));
        assertThat(result.getEntity(), nullValue());
    }

    public void testGetPolicy() {
        // Single policy name.
        GetPolicyRequest request = new GetPolicyRequest(randomAlphaOfLength(4));
        Request result = EnrichRequestConverters.getPolicy(request);

        assertThat(result.getMethod(), equalTo(HttpGet.METHOD_NAME));
        assertThat(result.getEndpoint(), equalTo("/_enrich/policy/" + request.getNames().get(0)));
        assertThat(result.getParameters().size(), equalTo(0));
        assertThat(result.getEntity(), nullValue());

        // Multiple policy names are joined with a comma in the endpoint.
        request = new GetPolicyRequest(randomAlphaOfLength(4), randomAlphaOfLength(4));
        result = EnrichRequestConverters.getPolicy(request);

        assertThat(result.getMethod(), equalTo(HttpGet.METHOD_NAME));
        assertThat(result.getEndpoint(), equalTo("/_enrich/policy/" + request.getNames().get(0) + "," + request.getNames().get(1)));
        assertThat(result.getParameters().size(), equalTo(0));
        assertThat(result.getEntity(), nullValue());

        // No names: fetch all policies.
        request = new GetPolicyRequest();
        result = EnrichRequestConverters.getPolicy(request);

        assertThat(result.getMethod(), equalTo(HttpGet.METHOD_NAME));
        assertThat(result.getEndpoint(), equalTo("/_enrich/policy"));
        assertThat(result.getParameters().size(), equalTo(0));
        assertThat(result.getEntity(), nullValue());
    }

    public void testStats() {
        StatsRequest request = new StatsRequest();
        Request result = EnrichRequestConverters.stats(request);

        assertThat(result.getMethod(), equalTo(HttpGet.METHOD_NAME));
        assertThat(result.getEndpoint(), equalTo("/_enrich/_stats"));
        assertThat(result.getParameters().size(), equalTo(0));
        assertThat(result.getEntity(), nullValue());
    }

    public void testExecutePolicy() {
        // Without wait_for_completion the request has no parameters.
        ExecutePolicyRequest request = new ExecutePolicyRequest(randomAlphaOfLength(4));
        Request result = EnrichRequestConverters.executePolicy(request);

        assertThat(result.getMethod(), equalTo(HttpPost.METHOD_NAME));
        assertThat(result.getEndpoint(), equalTo("/_enrich/policy/" + request.getName() + "/_execute"));
        assertThat(result.getParameters().size(), equalTo(0));
        assertThat(result.getEntity(), nullValue());

        // When set, wait_for_completion is propagated as a request parameter.
        request = new ExecutePolicyRequest(randomAlphaOfLength(4));
        request.setWaitForCompletion(randomBoolean());
        result = EnrichRequestConverters.executePolicy(request);

        assertThat(result.getMethod(), equalTo(HttpPost.METHOD_NAME));
        assertThat(result.getEndpoint(), equalTo("/_enrich/policy/" + request.getName() + "/_execute"));
        assertThat(result.getParameters().size(), equalTo(1));
        assertThat(result.getParameters().get("wait_for_completion"), equalTo(request.getWaitForCompletion().toString()));
        assertThat(result.getEntity(), nullValue());
    }

}
|
|
@ -857,6 +857,7 @@ public class RestHighLevelClientTests extends ESTestCase {
|
|||
apiName.startsWith("security.") == false &&
|
||||
apiName.startsWith("index_lifecycle.") == false &&
|
||||
apiName.startsWith("ccr.") == false &&
|
||||
apiName.startsWith("enrich.") == false &&
|
||||
apiName.startsWith("transform.") == false &&
|
||||
apiName.endsWith("freeze") == false &&
|
||||
apiName.endsWith("reload_analyzers") == false &&
|
||||
|
|
|
@ -0,0 +1,314 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.documentation;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.LatchedActionListener;
|
||||
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
|
||||
import org.elasticsearch.client.RequestOptions;
|
||||
import org.elasticsearch.client.RestHighLevelClient;
|
||||
import org.elasticsearch.client.core.AcknowledgedResponse;
|
||||
import org.elasticsearch.client.enrich.DeletePolicyRequest;
|
||||
import org.elasticsearch.client.enrich.ExecutePolicyRequest;
|
||||
import org.elasticsearch.client.enrich.ExecutePolicyResponse;
|
||||
import org.elasticsearch.client.enrich.NamedPolicy;
|
||||
import org.elasticsearch.client.enrich.GetPolicyRequest;
|
||||
import org.elasticsearch.client.enrich.GetPolicyResponse;
|
||||
import org.elasticsearch.client.enrich.PutPolicyRequest;
|
||||
import org.elasticsearch.client.enrich.StatsRequest;
|
||||
import org.elasticsearch.client.enrich.StatsResponse;
|
||||
import org.elasticsearch.client.enrich.StatsResponse.CoordinatorStats;
|
||||
import org.elasticsearch.client.enrich.StatsResponse.ExecutingPolicy;
|
||||
import org.elasticsearch.client.indices.CreateIndexRequest;
|
||||
import org.junit.After;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
public class EnrichDocumentationIT extends ESRestHighLevelClientTestCase {
|
||||
|
||||
@After
public void cleanup() {
    // Delete the policy created by the documentation snippets so each test
    // starts from a clean state.
    RestHighLevelClient client = highLevelClient();
    DeletePolicyRequest deletePolicyRequest = new DeletePolicyRequest("users-policy");
    try {
        client.enrich().deletePolicy(deletePolicyRequest, RequestOptions.DEFAULT);
    } catch (Exception e) {
        // ignore... it is ok if policy has already been removed
    }
}
|
||||
|
||||
// Documentation snippet test for the put enrich policy API. The code between
// tag::/end:: markers is extracted verbatim into the docs, so comments are
// only added outside those regions.
public void testPutPolicy() throws Exception {
    RestHighLevelClient client = highLevelClient();
    // tag::enrich-put-policy-request
    PutPolicyRequest putPolicyRequest = new PutPolicyRequest(
        "users-policy", "match", Arrays.asList("users"),
        "email", Arrays.asList("address", "zip", "city", "state"));
    // end::enrich-put-policy-request

    // Synchronous execution snippet.
    // tag::enrich-put-policy-execute
    AcknowledgedResponse putPolicyResponse =
        client.enrich().putPolicy(putPolicyRequest, RequestOptions.DEFAULT);
    // end::enrich-put-policy-execute

    // tag::enrich-put-policy-response
    boolean isAcknowledged =
        putPolicyResponse.isAcknowledged(); // <1>
    // end::enrich-put-policy-response

    // Asynchronous execution snippet with a listener.
    // tag::enrich-put-policy-execute-listener
    ActionListener<AcknowledgedResponse> listener =
        new ActionListener<AcknowledgedResponse>() {
            @Override
            public void onResponse(AcknowledgedResponse response) { // <1>
                boolean isAcknowledged = response.isAcknowledged();
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
    // end::enrich-put-policy-execute-listener

    // Replace the empty listener by a blocking listener in test
    final CountDownLatch latch = new CountDownLatch(1);
    listener = new LatchedActionListener<>(listener, latch);

    // tag::enrich-put-policy-execute-async
    client.enrich().putPolicyAsync(putPolicyRequest,
        RequestOptions.DEFAULT, listener); // <1>
    // end::enrich-put-policy-execute-async

    assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
|
||||
|
||||
    /**
     * Documentation test for the delete enrich policy API. The {@code // tag::}/
     * {@code // end::} comment pairs delimit snippets extracted into the
     * asciidoc reference docs, so the code between them must stay as-is.
     */
    public void testDeletePolicy() throws Exception {
        RestHighLevelClient client = highLevelClient();

        {
            // Add a policy, so that it can be deleted:
            PutPolicyRequest putPolicyRequest = new PutPolicyRequest(
                "users-policy", "match", Arrays.asList("users"),
                "email", Arrays.asList("address", "zip", "city", "state"));
            client.enrich().putPolicy(putPolicyRequest, RequestOptions.DEFAULT);
        }

        // tag::enrich-delete-policy-request
        DeletePolicyRequest deletePolicyRequest =
            new DeletePolicyRequest("users-policy");
        // end::enrich-delete-policy-request

        // Synchronous execution snippet.
        // tag::enrich-delete-policy-execute
        AcknowledgedResponse deletePolicyResponse = client.enrich()
            .deletePolicy(deletePolicyRequest, RequestOptions.DEFAULT);
        // end::enrich-delete-policy-execute

        // tag::enrich-delete-policy-response
        boolean isAcknowledged =
            deletePolicyResponse.isAcknowledged(); // <1>
        // end::enrich-delete-policy-response

        // Listener shown in the docs for the asynchronous variant.
        // tag::enrich-delete-policy-execute-listener
        ActionListener<AcknowledgedResponse> listener =
            new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse response) { // <1>
                    boolean isAcknowledged = response.isAcknowledged();
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
        // end::enrich-delete-policy-execute-listener

        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::enrich-delete-policy-execute-async
        client.enrich().deletePolicyAsync(deletePolicyRequest,
            RequestOptions.DEFAULT, listener); // <1>
        // end::enrich-delete-policy-execute-async

        // Fail the test rather than hang if the async call never completes.
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
|
||||
|
||||
    /**
     * Documentation test for the get enrich policy API. The {@code // tag::}/
     * {@code // end::} comment pairs delimit snippets extracted into the
     * asciidoc reference docs, so the code between them must stay as-is.
     */
    public void testGetPolicy() throws Exception {
        RestHighLevelClient client = highLevelClient();

        // Store a policy up front so the get request below has something to return.
        PutPolicyRequest putPolicyRequest = new PutPolicyRequest(
            "users-policy", "match", Collections.singletonList("users"),
            "email", Arrays.asList("address", "zip", "city", "state"));
        client.enrich().putPolicy(putPolicyRequest, RequestOptions.DEFAULT);

        // tag::enrich-get-policy-request
        GetPolicyRequest getPolicyRequest = new GetPolicyRequest("users-policy");
        // end::enrich-get-policy-request

        // Synchronous execution snippet.
        // tag::enrich-get-policy-execute
        GetPolicyResponse getPolicyResponse =
            client.enrich().getPolicy(getPolicyRequest, RequestOptions.DEFAULT);
        // end::enrich-get-policy-execute

        // tag::enrich-get-policy-response
        List<NamedPolicy> policies = getPolicyResponse.getPolicies(); // <1>
        NamedPolicy policy = policies.get(0);
        // end::enrich-get-policy-response

        // Listener shown in the docs for the asynchronous variant.
        // tag::enrich-get-policy-execute-listener
        ActionListener<GetPolicyResponse> listener =
            new ActionListener<GetPolicyResponse>() {
                @Override
                public void onResponse(GetPolicyResponse response) { // <1>
                    List<NamedPolicy> policies = response.getPolicies();
                    NamedPolicy policy = policies.get(0);
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
        // end::enrich-get-policy-execute-listener

        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::enrich-get-policy-execute-async
        client.enrich().getPolicyAsync(getPolicyRequest,
            RequestOptions.DEFAULT, listener); // <1>
        // end::enrich-get-policy-execute-async

        // Fail the test rather than hang if the async call never completes.
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
|
||||
|
||||
public void testStats() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
// tag::enrich-stats-request
|
||||
StatsRequest statsRequest = new StatsRequest();
|
||||
// end::enrich-stats-request
|
||||
|
||||
// tag::enrich-stats-execute
|
||||
StatsResponse statsResponse =
|
||||
client.enrich().stats(statsRequest, RequestOptions.DEFAULT);
|
||||
// end::enrich-stats-execute
|
||||
|
||||
// tag::enrich-stats-response
|
||||
List<ExecutingPolicy> executingPolicies =
|
||||
statsResponse.getExecutingPolicies(); // <1>
|
||||
List<CoordinatorStats> coordinatorStats =
|
||||
statsResponse.getCoordinatorStats(); // <2>
|
||||
// end::enrich-stats-response
|
||||
|
||||
// tag::enrich-stats-execute-listener
|
||||
ActionListener<StatsResponse> listener =
|
||||
new ActionListener<StatsResponse>() {
|
||||
@Override
|
||||
public void onResponse(StatsResponse response) { // <1>
|
||||
List<ExecutingPolicy> executingPolicies =
|
||||
statsResponse.getExecutingPolicies();
|
||||
List<CoordinatorStats> coordinatorStats =
|
||||
statsResponse.getCoordinatorStats();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::enrich-stats-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::enrich-stats-execute-async
|
||||
client.enrich().statsAsync(statsRequest, RequestOptions.DEFAULT,
|
||||
listener); // <1>
|
||||
// end::enrich-stats-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
    /**
     * Documentation test for the execute enrich policy API. The {@code // tag::}/
     * {@code // end::} comment pairs delimit snippets extracted into the
     * asciidoc reference docs, so the code between them must stay as-is.
     */
    public void testExecutePolicy() throws Exception {
        RestHighLevelClient client = highLevelClient();

        {
            // Create the source index (with an "email" keyword field) and the
            // policy that the execute request below will run.
            CreateIndexRequest createIndexRequest = new CreateIndexRequest("users")
                .mapping(Collections.singletonMap("properties", Collections.singletonMap("email",
                    Collections.singletonMap("type", "keyword"))));
            client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
            PutPolicyRequest putPolicyRequest = new PutPolicyRequest(
                "users-policy", "match", Collections.singletonList("users"),
                "email", Arrays.asList("address", "zip", "city", "state"));
            client.enrich().putPolicy(putPolicyRequest, RequestOptions.DEFAULT);
        }

        // tag::enrich-execute-policy-request
        ExecutePolicyRequest request =
            new ExecutePolicyRequest("users-policy");
        // end::enrich-execute-policy-request

        // Synchronous execution snippet.
        // tag::enrich-execute-policy-execute
        ExecutePolicyResponse response =
            client.enrich().executePolicy(request, RequestOptions.DEFAULT);
        // end::enrich-execute-policy-execute

        // tag::enrich-execute-policy-response
        ExecutePolicyResponse.ExecutionStatus status =
            response.getExecutionStatus();
        // end::enrich-execute-policy-response

        // Listener shown in the docs for the asynchronous variant.
        // tag::enrich-execute-policy-execute-listener
        ActionListener<ExecutePolicyResponse> listener =
            new ActionListener<ExecutePolicyResponse>() {
                @Override
                public void onResponse(ExecutePolicyResponse response) { // <1>
                    ExecutePolicyResponse.ExecutionStatus status =
                        response.getExecutionStatus();
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
        // end::enrich-execute-policy-execute-listener

        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::enrich-execute-policy-execute-async
        client.enrich().executePolicyAsync(request, RequestOptions.DEFAULT,
            listener); // <1>
        // end::enrich-execute-policy-execute-async

        // Fail the test rather than hang if the async call never completes.
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
|
||||
|
||||
}
|
|
@ -681,7 +681,7 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
List<Role> roles = response.getRoles();
|
||||
assertNotNull(response);
|
||||
// 29 system roles plus the three we created
|
||||
assertThat(roles.size(), equalTo(32));
|
||||
assertThat(roles.size(), equalTo(33));
|
||||
}
|
||||
|
||||
{
|
||||
|
|
|
@ -0,0 +1,61 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.enrich;
|
||||
|
||||
import org.elasticsearch.client.AbstractResponseTestCase;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.tasks.TaskId;
|
||||
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
public class ExecutePolicyResponseTests extends AbstractResponseTestCase<ExecuteEnrichPolicyAction.Response, ExecutePolicyResponse> {
|
||||
|
||||
@Override
|
||||
protected ExecuteEnrichPolicyAction.Response createServerTestInstance(XContentType xContentType) {
|
||||
if (randomBoolean()) {
|
||||
return new ExecuteEnrichPolicyAction.Response(new ExecuteEnrichPolicyStatus(randomAlphaOfLength(4)));
|
||||
} else {
|
||||
return new ExecuteEnrichPolicyAction.Response(new TaskId(randomAlphaOfLength(4), randomNonNegativeLong()));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ExecutePolicyResponse doParseToClientInstance(XContentParser parser) throws IOException {
|
||||
return ExecutePolicyResponse.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void assertInstances(ExecuteEnrichPolicyAction.Response serverTestInstance, ExecutePolicyResponse clientInstance) {
|
||||
if (serverTestInstance.getStatus() != null) {
|
||||
assertThat(clientInstance.getExecutionStatus().getPhase(), equalTo(serverTestInstance.getStatus().getPhase()));
|
||||
assertThat(clientInstance.getTaskId(), nullValue());
|
||||
} else if (serverTestInstance.getTaskId() != null) {
|
||||
assertThat(clientInstance.getTaskId(), equalTo(clientInstance.getTaskId()));
|
||||
assertThat(clientInstance.getExecutionStatus(), nullValue());
|
||||
} else {
|
||||
assert false;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,95 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.enrich;
|
||||
|
||||
import org.elasticsearch.client.AbstractResponseTestCase;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
|
||||
import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.UncheckedIOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
/**
 * Round-trip test: serializes a random server-side
 * {@link GetEnrichPolicyAction.Response} and checks that the client-side
 * {@link GetPolicyResponse} parsed from it exposes the same policies
 * field by field.
 */
public class GetPolicyResponseTests extends AbstractResponseTestCase<GetEnrichPolicyAction.Response, GetPolicyResponse> {

    @Override
    protected GetEnrichPolicyAction.Response createServerTestInstance(XContentType xContentType) {
        // Between 0 and 8 randomly named policies; 0 covers the empty response.
        int numPolicies = randomIntBetween(0, 8);
        Map<String, EnrichPolicy> policies = new HashMap<>(numPolicies);
        for (int i = 0; i < numPolicies; i++) {
            policies.put(randomAlphaOfLength(4), createRandomEnrichPolicy(xContentType));
        }
        return new GetEnrichPolicyAction.Response(policies);
    }

    @Override
    protected GetPolicyResponse doParseToClientInstance(XContentParser parser) throws IOException {
        return GetPolicyResponse.fromXContent(parser);
    }

    @Override
    protected void assertInstances(GetEnrichPolicyAction.Response serverTestInstance, GetPolicyResponse clientInstance) {
        // NOTE(review): compares client and server policies by list index,
        // which assumes both sides expose the policies in the same order —
        // confirm this holds for the map-backed server response.
        assertThat(clientInstance.getPolicies().size(), equalTo(serverTestInstance.getPolicies().size()));
        for (int i = 0; i < clientInstance.getPolicies().size(); i++) {
            assertThat(clientInstance.getPolicies().get(i).getType(),
                equalTo(serverTestInstance.getPolicies().get(i).getPolicy().getType()));
            assertThat(clientInstance.getPolicies().get(i).getName(),
                equalTo(serverTestInstance.getPolicies().get(i).getName()));
            assertThat(clientInstance.getPolicies().get(i).getIndices(),
                equalTo(serverTestInstance.getPolicies().get(i).getPolicy().getIndices()));
            // The query is optional: either both sides carry it or neither does.
            if (clientInstance.getPolicies().get(i).getQuery() != null) {
                assertThat(clientInstance.getPolicies().get(i).getQuery(),
                    equalTo(serverTestInstance.getPolicies().get(i).getPolicy().getQuery().getQuery()));
            } else {
                assertThat(serverTestInstance.getPolicies().get(i).getPolicy().getQuery(), nullValue());
            }
            assertThat(clientInstance.getPolicies().get(i).getMatchField(),
                equalTo(serverTestInstance.getPolicies().get(i).getPolicy().getMatchField()));
            assertThat(clientInstance.getPolicies().get(i).getEnrichFields(),
                equalTo(serverTestInstance.getPolicies().get(i).getPolicy().getEnrichFields()));
        }
    }

    // Builds a random policy; roughly half the instances carry an (empty
    // object) query source to exercise the optional-query code path.
    private static EnrichPolicy createRandomEnrichPolicy(XContentType xContentType){
        try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
            builder.startObject();
            builder.endObject();
            BytesReference querySource = BytesArray.bytes(builder);
            return new EnrichPolicy(
                randomAlphaOfLength(4),
                randomBoolean() ? new EnrichPolicy.QuerySource(querySource, xContentType) : null,
                Arrays.asList(generateRandomStringArray(8, 4, false, false)),
                randomAlphaOfLength(4),
                Arrays.asList(generateRandomStringArray(8, 4, false, false))
            );
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}
|
|
@ -0,0 +1,105 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.enrich;
|
||||
|
||||
import org.elasticsearch.client.AbstractRequestTestCase;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.query.MatchAllQueryBuilder;
|
||||
import org.elasticsearch.test.EqualsHashCodeTestUtils;
|
||||
import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.UncheckedIOException;
|
||||
import java.util.Arrays;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
/**
 * Tests for the client-side {@link PutPolicyRequest}: argument validation,
 * equals/hashCode, and a serialization round trip against the server-side
 * {@link PutEnrichPolicyAction.Request}.
 */
public class PutPolicyRequestTests extends AbstractRequestTestCase<PutPolicyRequest, PutEnrichPolicyAction.Request> {

    // A fully populated request validates cleanly; a null match field is
    // rejected at construction time with a descriptive message.
    public void testValidate() {
        PutPolicyRequest request = createClientTestInstance();
        assertThat(request.validate().isPresent(), is(false));

        Exception e = expectThrows(IllegalArgumentException.class,
            () -> new PutPolicyRequest(request.getName(), request.getType(), request.getIndices(), null, request.getEnrichFields()));
        assertThat(e.getMessage(), containsString("matchField must be a non-null and non-empty string"));
    }

    public void testEqualsAndHashcode() {
        PutPolicyRequest testInstance = createTestInstance();
        // The copy function rebuilds an equal instance field by field,
        // including the optional query.
        EqualsHashCodeTestUtils.checkEqualsAndHashCode(testInstance, (original) -> {
            PutPolicyRequest copy = new PutPolicyRequest(original.getName(), original.getType(), original.getIndices(),
                original.getMatchField(), original.getEnrichFields());
            copy.setQuery(original.getQuery());
            return copy;
        });
    }

    @Override
    protected PutPolicyRequest createClientTestInstance() {
        // The name must be fixed because doParseToServerInstance() below
        // parses with the hard-coded name "name".
        return createTestInstance("name");
    }

    public static PutPolicyRequest createTestInstance() {
        return createTestInstance(randomAlphaOfLength(4));
    }

    // Builds a random request; roughly half the instances carry a
    // match_all query to exercise the optional-query code path.
    public static PutPolicyRequest createTestInstance(String name) {
        PutPolicyRequest testInstance = new PutPolicyRequest(
            name,
            randomAlphaOfLength(4),
            Arrays.asList(generateRandomStringArray(4, 4, false, false)),
            randomAlphaOfLength(4),
            Arrays.asList(generateRandomStringArray(4, 4, false, false))
        );
        if (randomBoolean()) {
            try {
                testInstance.setQuery(new MatchAllQueryBuilder());
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }
        return testInstance;
    }

    @Override
    protected PutEnrichPolicyAction.Request doParseToServerInstance(XContentParser parser) throws IOException {
        return PutEnrichPolicyAction.fromXContent(parser, "name");
    }

    @Override
    protected void assertInstances(PutEnrichPolicyAction.Request serverInstance, PutPolicyRequest clientTestInstance) {
        assertThat(clientTestInstance.getName(), equalTo(serverInstance.getName()));
        assertThat(clientTestInstance.getType(), equalTo(serverInstance.getPolicy().getType()));
        assertThat(clientTestInstance.getIndices(), equalTo(serverInstance.getPolicy().getIndices()));
        if (clientTestInstance.getQuery() != null) {
            // Queries are compared as parsed maps so that formatting
            // differences between the two representations don't matter.
            XContentType type = serverInstance.getPolicy().getQuery().getContentType();
            assertThat(PutPolicyRequest.asMap(clientTestInstance.getQuery(), type),
                equalTo(PutPolicyRequest.asMap(serverInstance.getPolicy().getQuery().getQuery(), type)));
        } else {
            assertThat(serverInstance.getPolicy().getQuery(), nullValue());
        }
        assertThat(clientTestInstance.getMatchField(), equalTo(serverInstance.getPolicy().getMatchField()));
        assertThat(clientTestInstance.getEnrichFields(), equalTo(serverInstance.getPolicy().getEnrichFields()));
    }
}
|
|
@ -0,0 +1,98 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.enrich;
|
||||
|
||||
import org.elasticsearch.client.AbstractResponseTestCase;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.tasks.TaskId;
|
||||
import org.elasticsearch.tasks.TaskInfo;
|
||||
import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class StatsResponseTests extends AbstractResponseTestCase<EnrichStatsAction.Response, StatsResponse> {
|
||||
|
||||
@Override
|
||||
protected EnrichStatsAction.Response createServerTestInstance(XContentType xContentType) {
|
||||
int numExecutingPolicies = randomIntBetween(0, 16);
|
||||
List<EnrichStatsAction.Response.ExecutingPolicy> executingPolicies = new ArrayList<>(numExecutingPolicies);
|
||||
for (int i = 0; i < numExecutingPolicies; i++) {
|
||||
TaskInfo taskInfo = randomTaskInfo();
|
||||
executingPolicies.add(new EnrichStatsAction.Response.ExecutingPolicy(randomAlphaOfLength(4), taskInfo));
|
||||
}
|
||||
int numCoordinatingStats = randomIntBetween(0, 16);
|
||||
List<EnrichStatsAction.Response.CoordinatorStats> coordinatorStats = new ArrayList<>(numCoordinatingStats);
|
||||
for (int i = 0; i < numCoordinatingStats; i++) {
|
||||
EnrichStatsAction.Response.CoordinatorStats stats = new EnrichStatsAction.Response.CoordinatorStats(
|
||||
randomAlphaOfLength(4), randomIntBetween(0, 8096), randomIntBetween(0, 8096), randomNonNegativeLong(),
|
||||
randomNonNegativeLong());
|
||||
coordinatorStats.add(stats);
|
||||
}
|
||||
return new EnrichStatsAction.Response(executingPolicies, coordinatorStats);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected StatsResponse doParseToClientInstance(XContentParser parser) throws IOException {
|
||||
return StatsResponse.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void assertInstances(EnrichStatsAction.Response serverTestInstance, StatsResponse clientInstance) {
|
||||
assertThat(clientInstance.getExecutingPolicies().size(), equalTo(serverTestInstance.getExecutingPolicies().size()));
|
||||
for (int i = 0; i < clientInstance.getExecutingPolicies().size(); i++) {
|
||||
StatsResponse.ExecutingPolicy actual = clientInstance.getExecutingPolicies().get(i);
|
||||
EnrichStatsAction.Response.ExecutingPolicy expected = serverTestInstance.getExecutingPolicies().get(i);
|
||||
assertThat(actual.getName(), equalTo(expected.getName()));
|
||||
assertThat(actual.getTaskInfo(), equalTo(actual.getTaskInfo()));
|
||||
}
|
||||
|
||||
assertThat(clientInstance.getCoordinatorStats().size(), equalTo(serverTestInstance.getCoordinatorStats().size()));
|
||||
for (int i = 0; i < clientInstance.getCoordinatorStats().size(); i++) {
|
||||
StatsResponse.CoordinatorStats actual = clientInstance.getCoordinatorStats().get(i);
|
||||
EnrichStatsAction.Response.CoordinatorStats expected = serverTestInstance.getCoordinatorStats().get(i);
|
||||
assertThat(actual.getNodeId(), equalTo(expected.getNodeId()));
|
||||
assertThat(actual.getQueueSize(), equalTo(expected.getQueueSize()));
|
||||
assertThat(actual.getRemoteRequestsCurrent(), equalTo(expected.getRemoteRequestsCurrent()));
|
||||
assertThat(actual.getRemoteRequestsTotal(), equalTo(expected.getRemoteRequestsTotal()));
|
||||
assertThat(actual.getExecutedSearchesTotal(), equalTo(expected.getExecutedSearchesTotal()));
|
||||
}
|
||||
}
|
||||
|
||||
private static TaskInfo randomTaskInfo() {
|
||||
TaskId taskId = new TaskId(randomAlphaOfLength(5), randomLong());
|
||||
String type = randomAlphaOfLength(5);
|
||||
String action = randomAlphaOfLength(5);
|
||||
String description = randomAlphaOfLength(5);
|
||||
long startTime = randomLong();
|
||||
long runningTimeNanos = randomLong();
|
||||
boolean cancellable = randomBoolean();
|
||||
TaskId parentTaskId = TaskId.EMPTY_TASK_ID;
|
||||
Map<String, String> headers = randomBoolean() ?
|
||||
Collections.emptyMap() :
|
||||
Collections.singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(5));
|
||||
return new TaskInfo(taskId, type, action, description, null, startTime, runningTimeNanos, cancellable, parentTaskId, headers);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,31 @@
|
|||
--
|
||||
:api: enrich-delete-policy
|
||||
:request: DeletePolicyRequest
|
||||
:response: AcknowledgedResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Delete Policy API
|
||||
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Request
|
||||
|
||||
The Delete Policy API deletes an enrich policy from Elasticsearch.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Response
|
||||
|
||||
The returned +{response}+ indicates if the delete policy request was acknowledged.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> Whether delete policy request was acknowledged.
|
||||
|
||||
include::../execution.asciidoc[]
|
|
@ -0,0 +1,30 @@
|
|||
--
|
||||
:api: enrich-execute-policy
|
||||
:request: ExecutePolicyRequest
|
||||
:response: ExecutePolicyResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Execute Policy API
|
||||
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Request
|
||||
|
||||
The Execute Policy API allows you to execute an enrich policy by name.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Response
|
||||
|
||||
The returned +{response}+ includes either the status or task id.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
|
||||
include::../execution.asciidoc[]
|
|
@ -0,0 +1,32 @@
|
|||
--
|
||||
:api: enrich-get-policy
|
||||
:request: GetPolicyRequest
|
||||
:response: GetPolicyResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Get Policy API
|
||||
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Request
|
||||
|
||||
The Get Policy API allows you to retrieve enrich policies by name
|
||||
or all policies if no name is provided.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Response
|
||||
|
||||
The returned +{response}+ includes the requested enrich policies.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> The actual enrich policy.
|
||||
|
||||
include::../execution.asciidoc[]
|
|
@ -0,0 +1,31 @@
|
|||
--
|
||||
:api: enrich-put-policy
|
||||
:request: PutPolicyRequest
|
||||
:response: AcknowledgedResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Put Policy API
|
||||
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Request
|
||||
|
||||
The Put Policy API stores an enrich policy in Elasticsearch.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Response
|
||||
|
||||
The returned +{response}+ indicates if the put policy request was acknowledged.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> Whether put policy request was acknowledged.
|
||||
|
||||
include::../execution.asciidoc[]
|
|
@ -0,0 +1,33 @@
|
|||
--
|
||||
:api: enrich-stats
|
||||
:request: StatsRequest
|
||||
:response: StatsResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Stats API
|
||||
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Request
|
||||
|
||||
The stats API returns enrich related stats.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Response
|
||||
|
||||
The returned +{response}+ includes enrich related stats.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> List of policies that are currently executing with
|
||||
additional details.
|
||||
<2> List of coordinator stats per ingest node.
|
||||
|
||||
include::../execution.asciidoc[]
|
|
@ -635,3 +635,22 @@ include::transform/delete_transform.asciidoc[]
|
|||
include::transform/preview_transform.asciidoc[]
|
||||
include::transform/start_transform.asciidoc[]
|
||||
include::transform/stop_transform.asciidoc[]
|
||||
|
||||
== Enrich APIs
|
||||
|
||||
:upid: {mainid}-enrich
|
||||
:doc-tests-file: {doc-tests}/EnrichDocumentationIT.java
|
||||
|
||||
The Java High Level REST Client supports the following Enrich APIs:
|
||||
|
||||
* <<{upid}-enrich-put-policy>>
|
||||
* <<{upid}-enrich-delete-policy>>
|
||||
* <<{upid}-enrich-get-policy>>
|
||||
* <<{upid}-enrich-stats>>
|
||||
* <<{upid}-enrich-execute-policy>>
|
||||
|
||||
include::enrich/put_policy.asciidoc[]
|
||||
include::enrich/delete_policy.asciidoc[]
|
||||
include::enrich/get_policy.asciidoc[]
|
||||
include::enrich/stats.asciidoc[]
|
||||
include::enrich/execute_policy.asciidoc[]
|
||||
|
|
|
@ -0,0 +1,67 @@
|
|||
[role="xpack"]
|
||||
[testenv="basic"]
|
||||
[[delete-enrich-policy-api]]
|
||||
=== Delete enrich policy API
|
||||
++++
|
||||
<titleabbrev>Delete enrich policy</titleabbrev>
|
||||
++++
|
||||
|
||||
Deletes an existing enrich policy and its enrich index.
|
||||
|
||||
////
|
||||
[source,console]
|
||||
----
|
||||
PUT /users
|
||||
|
||||
PUT /_enrich/policy/my-policy
|
||||
{
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
}
|
||||
----
|
||||
// TESTSETUP
|
||||
////
|
||||
|
||||
[source,console]
|
||||
--------------------------------------------------
|
||||
DELETE /_enrich/policy/my-policy
|
||||
--------------------------------------------------
|
||||
|
||||
|
||||
[[delete-enrich-policy-api-request]]
|
||||
==== {api-request-title}
|
||||
|
||||
`DELETE /_enrich/policy/<enrich-policy>`
|
||||
|
||||
|
||||
[[delete-enrich-policy-api-prereqs]]
|
||||
==== {api-prereq-title}
|
||||
|
||||
include::put-enrich-policy.asciidoc[tag=enrich-policy-api-prereqs]
|
||||
|
||||
|
||||
[[delete-enrich-policy-api-desc]]
|
||||
==== {api-description-title}
|
||||
|
||||
Use the delete enrich policy API
|
||||
to delete an existing enrich policy
|
||||
and its enrich index.
|
||||
|
||||
[IMPORTANT]
|
||||
====
|
||||
You must remove an enrich policy
|
||||
from any in-use ingest pipelines
|
||||
before deletion.
|
||||
You cannot remove in-use enrich policies.
|
||||
====
|
||||
|
||||
|
||||
[[delete-enrich-policy-api-path-params]]
|
||||
==== {api-path-parms-title}
|
||||
|
||||
`<enrich-policy>`::
|
||||
(Required, string)
|
||||
Enrich policy to delete.
|
|
@ -0,0 +1,135 @@
|
|||
[role="xpack"]
|
||||
[testenv="basic"]
|
||||
[[enrich-stats-api]]
|
||||
=== Enrich stats API
|
||||
++++
|
||||
<titleabbrev>Enrich stats</titleabbrev>
|
||||
++++
|
||||
|
||||
Returns <<coordinating-node,enrich coordinator>> statistics
|
||||
and information about enrich policies
|
||||
that are currently executing.
|
||||
|
||||
[source,console]
|
||||
----
|
||||
GET /_enrich/_stats
|
||||
----
|
||||
|
||||
|
||||
[[enrich-stats-api-request]]
|
||||
==== {api-request-title}
|
||||
|
||||
`GET /_enrich/_stats`
|
||||
|
||||
|
||||
[[enrich-stats-api-response-body]]
|
||||
==== {api-response-body-title}
|
||||
|
||||
`executing_policies`::
|
||||
+
|
||||
--
|
||||
(Array of objects)
|
||||
Objects containing information
|
||||
about each enrich policy
|
||||
that is currently executing.
|
||||
|
||||
Returned parameters include:
|
||||
|
||||
`name`::
|
||||
(String)
|
||||
Name of the enrich policy.
|
||||
|
||||
`task`::
|
||||
(<<tasks,Task object>>)
|
||||
Object containing detailed information
|
||||
about the policy execution task.
|
||||
--
|
||||
|
||||
`coordinator_stats`::
|
||||
+
|
||||
--
|
||||
(Array of objects)
|
||||
Objects containing information
|
||||
about each <<coordinating-node,coordinating ingest node>>
|
||||
for configured enrich processors.
|
||||
|
||||
Returned parameters include:
|
||||
|
||||
`node_id`::
|
||||
(String)
|
||||
ID of the ingest node coordinating search requests
|
||||
for configured enrich processors.
|
||||
|
||||
`queue_size`::
|
||||
(Integer)
|
||||
Number of search requests in the queue.
|
||||
|
||||
`remote_requests_current`::
|
||||
(Integer)
|
||||
Current number of outstanding remote requests.
|
||||
|
||||
`remote_requests_total`::
|
||||
(Integer)
|
||||
Number of outstanding remote requests executed
|
||||
since node startup.
|
||||
+
|
||||
In most cases,
|
||||
a remote request includes multiple search requests.
|
||||
This depends on the number of search requests in the queue
|
||||
when the remote request is executed.
|
||||
|
||||
`executed_searches_total`::
|
||||
(Integer)
|
||||
Number of search requests
|
||||
that enrich processors have executed
|
||||
since node startup.
|
||||
--
|
||||
|
||||
|
||||
[[enrich-stats-api-example]]
|
||||
==== {api-examples-title}
|
||||
|
||||
|
||||
[source,console]
|
||||
----
|
||||
GET /_enrich/_stats
|
||||
----
|
||||
// TEST[s/^/PUT \/_enrich\/policy\/my-policy\/_execute\n/]
|
||||
|
||||
The API returns the following response:
|
||||
|
||||
[source,console-result]
|
||||
----
|
||||
{
|
||||
"executing_policies": [
|
||||
{
|
||||
"name": "my-policy",
|
||||
"task": {
|
||||
"id" : 124,
|
||||
"type" : "direct",
|
||||
"action" : "cluster:admin/xpack/enrich/execute",
|
||||
"start_time_in_millis" : 1458585884904,
|
||||
"running_time_in_nanos" : 47402,
|
||||
"cancellable" : false,
|
||||
"parent_task_id" : "oTUltX4IQMOUUVeiohTt8A:123",
|
||||
"headers" : {
|
||||
"X-Opaque-Id" : "123456"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"coordinator_stats": [
|
||||
{
|
||||
"node_id": "1sFM8cmSROZYhPxVsiWew",
|
||||
"queue_size": 0,
|
||||
"remote_requests_current": 0,
|
||||
"remote_requests_total": 0,
|
||||
"executed_searches_total": 0
|
||||
}
|
||||
]
|
||||
}
|
||||
----
|
||||
// TESTRESPONSE[s/"executing_policies": \[[^\]]*\]/"executing_policies": $body.$_path/]
|
||||
// TESTRESPONSE[s/"node_id": "1sFM8cmSROZYhPxVsiWew"/"node_id" : $body.coordinator_stats.0.node_id/]
|
||||
// TESTRESPONSE[s/"remote_requests_total": 0/"remote_requests_total" : $body.coordinator_stats.0.remote_requests_total/]
|
||||
// TESTRESPONSE[s/"executed_searches_total": 0/"executed_searches_total" : $body.coordinator_stats.0.executed_searches_total/]
|
|
@ -0,0 +1,103 @@
|
|||
[role="xpack"]
|
||||
[testenv="basic"]
|
||||
[[execute-enrich-policy-api]]
|
||||
=== Execute enrich policy API
|
||||
++++
|
||||
<titleabbrev>Execute enrich policy</titleabbrev>
|
||||
++++
|
||||
|
||||
Executes an existing enrich policy.
|
||||
|
||||
////
|
||||
|
||||
[source,console]
|
||||
----
|
||||
PUT /users/_doc/1?refresh
|
||||
{
|
||||
"email": "mardy.brown@asciidocsmith.com",
|
||||
"first_name": "Mardy",
|
||||
"last_name": "Brown",
|
||||
"city": "New Orleans",
|
||||
"county": "Orleans",
|
||||
"state": "LA",
|
||||
"zip": 70116,
|
||||
"web": "mardy.asciidocsmith.com"
|
||||
}
|
||||
|
||||
PUT /_enrich/policy/my-policy
|
||||
{
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
}
|
||||
----
|
||||
// TESTSETUP
|
||||
////
|
||||
|
||||
[source,console]
|
||||
--------------------------------------------------
|
||||
PUT /_enrich/policy/my-policy/_execute
|
||||
--------------------------------------------------
|
||||
|
||||
////
|
||||
[source,console]
|
||||
--------------------------------------------------
|
||||
DELETE /_enrich/policy/my-policy
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
////
|
||||
|
||||
|
||||
[[execute-enrich-policy-api-request]]
|
||||
==== {api-request-title}
|
||||
|
||||
`PUT /_enrich/policy/<enrich-policy>/_execute`
|
||||
|
||||
`POST /_enrich/policy/<enrich-policy>/_execute`
|
||||
|
||||
|
||||
[[execute-enrich-policy-api-prereqs]]
|
||||
==== {api-prereq-title}
|
||||
|
||||
include::put-enrich-policy.asciidoc[tag=enrich-policy-api-prereqs]
|
||||
|
||||
|
||||
[[execute-enrich-policy-api-desc]]
|
||||
==== {api-description-title}
|
||||
|
||||
Use the execute enrich policy API
|
||||
to create the enrich index for an existing enrich policy.
|
||||
|
||||
// tag::execute-enrich-policy-def[]
|
||||
The _enrich index_ contains documents from the policy's source indices.
|
||||
Enrich indices always begin with `.enrich-*`,
|
||||
are read-only,
|
||||
and are <<indices-forcemerge,force merged>>.
|
||||
|
||||
[WARNING]
|
||||
====
|
||||
Enrich indices should be used by the <<enrich-processor,enrich processor>> only.
|
||||
Avoid using enrich indices for other purposes.
|
||||
====
|
||||
// end::execute-enrich-policy-def[]
|
||||
|
||||
// tag::update-enrich-index[]
|
||||
Once created, you cannot update
|
||||
or index documents to an enrich index.
|
||||
Instead, update your source indices
|
||||
and execute the enrich policy again.
|
||||
This creates a new enrich index from your updated source indices
|
||||
and deletes the previous enrich index.
|
||||
// end::update-enrich-index[]
|
||||
|
||||
Because this API request performs several operations,
|
||||
it may take a while to return a response.
|
||||
|
||||
[[execute-enrich-policy-api-path-params]]
|
||||
==== {api-path-parms-title}
|
||||
|
||||
`<enrich-policy>`::
|
||||
(Required, string)
|
||||
Enrich policy to execute.
|
|
@ -0,0 +1,225 @@
|
|||
[role="xpack"]
|
||||
[testenv="basic"]
|
||||
[[get-enrich-policy-api]]
|
||||
=== Get enrich policy API
|
||||
++++
|
||||
<titleabbrev>Get enrich policy</titleabbrev>
|
||||
++++
|
||||
|
||||
Returns information about an enrich policy.
|
||||
|
||||
////
|
||||
[source,console]
|
||||
----
|
||||
PUT /users
|
||||
|
||||
PUT /_enrich/policy/my-policy
|
||||
{
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
}
|
||||
|
||||
PUT /_enrich/policy/other-policy
|
||||
{
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
}
|
||||
----
|
||||
////
|
||||
|
||||
[source,console]
|
||||
--------------------------------------------------
|
||||
GET /_enrich/policy/my-policy
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
||||
|
||||
[[get-enrich-policy-api-request]]
|
||||
==== {api-request-title}
|
||||
|
||||
`GET /_enrich/policy/<enrich-policy>`
|
||||
|
||||
`GET /_enrich/policy`
|
||||
|
||||
`GET /_enrich/policy/policy1,policy2`
|
||||
|
||||
|
||||
[[get-enrich-policy-api-prereqs]]
|
||||
==== {api-prereq-title}
|
||||
|
||||
include::put-enrich-policy.asciidoc[tag=enrich-policy-api-prereqs]
|
||||
|
||||
|
||||
[[get-enrich-policy-api-path-params]]
|
||||
==== {api-path-parms-title}
|
||||
|
||||
`<enrich-policy>`::
|
||||
+
|
||||
--
|
||||
(Optional, string)
|
||||
Comma-separated list of enrich policy names
|
||||
used to limit the request.
|
||||
|
||||
To return information for all enrich policies,
|
||||
omit this parameter.
|
||||
--
|
||||
|
||||
|
||||
[[get-enrich-policy-api-example]]
|
||||
==== {api-examples-title}
|
||||
|
||||
|
||||
[[get-enrich-policy-api-single-ex]]
|
||||
===== Get a single policy
|
||||
|
||||
[source,console]
|
||||
--------------------------------------------------
|
||||
GET /_enrich/policy/my-policy
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
||||
The API returns the following response:
|
||||
|
||||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"policies": [
|
||||
{
|
||||
"config": {
|
||||
"match": {
|
||||
"name" : "my-policy",
|
||||
"indices" : ["users"],
|
||||
"match_field" : "email",
|
||||
"enrich_fields" : [
|
||||
"first_name",
|
||||
"last_name",
|
||||
"city",
|
||||
"zip",
|
||||
"state"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
||||
[[get-enrich-policy-api-commas-ex]]
|
||||
===== Get multiple policies
|
||||
|
||||
[source,console]
|
||||
--------------------------------------------------
|
||||
GET /_enrich/policy/my-policy,other-policy
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
||||
The API returns the following response:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"policies": [
|
||||
{
|
||||
"config": {
|
||||
"match": {
|
||||
"name" : "my-policy",
|
||||
"indices" : ["users"],
|
||||
"match_field" : "email",
|
||||
"enrich_fields" : [
|
||||
"first_name",
|
||||
"last_name",
|
||||
"city",
|
||||
"zip",
|
||||
"state"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"config": {
|
||||
"match": {
|
||||
"name" : "other-policy",
|
||||
"indices" : ["users"],
|
||||
"match_field" : "email",
|
||||
"enrich_fields" : [
|
||||
"first_name",
|
||||
"last_name",
|
||||
"city",
|
||||
"zip",
|
||||
"state"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
|
||||
|
||||
[[get-enrich-policy-api-all-ex]]
|
||||
===== Get all policies
|
||||
|
||||
[source,console]
|
||||
--------------------------------------------------
|
||||
GET /_enrich/policy
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
||||
The API returns the following response:
|
||||
|
||||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"policies": [
|
||||
{
|
||||
"config": {
|
||||
"match": {
|
||||
"name" : "my-policy",
|
||||
"indices" : ["users"],
|
||||
"match_field" : "email",
|
||||
"enrich_fields" : [
|
||||
"first_name",
|
||||
"last_name",
|
||||
"city",
|
||||
"zip",
|
||||
"state"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"config": {
|
||||
"match": {
|
||||
"name" : "other-policy",
|
||||
"indices" : ["users"],
|
||||
"match_field" : "email",
|
||||
"enrich_fields" : [
|
||||
"first_name",
|
||||
"last_name",
|
||||
"city",
|
||||
"zip",
|
||||
"state"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
////
|
||||
[source,console]
|
||||
--------------------------------------------------
|
||||
DELETE /_enrich/policy/my-policy
|
||||
DELETE /_enrich/policy/other-policy
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
////
|
|
@ -0,0 +1,21 @@
|
|||
[[enrich-apis]]
|
||||
== Enrich APIs
|
||||
|
||||
The following enrich APIs are available for managing enrich policies:
|
||||
|
||||
* <<put-enrich-policy-api>> to add or update an enrich policy
|
||||
* <<delete-enrich-policy-api>> to delete an enrich policy
|
||||
* <<get-enrich-policy-api>> to return information about an enrich policy
|
||||
* <<execute-enrich-policy-api>> to execute an enrich policy
|
||||
* <<enrich-stats-api>> to get enrich-related stats
|
||||
|
||||
|
||||
include::put-enrich-policy.asciidoc[]
|
||||
|
||||
include::delete-enrich-policy.asciidoc[]
|
||||
|
||||
include::get-enrich-policy.asciidoc[]
|
||||
|
||||
include::execute-enrich-policy.asciidoc[]
|
||||
|
||||
include::enrich-stats.asciidoc[]
|
|
@ -0,0 +1,349 @@
|
|||
[role="xpack"]
|
||||
[testenv="basic"]
|
||||
[[put-enrich-policy-api]]
|
||||
=== Put enrich policy API
|
||||
++++
|
||||
<titleabbrev>Put enrich policy</titleabbrev>
|
||||
++++
|
||||
|
||||
Creates an enrich policy.
|
||||
|
||||
////
|
||||
[source,console]
|
||||
----
|
||||
PUT /users
|
||||
----
|
||||
////
|
||||
|
||||
[source,console]
|
||||
----
|
||||
PUT /_enrich/policy/my-policy
|
||||
{
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
||||
////
|
||||
[source,console]
|
||||
--------------------------------------------------
|
||||
DELETE /_enrich/policy/my-policy
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
////
|
||||
|
||||
|
||||
[[put-enrich-policy-api-request]]
|
||||
==== {api-request-title}
|
||||
|
||||
`PUT /_enrich/policy/<enrich-policy>`
|
||||
|
||||
|
||||
[[put-enrich-policy-api-prereqs]]
|
||||
==== {api-prereq-title}
|
||||
|
||||
// tag::enrich-policy-api-prereqs[]
|
||||
If you use {es} {security-features}, you must have:
|
||||
|
||||
* `read` index privileges for any indices used
|
||||
* The `enrich_user` {stack-ov}/built-in-roles.html[built-in role]
|
||||
// end::enrich-policy-api-prereqs[]
|
||||
|
||||
|
||||
[[put-enrich-policy-api-desc]]
|
||||
==== {api-description-title}
|
||||
|
||||
Use the put enrich policy API
|
||||
to create a new enrich policy.
|
||||
|
||||
// tag::enrich-policy-def[]
|
||||
An *enrich policy* is a set of rules the enrich processor uses
|
||||
to append the appropriate data to incoming documents.
|
||||
An enrich policy contains:
|
||||
|
||||
* The *policy type*,
|
||||
which determines how the processor enriches incoming documents
|
||||
* A list of source indices
|
||||
* The *match field* used to match incoming documents
|
||||
* *Enrich fields* appended to incoming documents
|
||||
from matching documents
|
||||
// end::enrich-policy-def[]
|
||||
|
||||
|
||||
===== Update an enrich policy
|
||||
|
||||
// tag::update-enrich-policy[]
|
||||
You cannot update an existing enrich policy.
|
||||
Instead, you can:
|
||||
|
||||
. Create and execute a new enrich policy.
|
||||
|
||||
. Replace the previous enrich policy
|
||||
with the new enrich policy
|
||||
in any in-use enrich processors.
|
||||
|
||||
. Use the <<delete-enrich-policy-api, delete enrich policy>> API
|
||||
to delete the previous enrich policy.
|
||||
// end::update-enrich-policy[]
|
||||
|
||||
|
||||
[[put-enrich-policy-api-path-params]]
|
||||
==== {api-path-parms-title}
|
||||
|
||||
`<enrich-policy>`::
|
||||
(Required, string)
|
||||
include::{docdir}/rest-api/common-parms.asciidoc[tag=enrich-policy]
|
||||
|
||||
|
||||
[[put-enrich-policy-api-request-body]]
|
||||
==== {api-request-body-title}
|
||||
|
||||
`<policy-type>`::
|
||||
+
|
||||
--
|
||||
(Required, enrich policy object)
|
||||
The parameter key is the enrich policy type.
|
||||
The enrich policy type indicates
|
||||
how the enrich processor matches incoming documents
|
||||
to documents in the enrich index.
|
||||
|
||||
Valid key values are:
|
||||
|
||||
`match`::
|
||||
Match documents in the enrich index
|
||||
using a <<query-dsl-term-query,term query>> for the `match_field`.
|
||||
See <<enrich-setup>> for an example.
|
||||
|
||||
`geo_match`::
|
||||
Match documents in the enrich index
|
||||
using a <<query-dsl-geo-shape-query,`geo_shape` query>> for the `match_field`.
|
||||
See <<put-enrich-policy-geo-match-ex>> for an example.
|
||||
|
||||
The parameter value is the enrich policy.
|
||||
The enrich policy is a set of rules
|
||||
used to create an <<execute-enrich-policy,enrich index>>.
|
||||
The enrich processor also uses these rules
|
||||
to append field data to incoming documents.
|
||||
|
||||
Parameters include:
|
||||
|
||||
`indices`::
|
||||
(Required, array of strings)
|
||||
Source indices used to create the enrich index.
|
||||
|
||||
`query`::
|
||||
(Optional, string)
|
||||
Query type used to find and select documents in the enrich index.
|
||||
Valid value is <<query-dsl-match-all-query,`match_all`>> (default).
|
||||
|
||||
`match_field`::
|
||||
(Required, string)
|
||||
Field used to match incoming documents
|
||||
to documents in the enrich index.
|
||||
|
||||
`enrich_fields`::
|
||||
(Required, Array of string)
|
||||
Fields appended to incoming documents
|
||||
from matching documents in the enrich index.
|
||||
--
|
||||
|
||||
[[put-enrich-policy-api-example]]
|
||||
==== {api-examples-title}
|
||||
|
||||
[[put-enrich-policy-geo-match-ex]]
|
||||
===== `geo_match` policy type
|
||||
|
||||
You can use the `geo_match` enrich policy type
|
||||
to enrich incoming documents
|
||||
based on matching geo_shapes.
|
||||
For example,
|
||||
you can add postal codes
|
||||
to incoming documents
|
||||
based on a set of coordinates.
|
||||
|
||||
To see how the `geo_match` policy type works,
|
||||
try the following example.
|
||||
|
||||
Use the <<indices-create-index, create index API>>
|
||||
to create a source index.
|
||||
The field mappings for the source index
|
||||
must contain:
|
||||
|
||||
* A <<geo-shape,`geo_shape`>> field
|
||||
which the enrich processor can use to match incoming documents
|
||||
* One or more enrich fields
|
||||
you'd like to append to incoming documents
|
||||
|
||||
[source,console]
|
||||
----
|
||||
PUT /postal_codes
|
||||
{
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "geo_shape"
|
||||
},
|
||||
"postal_code": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
Use the <<docs-index_,index API>>
|
||||
to index data to this source index.
|
||||
|
||||
[source,console]
|
||||
----
|
||||
PUT /postal_codes/_doc/1?refresh=wait_for
|
||||
{
|
||||
"location": {
|
||||
"type": "envelope",
|
||||
"coordinates": [[13.0, 53.0], [14.0, 52.0]]
|
||||
},
|
||||
"postal_code": "96598"
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
||||
Use the put enrich policy API
|
||||
to create an enrich policy
|
||||
with the `geo_match` policy type.
|
||||
This policy must include:
|
||||
|
||||
* One or more source indices
|
||||
* A `match_field`,
|
||||
the `geo_shape` field from the source indices
|
||||
used to match incoming documents
|
||||
* Enrich fields from the source indices
|
||||
you'd like to append to incoming documents
|
||||
|
||||
[source,console]
|
||||
----
|
||||
PUT /_enrich/policy/postal_policy
|
||||
{
|
||||
"geo_match": {
|
||||
"indices": "postal_codes",
|
||||
"match_field": "location",
|
||||
"enrich_fields": ["location","postal_code"]
|
||||
}
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
||||
Use the <<execute-enrich-policy-api,execute enrich policy API>>
|
||||
to create an enrich index for the policy.
|
||||
|
||||
include::execute-enrich-policy.asciidoc[tag=execute-enrich-policy-def]
|
||||
|
||||
[source,console]
|
||||
----
|
||||
POST /_enrich/policy/postal_policy/_execute
|
||||
----
|
||||
// TEST[continued]
|
||||
|
||||
Use the <<put-pipeline-api,put pipeline API>>
|
||||
to create an ingest pipeline.
|
||||
In the pipeline,
|
||||
add an <<enrich-processor,enrich processor>>
|
||||
that includes:
|
||||
|
||||
* Your enrich policy
|
||||
* The `field` of incoming documents used
|
||||
to match the geo_shape of documents from the enrich index.
|
||||
* The `target_field` used
|
||||
to store appended enrich data for incoming documents.
|
||||
* The `shape_relation`,
|
||||
which indicates how the processor matches geo_shapes in incoming documents
|
||||
to geo_shapes in documents from the enrich index.
|
||||
See <<_spatial_relations>> for valid options and more information.
|
||||
|
||||
[source,console]
|
||||
----
|
||||
PUT /_ingest/pipeline/postal_lookup
|
||||
{
|
||||
"description": "Enrich postal codes",
|
||||
"processors": [
|
||||
{
|
||||
"enrich": {
|
||||
"policy_name": "postal_policy",
|
||||
"field": "geo_location",
|
||||
"target_field": "geo_data",
|
||||
"shape_relation": "INTERSECTS"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
||||
Use the ingest pipeline
|
||||
to index a document.
|
||||
The incoming document
|
||||
should include the `field`
|
||||
specified in your enrich processor.
|
||||
|
||||
[source,console]
|
||||
----
|
||||
PUT /users/_doc/0?pipeline=postal_lookup
|
||||
{
|
||||
"first_name": "Mardy",
|
||||
"last_name": "Brown",
|
||||
"geo_location": "POINT (13.5 52.5)"
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
||||
To verify the enrich processor matched
|
||||
and appended the appropriate field data,
|
||||
use the <<docs-get,get API>>
|
||||
to view the indexed document.
|
||||
|
||||
[source,console]
|
||||
----
|
||||
GET /users/_doc/0
|
||||
----
|
||||
// TEST[continued]
|
||||
|
||||
The API returns the following response:
|
||||
|
||||
[source,console-result]
|
||||
----
|
||||
{
|
||||
"found": true,
|
||||
"_index": "users",
|
||||
"_type": "_doc",
|
||||
"_id": "0",
|
||||
"_version": 1,
|
||||
"_seq_no": 55,
|
||||
"_primary_term": 1,
|
||||
"_source": {
|
||||
"geo_data": {
|
||||
"location": {
|
||||
"type": "envelope",
|
||||
"coordinates": [[13.0, 53.0], [14.0, 52.0]]
|
||||
},
|
||||
"postal_code": "96598"
|
||||
},
|
||||
"first_name": "Mardy",
|
||||
"last_name": "Brown",
|
||||
"geo_location": "POINT (13.5 52.5)"
|
||||
}
|
||||
}
|
||||
----
|
||||
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term": 1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
////
|
||||
[source,console]
|
||||
--------------------------------------------------
|
||||
DELETE /_ingest/pipeline/postal_lookup
|
||||
|
||||
DELETE /_enrich/policy/postal_policy
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
////
|
|
@ -0,0 +1,288 @@
|
|||
[role="xpack"]
|
||||
[testenv="basic"]
|
||||
[[ingest-enriching-data]]
|
||||
== Enrich your data
|
||||
|
||||
You can use the <<enrich-processor,enrich processor>>
|
||||
to append data from existing indices
|
||||
to incoming documents during ingest.
|
||||
|
||||
For example, you can use the enrich processor to:
|
||||
|
||||
* Identify web services or vendors based on known IP addresses
|
||||
* Add product information to retail orders based on product IDs
|
||||
* Supplement contact information based on an email address
|
||||
* Add postal codes based on user coordinates
|
||||
|
||||
|
||||
[float]
|
||||
[[enrich-setup]]
|
||||
=== Set up an enrich processor
|
||||
|
||||
To set up an enrich processor and learn how it works,
|
||||
follow these steps:
|
||||
|
||||
. Check the <<enrich-prereqs, prerequisites>>.
|
||||
. <<create-enrich-source-index>>.
|
||||
. <<create-enrich-policy>>.
|
||||
. <<execute-enrich-policy>>.
|
||||
. <<add-enrich-processor>>.
|
||||
. <<ingest-enrich-docs>>.
|
||||
|
||||
Once you have an enrich processor set up,
|
||||
you can <<update-enrich-data,update your enrich data>>
|
||||
and <<update-enrich-policies, update your enrich policies>>
|
||||
using the <<enrich-apis,enrich APIs>>.
|
||||
|
||||
[IMPORTANT]
|
||||
====
|
||||
The enrich processor performs several operations
|
||||
and may impact the speed of your <<pipeline,ingest pipeline>>.
|
||||
|
||||
We strongly recommend testing and benchmarking your enrich processors
|
||||
before deploying them in production.
|
||||
|
||||
We do not recommend using the enrich processor to append real-time data.
|
||||
The enrich processor works best with reference data
|
||||
that doesn't change frequently.
|
||||
====
|
||||
|
||||
[float]
|
||||
[[enrich-prereqs]]
|
||||
==== Prerequisites
|
||||
|
||||
include::{docdir}/ingest/apis/enrich/put-enrich-policy.asciidoc[tag=enrich-policy-api-prereqs]
|
||||
|
||||
[float]
|
||||
[[create-enrich-source-index]]
|
||||
==== Create a source index
|
||||
|
||||
To begin,
|
||||
create one or more source indices.
|
||||
|
||||
A _source index_ contains data you want to append to incoming documents.
|
||||
You can index and manage documents in a source index
|
||||
like a regular index.
|
||||
|
||||
The following <<docs-index_,index API>> request creates the `users` source index
|
||||
containing user data.
|
||||
This request also indexes a new document to the `users` source index.
|
||||
|
||||
[source,console]
|
||||
----
|
||||
PUT /users/_doc/1?refresh=wait_for
|
||||
{
|
||||
"email": "mardy.brown@asciidocsmith.com",
|
||||
"first_name": "Mardy",
|
||||
"last_name": "Brown",
|
||||
"city": "New Orleans",
|
||||
"county": "Orleans",
|
||||
"state": "LA",
|
||||
"zip": 70116,
|
||||
"web": "mardy.asciidocsmith.com"
|
||||
}
|
||||
----
|
||||
|
||||
You also can set up {beats-ref}/getting-started.html[{beats}],
|
||||
such as a {filebeat-ref}/filebeat-getting-started.html[{filebeat}],
|
||||
to automatically send and index documents
|
||||
to your source indices.
|
||||
See {beats-ref}/getting-started.html[Getting started with {beats}].
|
||||
|
||||
|
||||
[float]
|
||||
[[create-enrich-policy]]
|
||||
==== Create an enrich policy
|
||||
|
||||
Use the <<put-enrich-policy-api,put enrich policy API>>
|
||||
to create an enrich policy.
|
||||
|
||||
include::{docdir}/ingest/apis/enrich/put-enrich-policy.asciidoc[tag=enrich-policy-def]
|
||||
|
||||
[source,console]
|
||||
----
|
||||
PUT /_enrich/policy/users-policy
|
||||
{
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
||||
|
||||
[float]
|
||||
[[execute-enrich-policy]]
|
||||
==== Execute an enrich policy
|
||||
|
||||
Use the <<execute-enrich-policy-api,execute enrich policy API>>
|
||||
to create an enrich index for the policy.
|
||||
|
||||
include::apis/enrich/execute-enrich-policy.asciidoc[tag=execute-enrich-policy-def]
|
||||
|
||||
The following request executes the `users-policy` enrich policy.
|
||||
Because this API request performs several operations,
|
||||
it may take a while to return a response.
|
||||
|
||||
[source,console]
|
||||
----
|
||||
POST /_enrich/policy/users-policy/_execute
|
||||
----
|
||||
// TEST[continued]
|
||||
|
||||
|
||||
[float]
|
||||
[[add-enrich-processor]]
|
||||
==== Add the enrich processor to an ingest pipeline
|
||||
|
||||
Use the <<put-pipeline-api,put pipeline API>>
|
||||
to create an ingest pipeline.
|
||||
Include an <<enrich-processor,enrich processor>>
|
||||
that uses your enrich policy.
|
||||
|
||||
When defining an enrich processor,
|
||||
you must include the following:
|
||||
|
||||
* The field used to match incoming documents
|
||||
to documents in the enrich index.
|
||||
+
|
||||
This field should be included in incoming documents.
|
||||
|
||||
* The target field added to incoming documents.
|
||||
This field contains all appended enrich data.
|
||||
|
||||
The following request adds a new pipeline, `user_lookup`.
|
||||
This pipeline includes an enrich processor
|
||||
that uses the `users-policy` enrich policy.
|
||||
|
||||
[source,console]
|
||||
----
|
||||
PUT /_ingest/pipeline/user_lookup
|
||||
{
|
||||
"description" : "Enriching user details to messages",
|
||||
"processors" : [
|
||||
{
|
||||
"enrich" : {
|
||||
"policy_name": "users-policy",
|
||||
"field" : "email",
|
||||
"target_field": "user",
|
||||
"max_matches": "1"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
||||
Because the enrich policy type is `match`,
|
||||
the enrich processor matches incoming documents
|
||||
to documents in the enrich index
|
||||
based on match field values.
|
||||
The enrich processor then appends the enrich field data
|
||||
from matching documents in the enrich index
|
||||
to the target field of incoming documents.
|
||||
|
||||
Because the `max_matches` option for the enrich processor is `1`,
|
||||
the enrich processor appends the data from only the best matching document
|
||||
to each incoming document's target field as an object.
|
||||
|
||||
If the `max_matches` option were greater than `1`,
|
||||
the processor could append data from up to the `max_matches` number of documents
|
||||
to the target field as an array.
|
||||
|
||||
If the incoming document matches no documents in the enrich index,
|
||||
the processor appends no data.
|
||||
|
||||
You also can add other <<ingest-processors,processors>>
|
||||
to your ingest pipeline.
|
||||
You can use these processors to change or drop incoming documents
|
||||
based on your criteria.
|
||||
See <<ingest-processors>> for a list of built-in processors.
|
||||
|
||||
|
||||
[float]
|
||||
[[ingest-enrich-docs]]
|
||||
==== Ingest and enrich documents
|
||||
|
||||
Index incoming documents using your ingest pipeline.
|
||||
|
||||
The following <<docs-index_,index API>> request uses the ingest pipeline
|
||||
to index a document
|
||||
containing the `email` field
|
||||
specified in the enrich processor.
|
||||
|
||||
[source,console]
|
||||
----
|
||||
PUT /my_index/_doc/my_id?pipeline=user_lookup
|
||||
{
|
||||
"email": "mardy.brown@asciidocsmith.com"
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
||||
To verify the enrich processor matched
|
||||
and appended the appropriate field data,
|
||||
use the <<docs-get,get API>> to view the indexed document.
|
||||
|
||||
[source,console]
|
||||
----
|
||||
GET /my_index/_doc/my_id
|
||||
----
|
||||
// TEST[continued]
|
||||
|
||||
The API returns the following response:
|
||||
|
||||
[source,console-result]
|
||||
----
|
||||
{
|
||||
"found": true,
|
||||
"_index": "my_index",
|
||||
"_type": "_doc",
|
||||
"_id": "my_id",
|
||||
"_version": 1,
|
||||
"_seq_no": 55,
|
||||
"_primary_term": 1,
|
||||
"_source": {
|
||||
"user": {
|
||||
"email": "mardy.brown@asciidocsmith.com",
|
||||
"first_name": "Mardy",
|
||||
"last_name": "Brown",
|
||||
"zip": 70116,
|
||||
"city": "New Orleans",
|
||||
"state": "LA"
|
||||
},
|
||||
"email": "mardy.brown@asciidocsmith.com"
|
||||
}
|
||||
}
|
||||
----
|
||||
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term":1/"_primary_term" : $body._primary_term/]
|
||||
|
||||
|
||||
[float]
|
||||
[[update-enrich-data]]
|
||||
=== Update your enrich index
|
||||
|
||||
include::{docdir}/ingest/apis/enrich/execute-enrich-policy.asciidoc[tag=update-enrich-index]
|
||||
|
||||
If wanted, you can <<docs-reindex,reindex>>
|
||||
or <<docs-update-by-query,update>> any already ingested documents
|
||||
using your ingest pipeline.
|
||||
|
||||
|
||||
[float]
|
||||
[[update-enrich-policies]]
|
||||
=== Update an enrich policy
|
||||
|
||||
include::apis/enrich/put-enrich-policy.asciidoc[tag=update-enrich-policy]
|
||||
|
||||
////
|
||||
[source,console]
|
||||
--------------------------------------------------
|
||||
DELETE /_ingest/pipeline/user_lookup
|
||||
|
||||
DELETE /_enrich/policy/users-policy
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
////
|
|
@ -753,6 +753,10 @@ metadata field to provide the error message.
|
|||
--------------------------------------------------
|
||||
// NOTCONSOLE
|
||||
|
||||
|
||||
include::enrich.asciidoc[]
|
||||
|
||||
|
||||
[[ingest-processors]]
|
||||
== Processors
|
||||
|
||||
|
@ -829,6 +833,7 @@ include::processors/date-index-name.asciidoc[]
|
|||
include::processors/dissect.asciidoc[]
|
||||
include::processors/dot-expand.asciidoc[]
|
||||
include::processors/drop.asciidoc[]
|
||||
include::processors/enrich.asciidoc[]
|
||||
include::processors/fail.asciidoc[]
|
||||
include::processors/foreach.asciidoc[]
|
||||
include::processors/geoip.asciidoc[]
|
||||
|
|
|
@ -0,0 +1,24 @@
|
|||
[role="xpack"]
|
||||
[testenv="basic"]
|
||||
[[enrich-processor]]
|
||||
=== Enrich Processor
|
||||
|
||||
The `enrich` processor can enrich documents with data from another index.
|
||||
See <<ingest-enriching-data,enrich data>> section for more information how to set this up and
|
||||
check out the <<ingest-enriching-data,tutorial>> to get familiar with enrich policies and related APIs.
|
||||
|
||||
[[enrich-options]]
|
||||
.Enrich Options
|
||||
[options="header"]
|
||||
|======
|
||||
| Name | Required | Default | Description
|
||||
| `policy_name` | yes | - | The name of the enrich policy to use.
|
||||
| `field` | yes | - | The field in the input document that matches the policies match_field used to retrieve the enrichment data.
|
||||
| `target_field` | yes | - | The field that will be used for the enrichment data.
|
||||
| `ignore_missing` | no | false | If `true` and `field` does not exist, the processor quietly exits without modifying the document
|
||||
| `override` | no | true | If processor will update fields with pre-existing non-null-valued field. When set to `false`, such fields will not be touched.
|
||||
| `max_matches` | no | 1 | The maximum number of matched documents to include under the configured target field. The `target_field` will be turned into a json array if `max_matches` is higher than 1, otherwise `target_field` will become a json object. In order to avoid documents getting too large, the maximum allowed value is 128.
|
||||
| `shape_relation` | no | `INTERSECTS` | A spatial relation operator used to match the <<geo-shape,geo_shape>> of incoming documents to documents in the enrich index. This option is only used for `geo_match` enrich policy types. The <<spatial-strategy, geo_shape strategy>> mapping parameter determines which spatial relation operators are available. See <<_spatial_relations>> for operators and more information.
|
||||
|
||||
include::common-options.asciidoc[]
|
||||
|======
|
|
@ -77,7 +77,7 @@ tag::committed[]
|
|||
If `true`,
|
||||
the segments is synced to disk. Segments that are synced can survive a hard reboot.
|
||||
+
|
||||
If `false`,
|
||||
If `false`,
|
||||
the data from uncommitted segments is also stored in
|
||||
the transaction log so that Elasticsearch is able to replay
|
||||
changes on the next start.
|
||||
|
@ -122,6 +122,11 @@ is based on Lucene documents. {es} reclaims the disk space of deleted Lucene
|
|||
documents when a segment is merged.
|
||||
end::docs-deleted[]
|
||||
|
||||
tag::enrich-policy[]
|
||||
Enrich policy name
|
||||
used to limit the request.
|
||||
end::enrich-policy[]
|
||||
|
||||
tag::expand-wildcards[]
|
||||
`expand_wildcards`::
|
||||
+
|
||||
|
@ -279,8 +284,8 @@ end::include-defaults[]
|
|||
tag::include-segment-file-sizes[]
|
||||
`include_segment_file_sizes`::
|
||||
(Optional, boolean)
|
||||
If `true`, the call reports the aggregated disk usage of
|
||||
each one of the Lucene index files (only applies if segment stats are
|
||||
If `true`, the call reports the aggregated disk usage of
|
||||
each one of the Lucene index files (only applies if segment stats are
|
||||
requested). Defaults to `false`.
|
||||
end::include-segment-file-sizes[]
|
||||
|
||||
|
@ -504,7 +509,7 @@ end::positions[]
|
|||
|
||||
tag::preference[]
|
||||
`preference`::
|
||||
(Optional, string) Specifies the node or shard the operation should be
|
||||
(Optional, string) Specifies the node or shard the operation should be
|
||||
performed on. Random by default.
|
||||
end::preference[]
|
||||
|
||||
|
@ -652,7 +657,7 @@ end::source_includes[]
|
|||
|
||||
tag::stats[]
|
||||
`stats`::
|
||||
(Optional, string) Specific `tag` of the request for logging and statistical
|
||||
(Optional, string) Specific `tag` of the request for logging and statistical
|
||||
purposes.
|
||||
end::stats[]
|
||||
|
||||
|
|
|
@ -15,6 +15,7 @@ not be included yet.
|
|||
* <<cluster, Cluster APIs>>
|
||||
* <<ccr-apis,{ccr-cap} APIs>>
|
||||
* <<docs, Document APIs>>
|
||||
* <<enrich-apis,Enrich APIs>>
|
||||
* <<graph-explore-api,Graph Explore API>>
|
||||
* <<indices, Index APIs>>
|
||||
* <<index-lifecycle-management-api,Index lifecycle management APIs>>
|
||||
|
@ -38,6 +39,7 @@ include::{es-repo-dir}/cat.asciidoc[]
|
|||
include::{es-repo-dir}/cluster.asciidoc[]
|
||||
include::{es-repo-dir}/ccr/apis/ccr-apis.asciidoc[]
|
||||
include::{es-repo-dir}/docs.asciidoc[]
|
||||
include::{es-repo-dir}/ingest/apis/enrich/index.asciidoc[]
|
||||
include::{es-repo-dir}/graph/explore.asciidoc[]
|
||||
include::{es-repo-dir}/indices.asciidoc[]
|
||||
include::{es-repo-dir}/ilm/apis/ilm-api.asciidoc[]
|
||||
|
|
|
@ -74,7 +74,7 @@ class SimulateExecutionService {
|
|||
responses.add(response);
|
||||
}
|
||||
if (counter.incrementAndGet() == request.getDocuments().size()) {
|
||||
l.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(),
|
||||
listener.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(),
|
||||
request.isVerbose(), responses));
|
||||
}
|
||||
});
|
||||
|
|
|
@ -121,7 +121,7 @@ public class MultiSearchResponse extends ActionResponse implements Iterable<Mult
|
|||
private final Item[] items;
|
||||
private final long tookInMillis;
|
||||
|
||||
MultiSearchResponse(StreamInput in) throws IOException {
|
||||
public MultiSearchResponse(StreamInput in) throws IOException {
|
||||
super(in);
|
||||
items = new Item[in.readVInt()];
|
||||
for (int i = 0; i < items.length; i++) {
|
||||
|
|
|
@ -29,6 +29,7 @@ import org.elasticsearch.ingest.IngestDocument;
|
|||
import org.elasticsearch.ingest.Pipeline;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.threadpool.TestThreadPool;
|
||||
import org.elasticsearch.threadpool.TestThreadPool;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
|
||||
|
|
|
@ -60,6 +60,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
|
|||
GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
|
||||
assertThat(geoShapeFieldMapper.fieldType().orientation(),
|
||||
equalTo(GeoShapeFieldMapper.Defaults.ORIENTATION.value()));
|
||||
assertThat(geoShapeFieldMapper.fieldType.hasDocValues(), equalTo(false));
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -68,6 +68,7 @@ A successful call returns an object with "cluster" and "index" fields.
|
|||
"manage_api_key",
|
||||
"manage_ccr",
|
||||
"manage_data_frame_transforms",
|
||||
"manage_enrich",
|
||||
"manage_ilm",
|
||||
"manage_index_templates",
|
||||
"manage_ingest_pipelines",
|
||||
|
|
|
@ -680,6 +680,22 @@ public class XPackLicenseState {
|
|||
return localStatus.active;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if the enrich processor and related APIs are allowed to be used.
|
||||
* <p>
|
||||
* This is available in for all license types except
|
||||
* {@link OperationMode#MISSING}
|
||||
*
|
||||
* @return {@code true} as long as the license is valid. Otherwise
|
||||
* {@code false}.
|
||||
*/
|
||||
public boolean isEnrichAllowed() {
|
||||
// status is volatile
|
||||
Status localStatus = status;
|
||||
// Should work on all active licenses
|
||||
return localStatus.active;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if SQL support should be enabled.
|
||||
* <p>
|
||||
|
|
|
@ -17,6 +17,7 @@ import org.elasticsearch.protocol.xpack.frozen.FreezeResponse;
|
|||
import org.elasticsearch.xpack.core.action.XPackInfoAction;
|
||||
import org.elasticsearch.xpack.core.action.XPackInfoRequestBuilder;
|
||||
import org.elasticsearch.xpack.core.ccr.client.CcrClient;
|
||||
import org.elasticsearch.xpack.core.enrich.client.EnrichClient;
|
||||
import org.elasticsearch.xpack.core.frozen.action.FreezeIndexAction;
|
||||
import org.elasticsearch.xpack.core.ilm.client.ILMClient;
|
||||
import org.elasticsearch.xpack.core.ml.client.MachineLearningClient;
|
||||
|
@ -43,6 +44,7 @@ public class XPackClient {
|
|||
private final WatcherClient watcherClient;
|
||||
private final MachineLearningClient machineLearning;
|
||||
private final ILMClient ilmClient;
|
||||
private final EnrichClient enrichClient;
|
||||
|
||||
public XPackClient(Client client) {
|
||||
this.client = Objects.requireNonNull(client, "client");
|
||||
|
@ -53,6 +55,7 @@ public class XPackClient {
|
|||
this.watcherClient = new WatcherClient(client);
|
||||
this.machineLearning = new MachineLearningClient(client);
|
||||
this.ilmClient = new ILMClient(client);
|
||||
this.enrichClient = new EnrichClient(client);
|
||||
}
|
||||
|
||||
public Client es() {
|
||||
|
@ -87,6 +90,10 @@ public class XPackClient {
|
|||
return ilmClient;
|
||||
}
|
||||
|
||||
public EnrichClient enrichClient() {
|
||||
return enrichClient;
|
||||
}
|
||||
|
||||
public XPackClient withHeaders(Map<String, String> headers) {
|
||||
return new XPackClient(client.filterWithHeader(headers));
|
||||
}
|
||||
|
|
|
@ -6,8 +6,8 @@
|
|||
package org.elasticsearch.xpack.core;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionType;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.action.ActionType;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.NamedDiff;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
|
@ -43,6 +43,10 @@ import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata;
|
|||
import org.elasticsearch.xpack.core.ccr.CCRFeatureSet;
|
||||
import org.elasticsearch.xpack.core.analytics.AnalyticsFeatureSetUsage;
|
||||
import org.elasticsearch.xpack.core.deprecation.DeprecationInfoAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.flattened.FlattenedFeatureSetUsage;
|
||||
import org.elasticsearch.xpack.core.frozen.FrozenIndicesFeatureSetUsage;
|
||||
import org.elasticsearch.xpack.core.frozen.action.FreezeIndexAction;
|
||||
|
@ -436,8 +440,13 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl
|
|||
DeleteTransformAction.INSTANCE,
|
||||
GetTransformAction.INSTANCE,
|
||||
GetTransformStatsAction.INSTANCE,
|
||||
PreviewTransformAction.INSTANCE
|
||||
);
|
||||
PreviewTransformAction.INSTANCE,
|
||||
// enrich
|
||||
DeleteEnrichPolicyAction.INSTANCE,
|
||||
ExecuteEnrichPolicyAction.INSTANCE,
|
||||
GetEnrichPolicyAction.INSTANCE,
|
||||
PutEnrichPolicyAction.INSTANCE
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -613,7 +622,7 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl
|
|||
TransformState::fromXContent),
|
||||
new NamedXContentRegistry.Entry(PersistentTaskState.class, new ParseField(TransformField.TASK_NAME),
|
||||
TransformState::fromXContent)
|
||||
);
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -40,6 +40,11 @@ public class XPackSettings {
|
|||
}
|
||||
|
||||
|
||||
/**
|
||||
* Setting for controlling whether or not enrich is enabled.
|
||||
*/
|
||||
public static final Setting<Boolean> ENRICH_ENABLED_SETTING = Setting.boolSetting("xpack.enrich.enabled", true, Property.NodeScope);
|
||||
|
||||
/**
|
||||
* Setting for controlling whether or not CCR is enabled.
|
||||
*/
|
||||
|
|
|
@ -0,0 +1,378 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.core.enrich;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
|
||||
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Represents an enrich policy including its configuration.
|
||||
*/
|
||||
public final class EnrichPolicy implements Writeable, ToXContentFragment {
|
||||
|
||||
public static final String ENRICH_INDEX_NAME_BASE = ".enrich-";
|
||||
|
||||
public static final String MATCH_TYPE = "match";
|
||||
public static final String GEO_MATCH_TYPE = "geo_match";
|
||||
public static final String[] SUPPORTED_POLICY_TYPES = new String[]{
|
||||
MATCH_TYPE,
|
||||
GEO_MATCH_TYPE
|
||||
};
|
||||
|
||||
private static final ParseField QUERY = new ParseField("query");
|
||||
private static final ParseField INDICES = new ParseField("indices");
|
||||
private static final ParseField MATCH_FIELD = new ParseField("match_field");
|
||||
private static final ParseField ENRICH_FIELDS = new ParseField("enrich_fields");
|
||||
private static final ParseField ELASTICSEARCH_VERSION = new ParseField("elasticsearch_version");
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static final ConstructingObjectParser<EnrichPolicy, String> PARSER = new ConstructingObjectParser<>(
|
||||
"policy",
|
||||
false,
|
||||
(args, policyType) -> new EnrichPolicy(
|
||||
policyType,
|
||||
(QuerySource) args[0],
|
||||
(List<String>) args[1],
|
||||
(String) args[2],
|
||||
(List<String>) args[3],
|
||||
(Version) args[4]
|
||||
)
|
||||
);
|
||||
|
||||
static {
|
||||
declareCommonConstructorParsingOptions(PARSER);
|
||||
}
|
||||
|
||||
private static <T> void declareCommonConstructorParsingOptions(ConstructingObjectParser<T, ?> parser) {
|
||||
parser.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> {
|
||||
XContentBuilder contentBuilder = XContentBuilder.builder(p.contentType().xContent());
|
||||
contentBuilder.generator().copyCurrentStructure(p);
|
||||
return new QuerySource(BytesReference.bytes(contentBuilder), contentBuilder.contentType());
|
||||
}, QUERY);
|
||||
parser.declareStringArray(ConstructingObjectParser.constructorArg(), INDICES);
|
||||
parser.declareString(ConstructingObjectParser.constructorArg(), MATCH_FIELD);
|
||||
parser.declareStringArray(ConstructingObjectParser.constructorArg(), ENRICH_FIELDS);
|
||||
parser.declareField(ConstructingObjectParser.optionalConstructorArg(), ((p, c) -> Version.fromString(p.text())),
|
||||
ELASTICSEARCH_VERSION, ValueType.STRING);
|
||||
}
|
||||
|
||||
public static EnrichPolicy fromXContent(XContentParser parser) throws IOException {
|
||||
Token token = parser.currentToken();
|
||||
if (token != Token.START_OBJECT) {
|
||||
token = parser.nextToken();
|
||||
}
|
||||
if (token != Token.START_OBJECT) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "unexpected token");
|
||||
}
|
||||
token = parser.nextToken();
|
||||
if (token != Token.FIELD_NAME) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "unexpected token");
|
||||
}
|
||||
String policyType = parser.currentName();
|
||||
EnrichPolicy policy = PARSER.parse(parser, policyType);
|
||||
token = parser.nextToken();
|
||||
if (token != Token.END_OBJECT) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "unexpected token");
|
||||
}
|
||||
return policy;
|
||||
}
|
||||
|
||||
private final String type;
|
||||
private final QuerySource query;
|
||||
private final List<String> indices;
|
||||
private final String matchField;
|
||||
private final List<String> enrichFields;
|
||||
private final Version elasticsearchVersion;
|
||||
|
||||
public EnrichPolicy(StreamInput in) throws IOException {
|
||||
this(
|
||||
in.readString(),
|
||||
in.readOptionalWriteable(QuerySource::new),
|
||||
in.readStringList(),
|
||||
in.readString(),
|
||||
in.readStringList(),
|
||||
Version.readVersion(in)
|
||||
);
|
||||
}
|
||||
|
||||
public EnrichPolicy(String type,
|
||||
QuerySource query,
|
||||
List<String> indices,
|
||||
String matchField,
|
||||
List<String> enrichFields) {
|
||||
this(type, query, indices, matchField, enrichFields, Version.CURRENT);
|
||||
}
|
||||
|
||||
public EnrichPolicy(String type,
|
||||
QuerySource query,
|
||||
List<String> indices,
|
||||
String matchField,
|
||||
List<String> enrichFields,
|
||||
Version elasticsearchVersion) {
|
||||
this.type = type;
|
||||
this.query = query;
|
||||
this.indices = indices;
|
||||
this.matchField = matchField;
|
||||
this.enrichFields = enrichFields;
|
||||
this.elasticsearchVersion = elasticsearchVersion != null ? elasticsearchVersion : Version.CURRENT;
|
||||
}
|
||||
|
||||
public String getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public QuerySource getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
public List<String> getIndices() {
|
||||
return indices;
|
||||
}
|
||||
|
||||
public String getMatchField() {
|
||||
return matchField;
|
||||
}
|
||||
|
||||
public List<String> getEnrichFields() {
|
||||
return enrichFields;
|
||||
}
|
||||
|
||||
public Version getElasticsearchVersion() {
|
||||
return elasticsearchVersion;
|
||||
}
|
||||
|
||||
public static String getBaseName(String policyName) {
|
||||
return ENRICH_INDEX_NAME_BASE + policyName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(type);
|
||||
out.writeOptionalWriteable(query);
|
||||
out.writeStringCollection(indices);
|
||||
out.writeString(matchField);
|
||||
out.writeStringCollection(enrichFields);
|
||||
Version.writeVersion(elasticsearchVersion, out);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(type);
|
||||
{
|
||||
toInnerXContent(builder, params);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
private void toInnerXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
if (query != null) {
|
||||
builder.field(QUERY.getPreferredName(), query.getQueryAsMap());
|
||||
}
|
||||
builder.array(INDICES.getPreferredName(), indices.toArray(new String[0]));
|
||||
builder.field(MATCH_FIELD.getPreferredName(), matchField);
|
||||
builder.array(ENRICH_FIELDS.getPreferredName(), enrichFields.toArray(new String[0]));
|
||||
if (params.paramAsBoolean("include_version", false) && elasticsearchVersion != null) {
|
||||
builder.field(ELASTICSEARCH_VERSION.getPreferredName(), elasticsearchVersion.toString());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
EnrichPolicy policy = (EnrichPolicy) o;
|
||||
return type.equals(policy.type) &&
|
||||
Objects.equals(query, policy.query) &&
|
||||
indices.equals(policy.indices) &&
|
||||
matchField.equals(policy.matchField) &&
|
||||
enrichFields.equals(policy.enrichFields) &&
|
||||
elasticsearchVersion.equals(policy.elasticsearchVersion);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(
|
||||
type,
|
||||
query,
|
||||
indices,
|
||||
matchField,
|
||||
enrichFields,
|
||||
elasticsearchVersion
|
||||
);
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return Strings.toString(this);
|
||||
}
|
||||
|
||||
public static class QuerySource implements Writeable {
|
||||
|
||||
private final BytesReference query;
|
||||
private final XContentType contentType;
|
||||
|
||||
QuerySource(StreamInput in) throws IOException {
|
||||
this(in.readBytesReference(), in.readEnum(XContentType.class));
|
||||
}
|
||||
|
||||
public QuerySource(BytesReference query, XContentType contentType) {
|
||||
this.query = query;
|
||||
this.contentType = contentType;
|
||||
}
|
||||
|
||||
public BytesReference getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
public Map<String, Object> getQueryAsMap() {
|
||||
return XContentHelper.convertToMap(query, true, contentType).v2();
|
||||
}
|
||||
|
||||
public XContentType getContentType() {
|
||||
return contentType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeBytesReference(query);
|
||||
out.writeEnum(contentType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
QuerySource that = (QuerySource) o;
|
||||
return query.equals(that.query) &&
|
||||
contentType == that.contentType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(query, contentType);
|
||||
}
|
||||
}
|
||||
|
||||
public static class NamedPolicy implements Writeable, ToXContentFragment {
|
||||
|
||||
static final ParseField NAME = new ParseField("name");
|
||||
@SuppressWarnings("unchecked")
|
||||
static final ConstructingObjectParser<NamedPolicy, String> PARSER = new ConstructingObjectParser<>(
|
||||
"named_policy",
|
||||
false,
|
||||
(args, policyType) -> new NamedPolicy(
|
||||
(String) args[0],
|
||||
new EnrichPolicy(policyType,
|
||||
(QuerySource) args[1],
|
||||
(List<String>) args[2],
|
||||
(String) args[3],
|
||||
(List<String>) args[4],
|
||||
(Version) args[5])
|
||||
)
|
||||
);
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME);
|
||||
declareCommonConstructorParsingOptions(PARSER);
|
||||
}
|
||||
|
||||
private final String name;
|
||||
private final EnrichPolicy policy;
|
||||
|
||||
public NamedPolicy(String name, EnrichPolicy policy) {
|
||||
this.name = name;
|
||||
this.policy = policy;
|
||||
}
|
||||
|
||||
public NamedPolicy(StreamInput in) throws IOException {
|
||||
name = in.readString();
|
||||
policy = new EnrichPolicy(in);
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public EnrichPolicy getPolicy() {
|
||||
return policy;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(name);
|
||||
policy.writeTo(out);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(policy.type);
|
||||
{
|
||||
builder.field(NAME.getPreferredName(), name);
|
||||
policy.toInnerXContent(builder, params);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
public static NamedPolicy fromXContent(XContentParser parser) throws IOException {
|
||||
Token token = parser.currentToken();
|
||||
if (token != Token.START_OBJECT) {
|
||||
token = parser.nextToken();
|
||||
}
|
||||
if (token != Token.START_OBJECT) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "unexpected token");
|
||||
}
|
||||
token = parser.nextToken();
|
||||
if (token != Token.FIELD_NAME) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "unexpected token");
|
||||
}
|
||||
String policyType = parser.currentName();
|
||||
token = parser.nextToken();
|
||||
if (token != Token.START_OBJECT) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "unexpected token");
|
||||
}
|
||||
NamedPolicy policy = PARSER.parse(parser, policyType);
|
||||
token = parser.nextToken();
|
||||
if (token != Token.END_OBJECT) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "unexpected token");
|
||||
}
|
||||
return policy;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
NamedPolicy that = (NamedPolicy) o;
|
||||
return name.equals(that.name) &&
|
||||
policy.equals(that.policy);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(name, policy);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,68 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.core.enrich.action;
|
||||
|
||||
import org.elasticsearch.action.ActionType;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.action.support.master.MasterNodeRequest;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
public class DeleteEnrichPolicyAction extends ActionType<AcknowledgedResponse> {
|
||||
|
||||
public static final DeleteEnrichPolicyAction INSTANCE = new DeleteEnrichPolicyAction();
|
||||
public static final String NAME = "cluster:admin/xpack/enrich/delete";
|
||||
|
||||
private DeleteEnrichPolicyAction() {
|
||||
super(NAME, AcknowledgedResponse::new);
|
||||
}
|
||||
|
||||
public static class Request extends MasterNodeRequest<DeleteEnrichPolicyAction.Request> {
|
||||
|
||||
private final String name;
|
||||
|
||||
public Request(String name) {
|
||||
this.name = Objects.requireNonNull(name, "name cannot be null");
|
||||
}
|
||||
|
||||
public Request(StreamInput in) throws IOException {
|
||||
super(in);
|
||||
this.name = in.readString();
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeString(name);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ActionRequestValidationException validate() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
Request request = (Request) o;
|
||||
return name.equals(request.name);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(name);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,249 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.core.enrich.action;
|
||||
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.action.ActionType;
|
||||
import org.elasticsearch.action.support.master.MasterNodeRequest;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.tasks.TaskInfo;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public class EnrichStatsAction extends ActionType<EnrichStatsAction.Response> {
|
||||
|
||||
public static final EnrichStatsAction INSTANCE = new EnrichStatsAction();
|
||||
public static final String NAME = "cluster:admin/xpack/enrich/stats";
|
||||
|
||||
private EnrichStatsAction() {
|
||||
super(NAME, Response::new);
|
||||
}
|
||||
|
||||
public static class Request extends MasterNodeRequest<Request> {
|
||||
|
||||
public Request() {
|
||||
}
|
||||
|
||||
public Request(StreamInput in) throws IOException {
|
||||
super(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ActionRequestValidationException validate() {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public static class Response extends ActionResponse implements ToXContentObject {
|
||||
|
||||
private final List<ExecutingPolicy> executingPolicies;
|
||||
private final List<CoordinatorStats> coordinatorStats;
|
||||
|
||||
public Response(List<ExecutingPolicy> executingPolicies, List<CoordinatorStats> coordinatorStats) {
|
||||
this.executingPolicies = executingPolicies;
|
||||
this.coordinatorStats = coordinatorStats;
|
||||
}
|
||||
|
||||
public Response(StreamInput in) throws IOException {
|
||||
super(in);
|
||||
executingPolicies = in.readList(ExecutingPolicy::new);
|
||||
coordinatorStats = in.readList(CoordinatorStats::new);
|
||||
}
|
||||
|
||||
public List<ExecutingPolicy> getExecutingPolicies() {
|
||||
return executingPolicies;
|
||||
}
|
||||
|
||||
public List<CoordinatorStats> getCoordinatorStats() {
|
||||
return coordinatorStats;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeList(executingPolicies);
|
||||
out.writeList(coordinatorStats);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.startArray("executing_policies");
|
||||
for (ExecutingPolicy policy : executingPolicies) {
|
||||
builder.startObject();
|
||||
policy.toXContent(builder, params);
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endArray();
|
||||
builder.startArray("coordinator_stats");
|
||||
for (CoordinatorStats entry : coordinatorStats) {
|
||||
builder.startObject();
|
||||
entry.toXContent(builder, params);
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endArray();
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
Response response = (Response) o;
|
||||
return executingPolicies.equals(response.executingPolicies) &&
|
||||
coordinatorStats.equals(response.coordinatorStats);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(executingPolicies, coordinatorStats);
|
||||
}
|
||||
|
||||
public static class CoordinatorStats implements Writeable, ToXContentFragment {
|
||||
|
||||
private final String nodeId;
|
||||
private final int queueSize;
|
||||
private final int remoteRequestsCurrent;
|
||||
private final long remoteRequestsTotal;
|
||||
private final long executedSearchesTotal;
|
||||
|
||||
public CoordinatorStats(String nodeId,
|
||||
int queueSize,
|
||||
int remoteRequestsCurrent,
|
||||
long remoteRequestsTotal,
|
||||
long executedSearchesTotal) {
|
||||
this.nodeId = nodeId;
|
||||
this.queueSize = queueSize;
|
||||
this.remoteRequestsCurrent = remoteRequestsCurrent;
|
||||
this.remoteRequestsTotal = remoteRequestsTotal;
|
||||
this.executedSearchesTotal = executedSearchesTotal;
|
||||
}
|
||||
|
||||
public CoordinatorStats(StreamInput in) throws IOException {
|
||||
this(in.readString(), in.readVInt(), in.readVInt(), in.readVLong(), in.readVLong());
|
||||
}
|
||||
|
||||
public String getNodeId() {
|
||||
return nodeId;
|
||||
}
|
||||
|
||||
public int getQueueSize() {
|
||||
return queueSize;
|
||||
}
|
||||
|
||||
public int getRemoteRequestsCurrent() {
|
||||
return remoteRequestsCurrent;
|
||||
}
|
||||
|
||||
public long getRemoteRequestsTotal() {
|
||||
return remoteRequestsTotal;
|
||||
}
|
||||
|
||||
public long getExecutedSearchesTotal() {
|
||||
return executedSearchesTotal;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(nodeId);
|
||||
out.writeVInt(queueSize);
|
||||
out.writeVInt(remoteRequestsCurrent);
|
||||
out.writeVLong(remoteRequestsTotal);
|
||||
out.writeVLong(executedSearchesTotal);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field("node_id", nodeId);
|
||||
builder.field("queue_size", queueSize);
|
||||
builder.field("remote_requests_current", remoteRequestsCurrent);
|
||||
builder.field("remote_requests_total", remoteRequestsTotal);
|
||||
builder.field("executed_searches_total", executedSearchesTotal);
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
CoordinatorStats stats = (CoordinatorStats) o;
|
||||
return Objects.equals(nodeId, stats.nodeId) &&
|
||||
queueSize == stats.queueSize &&
|
||||
remoteRequestsCurrent == stats.remoteRequestsCurrent &&
|
||||
remoteRequestsTotal == stats.remoteRequestsTotal &&
|
||||
executedSearchesTotal == stats.executedSearchesTotal;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(nodeId, queueSize, remoteRequestsCurrent, remoteRequestsTotal, executedSearchesTotal);
|
||||
}
|
||||
}
|
||||
|
||||
public static class ExecutingPolicy implements Writeable, ToXContentFragment {
|
||||
|
||||
private final String name;
|
||||
private final TaskInfo taskInfo;
|
||||
|
||||
public ExecutingPolicy(String name, TaskInfo taskInfo) {
|
||||
this.name = name;
|
||||
this.taskInfo = taskInfo;
|
||||
}
|
||||
|
||||
ExecutingPolicy(StreamInput in) throws IOException {
|
||||
this(in.readString(), new TaskInfo(in));
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public TaskInfo getTaskInfo() {
|
||||
return taskInfo;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(name);
|
||||
taskInfo.writeTo(out);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field("name", name);
|
||||
builder.startObject("task");
|
||||
{
|
||||
builder.value(taskInfo);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
ExecutingPolicy that = (ExecutingPolicy) o;
|
||||
return name.equals(that.name) &&
|
||||
taskInfo.equals(that.taskInfo);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(name, taskInfo);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,151 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.core.enrich.action;
|
||||
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.action.ActionType;
|
||||
import org.elasticsearch.action.support.master.MasterNodeRequest;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.tasks.TaskId;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
public class ExecuteEnrichPolicyAction extends ActionType<ExecuteEnrichPolicyAction.Response> {
|
||||
|
||||
public static final ExecuteEnrichPolicyAction INSTANCE = new ExecuteEnrichPolicyAction();
|
||||
public static final String NAME = "cluster:admin/xpack/enrich/execute";
|
||||
|
||||
private ExecuteEnrichPolicyAction() {
|
||||
super(NAME, ExecuteEnrichPolicyAction.Response::new);
|
||||
}
|
||||
|
||||
public static class Request extends MasterNodeRequest<Request> {
|
||||
|
||||
private final String name;
|
||||
private boolean waitForCompletion;
|
||||
|
||||
public Request(String name) {
|
||||
this.name = Objects.requireNonNull(name, "name cannot be null");
|
||||
this.waitForCompletion = true;
|
||||
}
|
||||
|
||||
public Request(StreamInput in) throws IOException {
|
||||
super(in);
|
||||
name = in.readString();
|
||||
waitForCompletion = in.readBoolean();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeString(name);
|
||||
out.writeBoolean(waitForCompletion);
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public boolean isWaitForCompletion() {
|
||||
return waitForCompletion;
|
||||
}
|
||||
|
||||
public Request setWaitForCompletion(boolean waitForCompletion) {
|
||||
this.waitForCompletion = waitForCompletion;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ActionRequestValidationException validate() {
|
||||
return null;
|
||||
}
|
||||
|
||||
// This will be displayed in tasks api and allows stats api to figure out which policies are being executed.
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
Request request = (Request) o;
|
||||
return waitForCompletion == request.waitForCompletion &&
|
||||
Objects.equals(name, request.name);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(name, waitForCompletion);
|
||||
}
|
||||
}
|
||||
|
||||
public static class Response extends ActionResponse implements ToXContentObject {
|
||||
|
||||
private final TaskId taskId;
|
||||
private final ExecuteEnrichPolicyStatus status;
|
||||
|
||||
public Response(ExecuteEnrichPolicyStatus status) {
|
||||
this.taskId = null;
|
||||
this.status = status;
|
||||
}
|
||||
|
||||
public Response(TaskId taskId) {
|
||||
this.taskId = taskId;
|
||||
this.status = null;
|
||||
}
|
||||
|
||||
public TaskId getTaskId() {
|
||||
return taskId;
|
||||
}
|
||||
|
||||
public ExecuteEnrichPolicyStatus getStatus() {
|
||||
return status;
|
||||
}
|
||||
|
||||
public Response(StreamInput in) throws IOException {
|
||||
super(in);
|
||||
if (in.readBoolean()) {
|
||||
this.status = new ExecuteEnrichPolicyStatus(in);
|
||||
this.taskId = null;
|
||||
} else {
|
||||
this.taskId = TaskId.readFromStream(in);
|
||||
this.status = null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
boolean waitedForCompletion = status != null;
|
||||
out.writeBoolean(waitedForCompletion);
|
||||
if (waitedForCompletion) {
|
||||
status.writeTo(out);
|
||||
} else {
|
||||
taskId.writeTo(out);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
if (taskId != null) {
|
||||
builder.field("task", taskId.getNodeId() + ":" + taskId.getId());
|
||||
} else {
|
||||
builder.field("status", status);
|
||||
}
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,67 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.core.enrich.action;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
|
||||
public class ExecuteEnrichPolicyStatus implements Task.Status {
|
||||
|
||||
public static final class PolicyPhases {
|
||||
private PolicyPhases() {}
|
||||
|
||||
public static final String SCHEDULED = "SCHEDULED";
|
||||
public static final String RUNNING = "RUNNING";
|
||||
public static final String COMPLETE = "COMPLETE";
|
||||
public static final String FAILED = "FAILED";
|
||||
}
|
||||
|
||||
public static final String NAME = "enrich-policy-execution";
|
||||
|
||||
private static final String PHASE_FIELD = "phase";
|
||||
|
||||
private final String phase;
|
||||
|
||||
public ExecuteEnrichPolicyStatus(String phase) {
|
||||
this.phase = phase;
|
||||
}
|
||||
|
||||
public ExecuteEnrichPolicyStatus(StreamInput in) throws IOException {
|
||||
this.phase = in.readString();
|
||||
}
|
||||
|
||||
public String getPhase() {
|
||||
return phase;
|
||||
}
|
||||
|
||||
public boolean isCompleted() {
|
||||
return PolicyPhases.COMPLETE.equals(phase);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(phase);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.field(PHASE_FIELD, phase);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,144 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.core.enrich.action;
|
||||
|
||||
import org.elasticsearch.action.ActionType;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.TreeMap;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class GetEnrichPolicyAction extends ActionType<GetEnrichPolicyAction.Response> {
|
||||
|
||||
public static final GetEnrichPolicyAction INSTANCE = new GetEnrichPolicyAction();
|
||||
public static final String NAME = "cluster:admin/xpack/enrich/get";
|
||||
|
||||
private GetEnrichPolicyAction() {
|
||||
super(NAME, Response::new);
|
||||
}
|
||||
|
||||
public static class Request extends MasterNodeReadRequest<Request> {
|
||||
|
||||
private final List<String> names;
|
||||
|
||||
public Request() {
|
||||
this.names = new ArrayList<>();
|
||||
}
|
||||
|
||||
public Request(String[] names) {
|
||||
this.names = Arrays.asList(names);
|
||||
}
|
||||
|
||||
public Request(StreamInput in) throws IOException {
|
||||
super(in);
|
||||
this.names = in.readStringList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ActionRequestValidationException validate() {
|
||||
return null;
|
||||
}
|
||||
|
||||
public List<String> getNames() {
|
||||
return names;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeStringCollection(names);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
Request request = (Request) o;
|
||||
return Objects.equals(names, request.names);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(names);
|
||||
}
|
||||
}
|
||||
|
||||
public static class Response extends ActionResponse implements ToXContentObject {
|
||||
|
||||
private final List<EnrichPolicy.NamedPolicy> policies;
|
||||
|
||||
public Response(Map<String, EnrichPolicy> policies) {
|
||||
Objects.requireNonNull(policies, "policies cannot be null");
|
||||
// use a treemap to guarantee ordering in the set, then transform it to the list of named policies
|
||||
this.policies = new TreeMap<>(policies).entrySet().stream()
|
||||
.map(entry -> new EnrichPolicy.NamedPolicy(entry.getKey(), entry.getValue())).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
public Response(StreamInput in) throws IOException {
|
||||
policies = in.readList(EnrichPolicy.NamedPolicy::new);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeList(policies);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.startArray("policies");
|
||||
{
|
||||
for (EnrichPolicy.NamedPolicy policy : policies) {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.startObject("config");
|
||||
{
|
||||
policy.toXContent(builder, params);
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
builder.endObject();
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
public List<EnrichPolicy.NamedPolicy> getPolicies() {
|
||||
return policies;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
Response response = (Response) o;
|
||||
return policies.equals(response.policies);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(policies);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,88 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.core.enrich.action;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.ActionType;
|
||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.action.support.master.MasterNodeRequest;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
public class PutEnrichPolicyAction extends ActionType<AcknowledgedResponse> {
|
||||
|
||||
public static final PutEnrichPolicyAction INSTANCE = new PutEnrichPolicyAction();
|
||||
public static final String NAME = "cluster:admin/xpack/enrich/put";
|
||||
|
||||
private PutEnrichPolicyAction() {
|
||||
super(NAME, AcknowledgedResponse::new);
|
||||
}
|
||||
|
||||
public static Request fromXContent(XContentParser parser, String name) throws IOException {
|
||||
return new Request(name, EnrichPolicy.fromXContent(parser));
|
||||
}
|
||||
|
||||
public static class Request extends MasterNodeRequest<PutEnrichPolicyAction.Request> {
|
||||
|
||||
private final EnrichPolicy policy;
|
||||
private final String name;
|
||||
|
||||
public Request(String name, EnrichPolicy policy) {
|
||||
this.name = Objects.requireNonNull(name, "name cannot be null");
|
||||
if (!Version.CURRENT.equals(policy.getElasticsearchVersion())) {
|
||||
throw new IllegalArgumentException("Cannot set [version_created] field on enrich policy [" + name +
|
||||
"]. Found [" + policy.getElasticsearchVersion() + "]");
|
||||
}
|
||||
this.policy = policy;
|
||||
}
|
||||
|
||||
public Request(StreamInput in) throws IOException {
|
||||
super(in);
|
||||
name = in.readString();
|
||||
policy = new EnrichPolicy(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeString(name);
|
||||
policy.writeTo(out);
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public EnrichPolicy getPolicy() {
|
||||
return policy;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ActionRequestValidationException validate() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
Request request = (Request) o;
|
||||
return policy.equals(request.policy) &&
|
||||
name.equals(request.name);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(policy, name);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,75 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.core.enrich.client;
|
||||
|
||||
import org.elasticsearch.action.ActionFuture;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.support.PlainActionFuture;
|
||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public class EnrichClient {
|
||||
|
||||
private final ElasticsearchClient client;
|
||||
|
||||
public EnrichClient(ElasticsearchClient client) {
|
||||
this.client = Objects.requireNonNull(client, "client");
|
||||
}
|
||||
|
||||
public void deleteEnrichPolicy(
|
||||
final DeleteEnrichPolicyAction.Request request,
|
||||
final ActionListener<AcknowledgedResponse> listener) {
|
||||
client.execute(DeleteEnrichPolicyAction.INSTANCE, request, listener);
|
||||
}
|
||||
|
||||
public ActionFuture<AcknowledgedResponse> deleteEnrichPolicy(final DeleteEnrichPolicyAction.Request request) {
|
||||
final PlainActionFuture<AcknowledgedResponse> listener = PlainActionFuture.newFuture();
|
||||
client.execute(DeleteEnrichPolicyAction.INSTANCE, request, listener);
|
||||
return listener;
|
||||
}
|
||||
|
||||
public void executeEnrichPolicy(
|
||||
final ExecuteEnrichPolicyAction.Request request,
|
||||
final ActionListener<ExecuteEnrichPolicyAction.Response> listener) {
|
||||
client.execute(ExecuteEnrichPolicyAction.INSTANCE, request, listener);
|
||||
}
|
||||
|
||||
public ActionFuture<ExecuteEnrichPolicyAction.Response> executeEnrichPolicy(final ExecuteEnrichPolicyAction.Request request) {
|
||||
final PlainActionFuture<ExecuteEnrichPolicyAction.Response> listener = PlainActionFuture.newFuture();
|
||||
client.execute(ExecuteEnrichPolicyAction.INSTANCE, request, listener);
|
||||
return listener;
|
||||
}
|
||||
|
||||
public void getEnrichPolicy(
|
||||
final GetEnrichPolicyAction.Request request,
|
||||
final ActionListener<GetEnrichPolicyAction.Response> listener) {
|
||||
client.execute(GetEnrichPolicyAction.INSTANCE, request, listener);
|
||||
}
|
||||
|
||||
public ActionFuture<GetEnrichPolicyAction.Response> getEnrichPolicy(final GetEnrichPolicyAction.Request request) {
|
||||
final PlainActionFuture<GetEnrichPolicyAction.Response> listener = PlainActionFuture.newFuture();
|
||||
client.execute(GetEnrichPolicyAction.INSTANCE, request, listener);
|
||||
return listener;
|
||||
}
|
||||
|
||||
public void putEnrichPolicy(
|
||||
final PutEnrichPolicyAction.Request request,
|
||||
final ActionListener<AcknowledgedResponse> listener) {
|
||||
client.execute(PutEnrichPolicyAction.INSTANCE, request, listener);
|
||||
}
|
||||
|
||||
public ActionFuture<AcknowledgedResponse> putEnrichPolicy(final PutEnrichPolicyAction.Request request) {
|
||||
final PlainActionFuture<AcknowledgedResponse> listener = PlainActionFuture.newFuture();
|
||||
client.execute(PutEnrichPolicyAction.INSTANCE, request, listener);
|
||||
return listener;
|
||||
}
|
||||
}
|
|
@ -774,6 +774,10 @@ public class RoleDescriptor implements ToXContentObject, Writeable {
|
|||
return this;
|
||||
}
|
||||
|
||||
public Builder indices(Collection<String> indices) {
|
||||
return indices(indices.toArray(new String[indices.size()]));
|
||||
}
|
||||
|
||||
public Builder privileges(String... privileges) {
|
||||
indicesPrivileges.privileges = privileges;
|
||||
return this;
|
||||
|
|
|
@ -84,6 +84,7 @@ public class ClusterPrivilegeResolver {
|
|||
Collections.unmodifiableSet(Sets.newHashSet("cluster:admin/slm/*", StartILMAction.NAME, StopILMAction.NAME, GetStatusAction.NAME));
|
||||
private static final Set<String> READ_SLM_PATTERN = Collections.unmodifiableSet(Sets.newHashSet(GetSnapshotLifecycleAction.NAME,
|
||||
GetStatusAction.NAME));
|
||||
private static final Set<String> MANAGE_ENRICH_AUTOMATON = Collections.unmodifiableSet(Sets.newHashSet("cluster:admin/xpack/enrich/*"));
|
||||
|
||||
public static final NamedClusterPrivilege NONE = new ActionClusterPrivilege("none", Collections.emptySet(), Collections.emptySet());
|
||||
public static final NamedClusterPrivilege ALL = new ActionClusterPrivilege("all", ALL_CLUSTER_PATTERN);
|
||||
|
@ -128,6 +129,7 @@ public class ClusterPrivilegeResolver {
|
|||
Sets.newHashSet(DelegatePkiAuthenticationAction.NAME, InvalidateTokenAction.NAME));
|
||||
|
||||
public static final NamedClusterPrivilege MANAGE_OWN_API_KEY = ManageOwnApiKeyClusterPrivilege.INSTANCE;
|
||||
public static final NamedClusterPrivilege MANAGE_ENRICH = new ActionClusterPrivilege("manage_enrich", MANAGE_ENRICH_AUTOMATON);
|
||||
|
||||
private static final Map<String, NamedClusterPrivilege> VALUES = Collections.unmodifiableMap(
|
||||
Stream.of(
|
||||
|
@ -162,7 +164,8 @@ public class ClusterPrivilegeResolver {
|
|||
MANAGE_SLM,
|
||||
READ_SLM,
|
||||
DELEGATE_PKI,
|
||||
MANAGE_OWN_API_KEY).collect(Collectors.toMap(cp -> cp.name(), cp -> cp)));
|
||||
MANAGE_OWN_API_KEY,
|
||||
MANAGE_ENRICH).collect(Collectors.toMap(cp -> cp.name(), cp -> cp)));
|
||||
|
||||
/**
|
||||
* Resolves a {@link NamedClusterPrivilege} from a given name if it exists.
|
||||
|
|
|
@ -264,6 +264,11 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
|
|||
.privileges("view_index_metadata")
|
||||
.allowRestrictedIndices(true)
|
||||
.build() }, null, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, null))
|
||||
.put("enrich_user", new RoleDescriptor("enrich_user", new String[]{ "manage_enrich", "manage_ingest_pipelines", "monitor" },
|
||||
new RoleDescriptor.IndicesPrivileges[]{ RoleDescriptor.IndicesPrivileges.builder()
|
||||
.indices(".enrich-*")
|
||||
.privileges("manage", "read", "write")
|
||||
.build() }, null, MetadataUtils.DEFAULT_RESERVED_METADATA))
|
||||
.immutableMap();
|
||||
}
|
||||
|
||||
|
|
|
@ -1093,6 +1093,62 @@
|
|||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"enrich_coordinator_stats" : {
|
||||
"properties": {
|
||||
"node_id": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"queue_size": {
|
||||
"type": "integer"
|
||||
},
|
||||
"remote_requests_current" : {
|
||||
"type": "long"
|
||||
},
|
||||
"remote_requests_total" : {
|
||||
"type": "long"
|
||||
},
|
||||
"executed_searches_total" : {
|
||||
"type": "long"
|
||||
}
|
||||
}
|
||||
},
|
||||
"enrich_executing_policy_stats": {
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"task": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"node": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"id": {
|
||||
"type": "long"
|
||||
},
|
||||
"type": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"action": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"description": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"start_time_in_millis": {
|
||||
"type": "date",
|
||||
"format": "epoch_millis"
|
||||
},
|
||||
"running_time_in_nanos": {
|
||||
"type": "long"
|
||||
},
|
||||
"cancellable": {
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,6 +8,10 @@ package org.elasticsearch.xpack.core.security.authz.privilege;
|
|||
import org.apache.lucene.util.automaton.Operations;
|
||||
import org.elasticsearch.common.util.set.Sets;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction;
|
||||
import org.elasticsearch.transport.TransportRequest;
|
||||
import org.elasticsearch.xpack.core.security.authc.Authentication;
|
||||
import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission;
|
||||
|
@ -177,6 +181,15 @@ public class PrivilegeTests extends ESTestCase {
|
|||
|
||||
}
|
||||
|
||||
// Verifies that the manage_enrich cluster privilege grants every enrich admin
// action while denying unrelated xpack admin actions.
public void testManageEnrichPrivilege() {
    verifyClusterActionAllowed(ClusterPrivilegeResolver.MANAGE_ENRICH, DeleteEnrichPolicyAction.NAME);
    verifyClusterActionAllowed(ClusterPrivilegeResolver.MANAGE_ENRICH, ExecuteEnrichPolicyAction.NAME);
    verifyClusterActionAllowed(ClusterPrivilegeResolver.MANAGE_ENRICH, GetEnrichPolicyAction.NAME);
    verifyClusterActionAllowed(ClusterPrivilegeResolver.MANAGE_ENRICH, PutEnrichPolicyAction.NAME);
    // The privilege is defined by the wildcard pattern "cluster:admin/xpack/enrich/*",
    // so an enrich admin action that does not exist yet is covered automatically.
    verifyClusterActionAllowed(ClusterPrivilegeResolver.MANAGE_ENRICH, "cluster:admin/xpack/enrich/brand_new_api");
    verifyClusterActionDenied(ClusterPrivilegeResolver.MANAGE_ENRICH, "cluster:admin/xpack/whatever");
}
|
||||
|
||||
public void testIlmPrivileges() {
|
||||
{
|
||||
verifyClusterActionAllowed(ClusterPrivilegeResolver.MANAGE_ILM, "cluster:admin/ilm/delete",
|
||||
|
|
|
@ -0,0 +1,40 @@
|
|||
evaluationDependsOn(xpackModule('core'))
|
||||
|
||||
apply plugin: 'elasticsearch.esplugin'
|
||||
esplugin {
|
||||
name 'x-pack-enrich'
|
||||
description 'Elasticsearch Expanded Pack Plugin - Enrich'
|
||||
classname 'org.elasticsearch.xpack.enrich.EnrichPlugin'
|
||||
extendedPlugins = ['x-pack-core']
|
||||
}
|
||||
archivesBaseName = 'x-pack-enrich'
|
||||
|
||||
dependencies {
|
||||
compileOnly project(path: xpackModule('core'), configuration: 'default')
|
||||
testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
|
||||
testCompile project(path: ':modules:ingest-common')
|
||||
testCompile project(path: xpackModule('monitoring'), configuration: 'testArtifacts')
|
||||
}
|
||||
|
||||
// No real integ tests in the module:
|
||||
integTest.enabled = false
|
||||
|
||||
// Instead we create a separate task to run the tests based on ESIntegTestCase
|
||||
task internalClusterTest(type: Test) {
|
||||
description = '🌈🌈🌈🦄 Welcome to fantasy integration tests land! 🦄🌈🌈🌈'
|
||||
mustRunAfter test
|
||||
|
||||
include '**/*IT.class'
|
||||
systemProperty 'es.set.netty.runtime.available.processors', 'false'
|
||||
}
|
||||
|
||||
check.dependsOn internalClusterTest
|
||||
|
||||
// add all sub-projects of the qa sub-project
|
||||
gradle.projectsEvaluated {
|
||||
project.subprojects
|
||||
.find { it.path == project.path + ":qa" }
|
||||
.subprojects
|
||||
.findAll { it.path.startsWith(project.path + ":qa") }
|
||||
.each { check.dependsOn it.check }
|
||||
}
|
|
@ -0,0 +1,17 @@
|
|||
import org.elasticsearch.gradle.test.RestIntegTestTask
|
||||
|
||||
apply plugin: 'elasticsearch.build'
|
||||
test.enabled = false
|
||||
|
||||
dependencies {
|
||||
compile project(':test:framework')
|
||||
}
|
||||
|
||||
subprojects {
|
||||
project.tasks.withType(RestIntegTestTask) {
|
||||
final File xPackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources')
|
||||
project.copyRestSpec.from(xPackResources) {
|
||||
include 'rest-api-spec/api/**'
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,6 @@
|
|||
apply plugin: 'elasticsearch.build'
|
||||
test.enabled = false
|
||||
|
||||
dependencies {
|
||||
compile project(':test:framework')
|
||||
}
|
|
@ -0,0 +1,196 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.test.enrich;
|
||||
|
||||
import org.apache.http.util.EntityUtils;
|
||||
import org.elasticsearch.client.Request;
|
||||
import org.elasticsearch.client.Response;
|
||||
import org.elasticsearch.client.ResponseException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
import org.junit.After;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
|
||||
|
||||
public abstract class CommonEnrichRestTestCase extends ESRestTestCase {
|
||||
|
||||
@After
|
||||
public void deletePolicies() throws Exception {
|
||||
Map<String, Object> responseMap = toMap(adminClient().performRequest(new Request("GET", "/_enrich/policy")));
|
||||
@SuppressWarnings("unchecked")
|
||||
List<Map<?,?>> policies = (List<Map<?,?>>) responseMap.get("policies");
|
||||
|
||||
for (Map<?, ?> entry: policies) {
|
||||
client().performRequest(new Request("DELETE", "/_enrich/policy/" +
|
||||
XContentMapValues.extractValue("config.match.name", entry)));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean preserveIndicesUponCompletion() {
|
||||
// In order to avoid monitoring from failing exporting docs to monitor index.
|
||||
return true;
|
||||
}
|
||||
|
||||
private void setupGenericLifecycleTest(boolean deletePipeilne) throws Exception {
|
||||
// Create the policy:
|
||||
Request putPolicyRequest = new Request("PUT", "/_enrich/policy/my_policy");
|
||||
putPolicyRequest.setJsonEntity(generatePolicySource("my-source-index"));
|
||||
assertOK(client().performRequest(putPolicyRequest));
|
||||
|
||||
// Add entry to source index and then refresh:
|
||||
Request indexRequest = new Request("PUT", "/my-source-index/_doc/elastic.co");
|
||||
indexRequest.setJsonEntity("{\"host\": \"elastic.co\",\"globalRank\": 25,\"tldRank\": 7,\"tld\": \"co\"}");
|
||||
assertOK(client().performRequest(indexRequest));
|
||||
Request refreshRequest = new Request("POST", "/my-source-index/_refresh");
|
||||
assertOK(client().performRequest(refreshRequest));
|
||||
|
||||
// Execute the policy:
|
||||
Request executePolicyRequest = new Request("POST", "/_enrich/policy/my_policy/_execute");
|
||||
assertOK(client().performRequest(executePolicyRequest));
|
||||
|
||||
// Create pipeline
|
||||
Request putPipelineRequest = new Request("PUT", "/_ingest/pipeline/my_pipeline");
|
||||
putPipelineRequest.setJsonEntity("{\"processors\":[" +
|
||||
"{\"enrich\":{\"policy_name\":\"my_policy\",\"field\":\"host\",\"target_field\":\"entry\"}}" +
|
||||
"]}");
|
||||
assertOK(client().performRequest(putPipelineRequest));
|
||||
|
||||
// Index document using pipeline with enrich processor:
|
||||
indexRequest = new Request("PUT", "/my-index/_doc/1");
|
||||
indexRequest.addParameter("pipeline", "my_pipeline");
|
||||
indexRequest.setJsonEntity("{\"host\": \"elastic.co\"}");
|
||||
assertOK(client().performRequest(indexRequest));
|
||||
|
||||
// Check if document has been enriched
|
||||
Request getRequest = new Request("GET", "/my-index/_doc/1");
|
||||
Map<String, Object> response = toMap(client().performRequest(getRequest));
|
||||
Map<?, ?> entry = (Map<?, ?>) ((Map<?, ?>) response.get("_source")).get("entry");
|
||||
assertThat(entry.size(), equalTo(4));
|
||||
assertThat(entry.get("host"), equalTo("elastic.co"));
|
||||
assertThat(entry.get("tld"), equalTo("co"));
|
||||
assertThat(entry.get("globalRank"), equalTo(25));
|
||||
assertThat(entry.get("tldRank"), equalTo(7));
|
||||
|
||||
if (deletePipeilne) {
|
||||
// delete the pipeline so the policies can be deleted
|
||||
client().performRequest(new Request("DELETE", "/_ingest/pipeline/my_pipeline"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testBasicFlow() throws Exception {
|
||||
setupGenericLifecycleTest(true);
|
||||
assertBusy(CommonEnrichRestTestCase::verifyEnrichMonitoring, 1, TimeUnit.MINUTES);
|
||||
}
|
||||
|
||||
public void testImmutablePolicy() throws IOException {
|
||||
Request putPolicyRequest = new Request("PUT", "/_enrich/policy/my_policy");
|
||||
putPolicyRequest.setJsonEntity(generatePolicySource("my-source-index"));
|
||||
assertOK(client().performRequest(putPolicyRequest));
|
||||
|
||||
ResponseException exc = expectThrows(ResponseException.class, () -> client().performRequest(putPolicyRequest));
|
||||
assertTrue(exc.getMessage().contains("policy [my_policy] already exists"));
|
||||
}
|
||||
|
||||
public void testDeleteIsCaseSensitive() throws Exception {
|
||||
Request putPolicyRequest = new Request("PUT", "/_enrich/policy/my_policy");
|
||||
putPolicyRequest.setJsonEntity(generatePolicySource("my-source-index"));
|
||||
assertOK(client().performRequest(putPolicyRequest));
|
||||
|
||||
ResponseException exc = expectThrows(ResponseException.class,
|
||||
() -> client().performRequest(new Request("DELETE", "/_enrich/policy/MY_POLICY")));
|
||||
assertTrue(exc.getMessage().contains("policy [MY_POLICY] not found"));
|
||||
}
|
||||
|
||||
public void testDeleteExistingPipeline() throws Exception {
|
||||
// lets not delete the pipeline at first, to test the failure
|
||||
setupGenericLifecycleTest(false);
|
||||
|
||||
Request putPipelineRequest = new Request("PUT", "/_ingest/pipeline/another_pipeline");
|
||||
putPipelineRequest.setJsonEntity("{\"processors\":[" +
|
||||
"{\"enrich\":{\"policy_name\":\"my_policy\",\"field\":\"host\",\"target_field\":\"entry\"}}" +
|
||||
"]}");
|
||||
assertOK(client().performRequest(putPipelineRequest));
|
||||
|
||||
ResponseException exc = expectThrows(ResponseException.class,
|
||||
() -> client().performRequest(new Request("DELETE", "/_enrich/policy/my_policy")));
|
||||
assertTrue(exc.getMessage().contains("Could not delete policy [my_policy] because" +
|
||||
" a pipeline is referencing it [my_pipeline, another_pipeline]"));
|
||||
|
||||
// delete the pipelines so the policies can be deleted
|
||||
client().performRequest(new Request("DELETE", "/_ingest/pipeline/my_pipeline"));
|
||||
client().performRequest(new Request("DELETE", "/_ingest/pipeline/another_pipeline"));
|
||||
|
||||
// verify the delete did not happen
|
||||
Request getRequest = new Request("GET", "/_enrich/policy/my_policy");
|
||||
assertOK(client().performRequest(getRequest));
|
||||
}
|
||||
|
||||
public static String generatePolicySource(String index) throws IOException {
|
||||
XContentBuilder source = jsonBuilder().startObject().startObject("match");
|
||||
{
|
||||
source.field("indices", index);
|
||||
if (randomBoolean()) {
|
||||
source.field("query", QueryBuilders.matchAllQuery());
|
||||
}
|
||||
source.field("match_field", "host");
|
||||
source.field("enrich_fields", new String[] {"globalRank", "tldRank", "tld"});
|
||||
}
|
||||
source.endObject().endObject();
|
||||
return Strings.toString(source);
|
||||
}
|
||||
|
||||
private static Map<String, Object> toMap(Response response) throws IOException {
|
||||
return toMap(EntityUtils.toString(response.getEntity()));
|
||||
}
|
||||
|
||||
private static Map<String, Object> toMap(String response) {
|
||||
return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false);
|
||||
}
|
||||
|
||||
private static void verifyEnrichMonitoring() throws IOException {
|
||||
Request request = new Request("GET", "/.monitoring-*/_search");
|
||||
request.setJsonEntity("{\"query\": {\"term\": {\"type\": \"enrich_coordinator_stats\"}}}");
|
||||
Map<String, ?> response;
|
||||
try {
|
||||
response = toMap(adminClient().performRequest(request));
|
||||
} catch (ResponseException e) {
|
||||
throw new AssertionError("error while searching", e);
|
||||
}
|
||||
|
||||
int maxRemoteRequestsTotal = 0;
|
||||
int maxExecutedSearchesTotal = 0;
|
||||
|
||||
List<?> hits = (List<?>) XContentMapValues.extractValue("hits.hits", response);
|
||||
assertThat(hits.size(), greaterThanOrEqualTo(1));
|
||||
|
||||
for (int i = 0; i < hits.size(); i++) {
|
||||
Map<?, ?> hit = (Map<?, ?>) hits.get(i);
|
||||
|
||||
int foundRemoteRequestsTotal =
|
||||
(int) XContentMapValues.extractValue("_source.enrich_coordinator_stats.remote_requests_total", hit);
|
||||
maxRemoteRequestsTotal = Math.max(maxRemoteRequestsTotal, foundRemoteRequestsTotal);
|
||||
int foundExecutedSearchesTotal =
|
||||
(int) XContentMapValues.extractValue("_source.enrich_coordinator_stats.executed_searches_total", hit);
|
||||
maxExecutedSearchesTotal = Math.max(maxExecutedSearchesTotal, foundExecutedSearchesTotal);
|
||||
}
|
||||
|
||||
assertThat(maxRemoteRequestsTotal, greaterThanOrEqualTo(1));
|
||||
assertThat(maxExecutedSearchesTotal, greaterThanOrEqualTo(1));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,19 @@
|
|||
apply plugin: 'elasticsearch.testclusters'
|
||||
apply plugin: 'elasticsearch.standalone-rest-test'
|
||||
apply plugin: 'elasticsearch.rest-test'
|
||||
|
||||
dependencies {
|
||||
testCompile project(path: xpackModule('enrich'), configuration: 'runtime')
|
||||
testCompile project(path: xpackModule('core'), configuration: 'runtime')
|
||||
testCompile project(path: xpackModule('enrich:qa:common'), configuration: 'runtime')}
|
||||
|
||||
testClusters.integTest {
|
||||
testDistribution = 'DEFAULT'
|
||||
extraConfigFile 'roles.yml', file('roles.yml')
|
||||
user username: "test_admin", password: "x-pack-test-password", role: "superuser"
|
||||
user username: "test_enrich", password: "x-pack-test-password", role: "enrich_user,integ_test_role"
|
||||
user username: "test_enrich_no_privs", password: "x-pack-test-password", role: "enrich_no_privs"
|
||||
setting 'xpack.license.self_generated.type', 'basic'
|
||||
setting 'xpack.security.enabled', 'true'
|
||||
setting 'xpack.monitoring.collection.enabled', 'true'
|
||||
}
|
|
@ -0,0 +1,18 @@
|
|||
integ_test_role:
|
||||
indices:
|
||||
- names: [ 'my-index', 'my-source-index' ]
|
||||
privileges:
|
||||
- manage
|
||||
- read
|
||||
- write
|
||||
|
||||
enrich_no_privs:
|
||||
cluster:
|
||||
- manage_ingest_pipelines
|
||||
- monitor
|
||||
indices:
|
||||
- names: [ '.enrich-my_policy*', 'my-index', 'my-source-index' ]
|
||||
privileges:
|
||||
- manage
|
||||
- read
|
||||
- write
|
|
@ -0,0 +1,42 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import org.elasticsearch.client.Request;
|
||||
import org.elasticsearch.client.ResponseException;
|
||||
import org.elasticsearch.common.settings.SecureString;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.test.enrich.CommonEnrichRestTestCase;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
|
||||
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
|
||||
|
||||
public class EnrichSecurityFailureIT extends ESRestTestCase {
|
||||
|
||||
@Override
|
||||
protected Settings restClientSettings() {
|
||||
String token = basicAuthHeaderValue("test_enrich_no_privs", new SecureString("x-pack-test-password".toCharArray()));
|
||||
return Settings.builder()
|
||||
.put(ThreadContext.PREFIX + ".Authorization", token)
|
||||
.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Settings restAdminSettings() {
|
||||
String token = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray()));
|
||||
return Settings.builder()
|
||||
.put(ThreadContext.PREFIX + ".Authorization", token)
|
||||
.build();
|
||||
}
|
||||
|
||||
public void testFailure() throws Exception {
|
||||
Request putPolicyRequest = new Request("PUT", "/_enrich/policy/my_policy");
|
||||
putPolicyRequest.setJsonEntity(CommonEnrichRestTestCase.generatePolicySource("my-source-index"));
|
||||
ResponseException exc = expectThrows(ResponseException.class, () -> client().performRequest(putPolicyRequest));
|
||||
assertTrue(exc.getMessage().contains("action [cluster:admin/xpack/enrich/put] is unauthorized for user [test_enrich_no_privs]"));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,45 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import org.elasticsearch.client.Request;
|
||||
import org.elasticsearch.client.ResponseException;
|
||||
import org.elasticsearch.common.settings.SecureString;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.test.enrich.CommonEnrichRestTestCase;
|
||||
|
||||
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
|
||||
public class EnrichSecurityIT extends CommonEnrichRestTestCase {
|
||||
|
||||
@Override
|
||||
protected Settings restClientSettings() {
|
||||
String token = basicAuthHeaderValue("test_enrich", new SecureString("x-pack-test-password".toCharArray()));
|
||||
return Settings.builder()
|
||||
.put(ThreadContext.PREFIX + ".Authorization", token)
|
||||
.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Settings restAdminSettings() {
|
||||
String token = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray()));
|
||||
return Settings.builder()
|
||||
.put(ThreadContext.PREFIX + ".Authorization", token)
|
||||
.build();
|
||||
}
|
||||
|
||||
public void testInsufficientPermissionsOnNonExistentIndex() throws Exception {
|
||||
// This test is here because it requires a valid user that has permission to execute policy PUTs but should fail if the user
|
||||
// does not have access to read the backing indices used to enrich the data.
|
||||
Request putPolicyRequest = new Request("PUT", "/_enrich/policy/my_policy");
|
||||
putPolicyRequest.setJsonEntity(generatePolicySource("some-other-index"));
|
||||
ResponseException exc = expectThrows(ResponseException.class, () -> client().performRequest(putPolicyRequest));
|
||||
assertThat(exc.getMessage(),
|
||||
containsString("unable to store policy because no indices match with the specified index patterns [some-other-index]"));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,14 @@
|
|||
apply plugin: 'elasticsearch.testclusters'
|
||||
apply plugin: 'elasticsearch.standalone-rest-test'
|
||||
apply plugin: 'elasticsearch.rest-test'
|
||||
|
||||
dependencies {
|
||||
testCompile project(path: xpackModule('enrich'), configuration: 'runtime')
|
||||
testCompile project(path: xpackModule('enrich:qa:common'), configuration: 'runtime')
|
||||
}
|
||||
|
||||
testClusters.integTest {
|
||||
testDistribution = 'DEFAULT'
|
||||
setting 'xpack.license.self_generated.type', 'basic'
|
||||
setting 'xpack.monitoring.collection.enabled', 'true'
|
||||
}
|
|
@ -0,0 +1,11 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import org.elasticsearch.test.enrich.CommonEnrichRestTestCase;
|
||||
|
||||
public class EnrichIT extends CommonEnrichRestTestCase {
|
||||
}
|
|
@ -0,0 +1,24 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
|
||||
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
|
||||
|
||||
public class EnrichRestIT extends ESClientYamlSuiteTestCase {
|
||||
|
||||
public EnrichRestIT(final ClientYamlTestCandidate testCandidate) {
|
||||
super(testCandidate);
|
||||
}
|
||||
|
||||
@ParametersFactory
|
||||
public static Iterable<Object[]> parameters() throws Exception {
|
||||
return ESClientYamlSuiteTestCase.createParameters();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,48 @@
|
|||
---
|
||||
"Test enrich crud apis":
|
||||
|
||||
- do:
|
||||
enrich.put_policy:
|
||||
name: policy-crud
|
||||
body:
|
||||
match:
|
||||
indices: ["bar*"]
|
||||
match_field: baz
|
||||
enrich_fields: ["a", "b"]
|
||||
- is_true: acknowledged
|
||||
|
||||
- do:
|
||||
enrich.execute_policy:
|
||||
name: policy-crud
|
||||
- match: { status.phase: "COMPLETE" }
|
||||
|
||||
- do:
|
||||
enrich.get_policy:
|
||||
name: policy-crud
|
||||
- length: { policies: 1 }
|
||||
- match: { policies.0.config.match.name: policy-crud }
|
||||
- match: { policies.0.config.match.indices: ["bar*"] }
|
||||
- match: { policies.0.config.match.match_field: baz }
|
||||
- match: { policies.0.config.match.enrich_fields: ["a", "b"] }
|
||||
|
||||
- do:
|
||||
enrich.get_policy: {}
|
||||
- length: { policies: 1 }
|
||||
- match: { policies.0.config.match.name: policy-crud }
|
||||
- match: { policies.0.config.match.indices: ["bar*"] }
|
||||
- match: { policies.0.config.match.match_field: baz }
|
||||
- match: { policies.0.config.match.enrich_fields: ["a", "b"] }
|
||||
|
||||
- do:
|
||||
enrich.stats: {}
|
||||
- length: { executing_policies: 0}
|
||||
- length: { coordinator_stats: 1}
|
||||
- match: { coordinator_stats.0.queue_size: 0}
|
||||
- match: { coordinator_stats.0.remote_requests_current: 0}
|
||||
- gte: { coordinator_stats.0.remote_requests_total: 0}
|
||||
- gte: { coordinator_stats.0.executed_searches_total: 0}
|
||||
|
||||
- do:
|
||||
enrich.delete_policy:
|
||||
name: policy-crud
|
||||
- is_true: acknowledged
|
|
@ -0,0 +1,167 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.routing.Preference;
|
||||
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.ingest.AbstractProcessor;
|
||||
import org.elasticsearch.ingest.IngestDocument;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
|
||||
import org.elasticsearch.xpack.enrich.action.EnrichCoordinatorProxyAction;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.function.BiConsumer;
|
||||
|
||||
public abstract class AbstractEnrichProcessor extends AbstractProcessor {
|
||||
|
||||
private final String policyName;
|
||||
private final BiConsumer<SearchRequest, BiConsumer<SearchResponse, Exception>> searchRunner;
|
||||
private final String field;
|
||||
private final String targetField;
|
||||
private final boolean ignoreMissing;
|
||||
private final boolean overrideEnabled;
|
||||
protected final String matchField;
|
||||
protected final int maxMatches;
|
||||
|
||||
protected AbstractEnrichProcessor(String tag, Client client, String policyName, String field, String targetField,
|
||||
boolean ignoreMissing, boolean overrideEnabled, String matchField, int maxMatches) {
|
||||
this(tag, createSearchRunner(client), policyName, field, targetField, ignoreMissing, overrideEnabled, matchField, maxMatches);
|
||||
}
|
||||
|
||||
protected AbstractEnrichProcessor(String tag,
|
||||
BiConsumer<SearchRequest, BiConsumer<SearchResponse, Exception>> searchRunner,
|
||||
String policyName, String field, String targetField, boolean ignoreMissing, boolean overrideEnabled,
|
||||
String matchField, int maxMatches) {
|
||||
super(tag);
|
||||
this.policyName = policyName;
|
||||
this.searchRunner = searchRunner;
|
||||
this.field = field;
|
||||
this.targetField = targetField;
|
||||
this.ignoreMissing = ignoreMissing;
|
||||
this.overrideEnabled = overrideEnabled;
|
||||
this.matchField = matchField;
|
||||
this.maxMatches = maxMatches;
|
||||
}
|
||||
|
||||
public abstract QueryBuilder getQueryBuilder(Object fieldValue);
|
||||
|
||||
@Override
|
||||
public void execute(IngestDocument ingestDocument, BiConsumer<IngestDocument, Exception> handler) {
|
||||
try {
|
||||
// If a document does not have the enrich key, return the unchanged document
|
||||
final Object value = ingestDocument.getFieldValue(field, Object.class, ignoreMissing);
|
||||
if (value == null) {
|
||||
handler.accept(ingestDocument, null);
|
||||
return;
|
||||
}
|
||||
|
||||
QueryBuilder queryBuilder = getQueryBuilder(value);
|
||||
ConstantScoreQueryBuilder constantScore = new ConstantScoreQueryBuilder(queryBuilder);
|
||||
SearchSourceBuilder searchBuilder = new SearchSourceBuilder();
|
||||
searchBuilder.from(0);
|
||||
searchBuilder.size(maxMatches);
|
||||
searchBuilder.trackScores(false);
|
||||
searchBuilder.fetchSource(true);
|
||||
searchBuilder.query(constantScore);
|
||||
SearchRequest req = new SearchRequest();
|
||||
req.indices(EnrichPolicy.getBaseName(getPolicyName()));
|
||||
req.preference(Preference.LOCAL.type());
|
||||
req.source(searchBuilder);
|
||||
|
||||
searchRunner.accept(req, (searchResponse, e) -> {
|
||||
if (e != null) {
|
||||
handler.accept(null, e);
|
||||
return;
|
||||
}
|
||||
|
||||
// If the index is empty, return the unchanged document
|
||||
// If the enrich key does not exist in the index, throw an error
|
||||
// If no documents match the key, return the unchanged document
|
||||
SearchHit[] searchHits = searchResponse.getHits().getHits();
|
||||
if (searchHits.length < 1) {
|
||||
handler.accept(ingestDocument, null);
|
||||
return;
|
||||
}
|
||||
|
||||
if (overrideEnabled || ingestDocument.hasField(targetField) == false) {
|
||||
if (maxMatches == 1) {
|
||||
Map<String, Object> firstDocument = searchHits[0].getSourceAsMap();
|
||||
ingestDocument.setFieldValue(targetField, firstDocument);
|
||||
} else {
|
||||
List<Map<String, Object>> enrichDocuments = new ArrayList<>(searchHits.length);
|
||||
for (SearchHit searchHit : searchHits) {
|
||||
Map<String, Object> enrichDocument = searchHit.getSourceAsMap();
|
||||
enrichDocuments.add(enrichDocument);
|
||||
}
|
||||
ingestDocument.setFieldValue(targetField, enrichDocuments);
|
||||
}
|
||||
}
|
||||
handler.accept(ingestDocument, null);
|
||||
});
|
||||
} catch (Exception e) {
|
||||
handler.accept(null, e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
|
||||
throw new UnsupportedOperationException("this method should not get executed");
|
||||
}
|
||||
|
||||
public String getPolicyName() {
|
||||
return policyName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getType() {
|
||||
return EnrichProcessorFactory.TYPE;
|
||||
}
|
||||
|
||||
String getField() {
|
||||
return field;
|
||||
}
|
||||
|
||||
public String getTargetField() {
|
||||
return targetField;
|
||||
}
|
||||
|
||||
boolean isIgnoreMissing() {
|
||||
return ignoreMissing;
|
||||
}
|
||||
|
||||
boolean isOverrideEnabled() {
|
||||
return overrideEnabled;
|
||||
}
|
||||
|
||||
public String getMatchField() {
|
||||
return matchField;
|
||||
}
|
||||
|
||||
int getMaxMatches() {
|
||||
return maxMatches;
|
||||
}
|
||||
|
||||
private static BiConsumer<SearchRequest, BiConsumer<SearchResponse, Exception>> createSearchRunner(Client client) {
|
||||
return (req, handler) -> {
|
||||
client.execute(EnrichCoordinatorProxyAction.INSTANCE, req, ActionListener.wrap(
|
||||
resp -> {
|
||||
handler.accept(resp, null);
|
||||
},
|
||||
e -> {
|
||||
handler.accept(null, e);
|
||||
}));
|
||||
};
|
||||
}
|
||||
}
|
|
@ -0,0 +1,123 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.AbstractNamedDiffable;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.xpack.core.XPackPlugin;
|
||||
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Encapsulates enrich policies as custom metadata inside cluster state.
|
||||
*/
|
||||
public final class EnrichMetadata extends AbstractNamedDiffable<MetaData.Custom> implements XPackPlugin.XPackMetaDataCustom {
|
||||
|
||||
static final String TYPE = "enrich";
|
||||
|
||||
static final ParseField POLICIES = new ParseField("policies");
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static final ConstructingObjectParser<EnrichMetadata, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"enrich_metadata",
|
||||
args -> new EnrichMetadata((Map<String, EnrichPolicy>) args[0])
|
||||
);
|
||||
|
||||
static {
|
||||
PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> {
|
||||
Map<String, EnrichPolicy> patterns = new HashMap<>();
|
||||
String fieldName = null;
|
||||
for (XContentParser.Token token = p.nextToken(); token != XContentParser.Token.END_OBJECT; token = p.nextToken()) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
fieldName = p.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
patterns.put(fieldName, EnrichPolicy.fromXContent(p));
|
||||
} else {
|
||||
throw new ElasticsearchParseException("unexpected token [" + token + "]");
|
||||
}
|
||||
}
|
||||
return patterns;
|
||||
}, POLICIES);
|
||||
}
|
||||
|
||||
public static EnrichMetadata fromXContent(XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
|
||||
private final Map<String, EnrichPolicy> policies;
|
||||
|
||||
public EnrichMetadata(StreamInput in) throws IOException {
|
||||
this(in.readMap(StreamInput::readString, EnrichPolicy::new));
|
||||
}
|
||||
|
||||
public EnrichMetadata(Map<String, EnrichPolicy> policies) {
|
||||
this.policies = Collections.unmodifiableMap(policies);
|
||||
}
|
||||
|
||||
public Map<String, EnrichPolicy> getPolicies() {
|
||||
return policies;
|
||||
}
|
||||
|
||||
@Override
|
||||
public EnumSet<MetaData.XContentContext> context() {
|
||||
return MetaData.ALL_CONTEXTS;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Version getMinimalSupportedVersion() {
|
||||
return Version.V_7_5_0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeMap(policies, StreamOutput::writeString, (out1, value) -> value.writeTo(out1));
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(POLICIES.getPreferredName());
|
||||
for (Map.Entry<String, EnrichPolicy> entry : policies.entrySet()) {
|
||||
builder.startObject(entry.getKey());
|
||||
builder.value(entry.getValue());
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
EnrichMetadata that = (EnrichMetadata) o;
|
||||
return policies.equals(that.policies);
|
||||
}
|
||||
|
||||
    /**
     * Hash is derived solely from the policy map, consistent with {@code equals}.
     */
    @Override
    public int hashCode() {
        return Objects.hash(policies);
    }
|
||||
|
||||
}
|
|
@ -0,0 +1,203 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.NamedDiff;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNodes;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.settings.ClusterSettings;
|
||||
import org.elasticsearch.common.settings.IndexScopedSettings;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.SettingsFilter;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.env.NodeEnvironment;
|
||||
import org.elasticsearch.ingest.Processor;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.plugins.ActionPlugin;
|
||||
import org.elasticsearch.plugins.IngestPlugin;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestHandler;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.watcher.ResourceWatcherService;
|
||||
import org.elasticsearch.xpack.core.XPackPlugin;
|
||||
import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.enrich.action.EnrichCoordinatorProxyAction;
|
||||
import org.elasticsearch.xpack.enrich.action.EnrichCoordinatorStatsAction;
|
||||
import org.elasticsearch.xpack.enrich.action.EnrichShardMultiSearchAction;
|
||||
import org.elasticsearch.xpack.enrich.action.TransportDeleteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.enrich.action.TransportEnrichStatsAction;
|
||||
import org.elasticsearch.xpack.enrich.action.TransportExecuteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.enrich.action.TransportGetEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.enrich.action.TransportPutEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.enrich.rest.RestDeleteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.enrich.rest.RestEnrichStatsAction;
|
||||
import org.elasticsearch.xpack.enrich.rest.RestExecuteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.enrich.rest.RestGetEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.enrich.rest.RestPutEnrichPolicyAction;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import static java.util.Collections.emptyList;
|
||||
import static java.util.Collections.emptyMap;
|
||||
import static org.elasticsearch.xpack.core.XPackSettings.ENRICH_ENABLED_SETTING;
|
||||
|
||||
public class EnrichPlugin extends Plugin implements ActionPlugin, IngestPlugin {
|
||||
|
||||
static final Setting<Integer> ENRICH_FETCH_SIZE_SETTING =
|
||||
Setting.intSetting("enrich.fetch_size", 10000, 1, 1000000, Setting.Property.NodeScope);
|
||||
|
||||
static final Setting<Integer> ENRICH_MAX_CONCURRENT_POLICY_EXECUTIONS =
|
||||
Setting.intSetting("enrich.max_concurrent_policy_executions", 50, 1, Setting.Property.NodeScope);
|
||||
|
||||
static final Setting<TimeValue> ENRICH_CLEANUP_PERIOD =
|
||||
Setting.timeSetting("enrich.cleanup_period", new TimeValue(15, TimeUnit.MINUTES), Setting.Property.NodeScope);
|
||||
|
||||
public static final Setting<Integer> COORDINATOR_PROXY_MAX_CONCURRENT_REQUESTS =
|
||||
Setting.intSetting("enrich.coordinator_proxy.max_concurrent_requests", 8, 1, 10000, Setting.Property.NodeScope);
|
||||
|
||||
public static final Setting<Integer> COORDINATOR_PROXY_MAX_LOOKUPS_PER_REQUEST =
|
||||
Setting.intSetting("enrich.coordinator_proxy.max_lookups_per_request", 128, 1, 10000, Setting.Property.NodeScope);
|
||||
|
||||
static final Setting<Integer> ENRICH_MAX_FORCE_MERGE_ATTEMPTS =
|
||||
Setting.intSetting("enrich.max_force_merge_attempts", 3, 1, 10, Setting.Property.NodeScope);
|
||||
|
||||
private static final String QUEUE_CAPACITY_SETTING_NAME = "enrich.coordinator_proxy.queue_capacity";
|
||||
public static final Setting<Integer> COORDINATOR_PROXY_QUEUE_CAPACITY = new Setting<>(QUEUE_CAPACITY_SETTING_NAME,
|
||||
settings -> {
|
||||
int maxConcurrentRequests = COORDINATOR_PROXY_MAX_CONCURRENT_REQUESTS.get(settings);
|
||||
int maxLookupsPerRequest = COORDINATOR_PROXY_MAX_LOOKUPS_PER_REQUEST.get(settings);
|
||||
return String.valueOf(maxConcurrentRequests * maxLookupsPerRequest);
|
||||
},
|
||||
val -> Setting.parseInt(val, 1, Integer.MAX_VALUE, QUEUE_CAPACITY_SETTING_NAME),
|
||||
Setting.Property.NodeScope);
|
||||
|
||||
private final Settings settings;
|
||||
private final Boolean enabled;
|
||||
private final boolean transportClientMode;
|
||||
|
||||
public EnrichPlugin(final Settings settings) {
|
||||
this.settings = settings;
|
||||
this.enabled = ENRICH_ENABLED_SETTING.get(settings);
|
||||
this.transportClientMode = XPackPlugin.transportClientMode(settings);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
|
||||
if (enabled == false) {
|
||||
return emptyMap();
|
||||
}
|
||||
|
||||
EnrichProcessorFactory factory = new EnrichProcessorFactory(parameters.client);
|
||||
parameters.ingestService.addIngestClusterStateListener(factory);
|
||||
return Collections.singletonMap(EnrichProcessorFactory.TYPE, factory);
|
||||
}
|
||||
|
||||
protected XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); }
|
||||
|
||||
public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
|
||||
if (enabled == false) {
|
||||
return emptyList();
|
||||
}
|
||||
|
||||
return Arrays.asList(
|
||||
new ActionHandler<>(GetEnrichPolicyAction.INSTANCE, TransportGetEnrichPolicyAction.class),
|
||||
new ActionHandler<>(DeleteEnrichPolicyAction.INSTANCE, TransportDeleteEnrichPolicyAction.class),
|
||||
new ActionHandler<>(PutEnrichPolicyAction.INSTANCE, TransportPutEnrichPolicyAction.class),
|
||||
new ActionHandler<>(ExecuteEnrichPolicyAction.INSTANCE, TransportExecuteEnrichPolicyAction.class),
|
||||
new ActionHandler<>(EnrichStatsAction.INSTANCE, TransportEnrichStatsAction.class),
|
||||
new ActionHandler<>(EnrichCoordinatorProxyAction.INSTANCE, EnrichCoordinatorProxyAction.TransportAction.class),
|
||||
new ActionHandler<>(EnrichShardMultiSearchAction.INSTANCE, EnrichShardMultiSearchAction.TransportAction.class),
|
||||
new ActionHandler<>(EnrichCoordinatorStatsAction.INSTANCE, EnrichCoordinatorStatsAction.TransportAction.class)
|
||||
);
|
||||
}
|
||||
|
||||
public List<RestHandler> getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings,
|
||||
IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver,
|
||||
Supplier<DiscoveryNodes> nodesInCluster) {
|
||||
if (enabled == false) {
|
||||
return emptyList();
|
||||
}
|
||||
|
||||
return Arrays.asList(
|
||||
new RestGetEnrichPolicyAction(restController),
|
||||
new RestDeleteEnrichPolicyAction(restController),
|
||||
new RestPutEnrichPolicyAction(restController),
|
||||
new RestExecuteEnrichPolicyAction(restController),
|
||||
new RestEnrichStatsAction(restController)
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
|
||||
ResourceWatcherService resourceWatcherService, ScriptService scriptService,
|
||||
NamedXContentRegistry xContentRegistry, Environment environment,
|
||||
NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) {
|
||||
if (enabled == false || transportClientMode) {
|
||||
return emptyList();
|
||||
}
|
||||
|
||||
EnrichPolicyLocks enrichPolicyLocks = new EnrichPolicyLocks();
|
||||
EnrichPolicyMaintenanceService enrichPolicyMaintenanceService = new EnrichPolicyMaintenanceService(settings, client,
|
||||
clusterService, threadPool, enrichPolicyLocks);
|
||||
enrichPolicyMaintenanceService.initialize();
|
||||
return Arrays.asList(
|
||||
enrichPolicyLocks,
|
||||
new EnrichCoordinatorProxyAction.Coordinator(client, settings),
|
||||
enrichPolicyMaintenanceService
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<NamedWriteableRegistry.Entry> getNamedWriteables() {
|
||||
return Arrays.asList(
|
||||
new NamedWriteableRegistry.Entry(MetaData.Custom.class, EnrichMetadata.TYPE, EnrichMetadata::new),
|
||||
new NamedWriteableRegistry.Entry(NamedDiff.class, EnrichMetadata.TYPE,
|
||||
in -> EnrichMetadata.readDiffFrom(MetaData.Custom.class, EnrichMetadata.TYPE, in))
|
||||
);
|
||||
}
|
||||
|
||||
public List<NamedXContentRegistry.Entry> getNamedXContent() {
|
||||
return Arrays.asList(
|
||||
new NamedXContentRegistry.Entry(MetaData.Custom.class, new ParseField(EnrichMetadata.TYPE), EnrichMetadata::fromXContent)
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Setting<?>> getSettings() {
|
||||
return Arrays.asList(
|
||||
ENRICH_FETCH_SIZE_SETTING,
|
||||
ENRICH_MAX_CONCURRENT_POLICY_EXECUTIONS,
|
||||
ENRICH_CLEANUP_PERIOD,
|
||||
COORDINATOR_PROXY_MAX_CONCURRENT_REQUESTS,
|
||||
COORDINATOR_PROXY_MAX_LOOKUPS_PER_REQUEST,
|
||||
COORDINATOR_PROXY_QUEUE_CAPACITY,
|
||||
ENRICH_MAX_FORCE_MERGE_ATTEMPTS
|
||||
);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,203 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.Semaphore;
|
||||
import java.util.function.BiConsumer;
|
||||
import java.util.function.LongSupplier;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.tasks.TaskAwareRequest;
|
||||
import org.elasticsearch.tasks.TaskId;
|
||||
import org.elasticsearch.tasks.TaskListener;
|
||||
import org.elasticsearch.tasks.TaskManager;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
|
||||
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus;
|
||||
|
||||
/**
 * Coordinates enrich policy executions on the local node. For each run it:
 * 1) acquires the per-policy lock (one execution per policy at a time) plus a node-wide
 *    concurrency permit,
 * 2) registers an ExecuteEnrichPolicyTask with the task manager, and
 * 3) submits the policy runner to the generic thread pool, releasing the lock, the permit and
 *    the task registration when the run completes or fails.
 */
public class EnrichPolicyExecutor {

    private final ClusterService clusterService;
    private final Client client;
    private final TaskManager taskManager;
    private final ThreadPool threadPool;
    private final IndexNameExpressionResolver indexNameExpressionResolver;
    // Time source handed to the runner; injectable for tests
    private final LongSupplier nowSupplier;
    // Batch size when reading source documents (enrich.fetch_size)
    private final int fetchSize;
    // Per-policy locks, shared with the maintenance service for state snapshots
    private final EnrichPolicyLocks policyLocks;
    // Node-wide cap on concurrent executions (enrich.max_concurrent_policy_executions)
    private final int maximumConcurrentPolicyExecutions;
    // Force-merge retry limit passed to the runner (enrich.max_force_merge_attempts)
    private final int maxForceMergeAttempts;
    // Permits enforcing maximumConcurrentPolicyExecutions
    private final Semaphore policyExecutionPermits;

    public EnrichPolicyExecutor(Settings settings,
                                ClusterService clusterService,
                                Client client,
                                TaskManager taskManager,
                                ThreadPool threadPool,
                                IndexNameExpressionResolver indexNameExpressionResolver,
                                EnrichPolicyLocks policyLocks,
                                LongSupplier nowSupplier) {
        this.clusterService = clusterService;
        this.client = client;
        this.taskManager = taskManager;
        this.threadPool = threadPool;
        this.indexNameExpressionResolver = indexNameExpressionResolver;
        this.nowSupplier = nowSupplier;
        this.policyLocks = policyLocks;
        this.fetchSize = EnrichPlugin.ENRICH_FETCH_SIZE_SETTING.get(settings);
        this.maximumConcurrentPolicyExecutions = EnrichPlugin.ENRICH_MAX_CONCURRENT_POLICY_EXECUTIONS.get(settings);
        this.maxForceMergeAttempts = EnrichPlugin.ENRICH_MAX_FORCE_MERGE_ATTEMPTS.get(settings);
        this.policyExecutionPermits = new Semaphore(maximumConcurrentPolicyExecutions);
    }

    /**
     * Locks the named policy and then tries to take a node-wide execution permit. If no permit
     * is immediately available, the just-acquired policy lock is released again and an
     * EsRejectedExecutionException is thrown instead.
     */
    private void tryLockingPolicy(String policyName) {
        policyLocks.lockPolicy(policyName);
        if (policyExecutionPermits.tryAcquire() == false) {
            // Release policy lock, and throw a different exception
            policyLocks.releasePolicy(policyName);
            throw new EsRejectedExecutionException("Policy execution failed. Policy execution for [" + policyName + "] would exceed " +
                "maximum concurrent policy executions [" + maximumConcurrentPolicyExecutions + "]");
        }
    }

    /**
     * Returns the execution permit and then unlocks the policy — the reverse order of
     * tryLockingPolicy; the finally block guarantees the policy lock is released even if
     * releasing the permit throws.
     */
    private void releasePolicy(String policyName) {
        try {
            policyExecutionPermits.release();
        } finally {
            policyLocks.releasePolicy(policyName);
        }
    }

    /**
     * Wraps the caller's callbacks with the common completion cleanup: release the policy lock
     * and permit, unregister the task, then forward the result (or failure) to the caller.
     */
    private class PolicyCompletionListener implements ActionListener<ExecuteEnrichPolicyStatus> {
        private final String policyName;
        private final ExecuteEnrichPolicyTask task;
        private final BiConsumer<Task, ExecuteEnrichPolicyStatus> onResponse;
        private final BiConsumer<Task, Exception> onFailure;

        PolicyCompletionListener(String policyName, ExecuteEnrichPolicyTask task,
                                 BiConsumer<Task, ExecuteEnrichPolicyStatus> onResponse, BiConsumer<Task, Exception> onFailure) {
            this.policyName = policyName;
            this.task = task;
            this.onResponse = onResponse;
            this.onFailure = onFailure;
        }

        @Override
        public void onResponse(ExecuteEnrichPolicyStatus status) {
            assert ExecuteEnrichPolicyStatus.PolicyPhases.COMPLETE.equals(status.getPhase()) : "incomplete task returned";
            releasePolicy(policyName);
            try {
                taskManager.unregister(task);
            } finally {
                // Always notify the caller, even if unregistering threw
                onResponse.accept(task, status);
            }
        }

        @Override
        public void onFailure(Exception e) {
            // Set task status to failed to avoid having to catch and rethrow exceptions everywhere
            task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.FAILED));
            releasePolicy(policyName);
            try {
                taskManager.unregister(task);
            } finally {
                onFailure.accept(task, e);
            }
        }
    }

    /**
     * Creates the runnable that performs the actual policy work; protected so tests can
     * substitute a stub runner.
     */
    protected Runnable createPolicyRunner(String policyName, EnrichPolicy policy, ExecuteEnrichPolicyTask task,
                                          ActionListener<ExecuteEnrichPolicyStatus> listener) {
        return new EnrichPolicyRunner(policyName, policy, task, listener, clusterService, client, indexNameExpressionResolver, nowSupplier,
            fetchSize, maxForceMergeAttempts);
    }

    /**
     * Resolves the policy named in the request from the current cluster state.
     *
     * @throws IllegalArgumentException if no policy with that name exists
     */
    private EnrichPolicy getPolicy(ExecuteEnrichPolicyAction.Request request) {
        // Look up policy in policy store and execute it
        EnrichPolicy policy = EnrichStore.getPolicy(request.getName(), clusterService.state());
        if (policy == null) {
            throw new IllegalArgumentException("Policy execution failed. Could not locate policy with id [" + request.getName() + "]");
        }
        return policy;
    }

    /** Runs the policy named in the request, reporting through an ActionListener. */
    public Task runPolicy(ExecuteEnrichPolicyAction.Request request, ActionListener<ExecuteEnrichPolicyStatus> listener) {
        return runPolicy(request, getPolicy(request), listener);
    }

    /** Runs the policy named in the request, reporting through a TaskListener. */
    public Task runPolicy(ExecuteEnrichPolicyAction.Request request, TaskListener<ExecuteEnrichPolicyStatus> listener) {
        return runPolicy(request, getPolicy(request), listener);
    }

    /** Runs an explicitly supplied policy, adapting the ActionListener to the callback form. */
    public Task runPolicy(ExecuteEnrichPolicyAction.Request request, EnrichPolicy policy,
                          ActionListener<ExecuteEnrichPolicyStatus> listener) {
        return runPolicy(request, policy, (t, r) -> listener.onResponse(r), (t, e) -> listener.onFailure(e));
    }

    /** Runs an explicitly supplied policy, adapting the TaskListener to the callback form. */
    public Task runPolicy(ExecuteEnrichPolicyAction.Request request, EnrichPolicy policy,
                          TaskListener<ExecuteEnrichPolicyStatus> listener) {
        return runPolicy(request, policy, listener::onResponse, listener::onFailure);
    }

    /**
     * Common entry point: locks the policy for execution and submits the run, unlocking again
     * if submission itself fails (completion paths unlock via PolicyCompletionListener).
     */
    private Task runPolicy(ExecuteEnrichPolicyAction.Request request, EnrichPolicy policy,
                           BiConsumer<Task, ExecuteEnrichPolicyStatus> onResponse, BiConsumer<Task, Exception> onFailure) {
        tryLockingPolicy(request.getName());
        try {
            return runPolicyTask(request, policy, onResponse, onFailure);
        } catch (Exception e) {
            // Be sure to unlock if submission failed.
            releasePolicy(request.getName());
            throw e;
        }
    }

    /**
     * Registers the execution as a task (so it is visible in the task management API) and hands
     * the runner to the generic thread pool. If submission throws, the task is marked FAILED and
     * unregistered before rethrowing; the caller releases the policy lock.
     */
    private Task runPolicyTask(final ExecuteEnrichPolicyAction.Request request, EnrichPolicy policy,
                               BiConsumer<Task, ExecuteEnrichPolicyStatus> onResponse, BiConsumer<Task, Exception> onFailure) {
        Task asyncTask = taskManager.register("enrich", "policy_execution", new TaskAwareRequest() {
            @Override
            public void setParentTask(TaskId taskId) {
                request.setParentTask(taskId);
            }

            @Override
            public TaskId getParentTask() {
                return request.getParentTask();
            }

            @Override
            public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
                return new ExecuteEnrichPolicyTask(id, type, action, getDescription(), parentTaskId, headers);
            }

            @Override
            public String getDescription() {
                // The task description is simply the policy name
                return request.getName();
            }
        });
        ExecuteEnrichPolicyTask task = (ExecuteEnrichPolicyTask) asyncTask;
        try {
            task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.SCHEDULED));
            PolicyCompletionListener completionListener = new PolicyCompletionListener(request.getName(), task, onResponse, onFailure);
            Runnable runnable = createPolicyRunner(request.getName(), policy, task, completionListener);
            threadPool.executor(ThreadPool.Names.GENERIC).execute(runnable);
            return asyncTask;
        } catch (Exception e) {
            // Unregister task in case of exception
            task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.FAILED));
            taskManager.unregister(asyncTask);
            throw e;
        }
    }
}
|
|
@ -0,0 +1,124 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.Semaphore;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
import java.util.concurrent.locks.ReadWriteLock;
|
||||
import java.util.concurrent.locks.ReentrantReadWriteLock;
|
||||
|
||||
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
|
||||
|
||||
/**
 * A coordination object that allows multiple distinct polices to be executed concurrently, but also makes sure that a single
 * policy can only have one execution in flight at a time. Additionally, this class allows for capturing the current execution
 * state of any policy executions in flight. This execution state can be captured and then later be used to verify that no policy
 * executions have started in the time between the first state capturing.
 */
public class EnrichPolicyLocks {

    /**
     * A snapshot in time detailing if any policy executions are in flight and total number of local executions that
     * have been kicked off since the node has started
     */
    public static class EnrichPolicyExecutionState {
        final boolean anyPolicyInFlight;
        final long executions;

        EnrichPolicyExecutionState(boolean anyPolicyInFlight, long executions) {
            this.anyPolicyInFlight = anyPolicyInFlight;
            this.executions = executions;
        }

        public boolean isAnyPolicyInFlight() {
            return anyPolicyInFlight;
        }
    }

    /**
     * A read-write lock that allows for policies to be executed concurrently with minimal overhead, but allows for blocking
     * policy locking operations while capturing the state of policy executions. Note the inversion of the usual roles:
     * lock/release operations take the READ lock (they may run concurrently with one another), while state capture takes
     * the WRITE lock (it must exclude all lock/release activity). The lock is fair so a pending capture is not starved.
     */
    private final ReadWriteLock currentStateLock = new ReentrantReadWriteLock(true);

    /**
     * A mapping of policy name to a semaphore used for ensuring that a single policy can only have one execution in flight
     * at a time. An entry exists only while its policy is locked (releasePolicy removes it), so the map's size equals the
     * number of currently executing policies.
     */
    private final ConcurrentHashMap<String, Semaphore> policyLocks = new ConcurrentHashMap<>();

    /**
     * A counter that is used as a sort of policy execution sequence id / dirty bit. This is incremented every time a policy
     * successfully acquires an execution lock.
     */
    private final AtomicLong policyRunCounter = new AtomicLong(0L);

    /**
     * Locks a policy to prevent concurrent execution. If the policy is currently executing, this method will immediately
     * throw without waiting. This method only blocks if another thread is currently capturing the current policy execution state.
     * @param policyName The policy name to lock for execution
     * @throws EsRejectedExecutionException if the policy is locked already
     */
    public void lockPolicy(String policyName) {
        currentStateLock.readLock().lock();
        try {
            // One-permit semaphore per policy: tryAcquire succeeds only for the first locker
            Semaphore runLock = policyLocks.computeIfAbsent(policyName, (name) -> new Semaphore(1));
            boolean acquired = runLock.tryAcquire();
            if (acquired == false) {
                throw new EsRejectedExecutionException("Could not obtain lock because policy execution for [" + policyName +
                    "] is already in progress.");
            }
            policyRunCounter.incrementAndGet();
        } finally {
            currentStateLock.readLock().unlock();
        }
    }

    /**
     * Captures a snapshot of the current policy execution state. This method never blocks, instead assuming that a policy is
     * currently starting its execution and returns an appropriate state.
     * @return The current state of in-flight policy executions
     */
    public EnrichPolicyExecutionState captureExecutionState() {
        // tryLock: if a lock/release operation holds the read lock right now, fall through and
        // conservatively report "a policy is in flight" rather than waiting for a quiet point
        if (currentStateLock.writeLock().tryLock()) {
            try {
                long revision = policyRunCounter.get();
                long currentPolicyExecutions = policyLocks.mappingCount();
                return new EnrichPolicyExecutionState(currentPolicyExecutions > 0L, revision);
            } finally {
                currentStateLock.writeLock().unlock();
            }
        }
        return new EnrichPolicyExecutionState(true, policyRunCounter.get());
    }

    /**
     * Checks if the current execution state matches that of the given execution state. Used to ensure that over a period of time
     * no changes to the policy execution state have occurred.
     * @param previousState The previous state to check the current state against
     * @return true if the current state matches the given previous state, false if policy executions have changed over time.
     */
    boolean isSameState(EnrichPolicyExecutionState previousState) {
        EnrichPolicyExecutionState currentState = captureExecutionState();
        return currentState.anyPolicyInFlight == previousState.anyPolicyInFlight &&
            currentState.executions == previousState.executions;
    }

    /**
     * Releases the lock for a given policy name, allowing it to be executed.
     * @param policyName The policy to release.
     */
    public void releasePolicy(String policyName) {
        currentStateLock.readLock().lock();
        try {
            // Removing the entry (rather than releasing its semaphore) both unlocks the policy
            // and keeps the map size equal to the number of in-flight executions
            policyLocks.remove(policyName);
        } finally {
            currentStateLock.readLock().unlock();
        }
    }
}
|
|
@ -0,0 +1,225 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.LocalNodeMasterListener;
|
||||
import org.elasticsearch.cluster.metadata.AliasMetaData;
|
||||
import org.elasticsearch.cluster.metadata.MappingMetaData;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
import org.elasticsearch.common.component.LifecycleListener;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
|
||||
import org.elasticsearch.common.xcontent.ObjectPath;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.threadpool.Scheduler;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.Semaphore;
|
||||
|
||||
public class EnrichPolicyMaintenanceService implements LocalNodeMasterListener {
|
||||
|
||||
    private static final Logger logger = LogManager.getLogger(EnrichPolicyMaintenanceService.class);

    // Path inside an enrich index mapping's _meta section where the owning policy's name is stored
    private static final String MAPPING_POLICY_FIELD_PATH = "_meta." + EnrichPolicyRunner.ENRICH_POLICY_NAME_FIELD_NAME;
    // fromOptions(true, false, false, false): ignore unavailable indices, no wildcard expansion.
    // Not referenced in the code visible here — presumably used by the deletion logic below; confirm.
    private static final IndicesOptions IGNORE_UNAVAILABLE = IndicesOptions.fromOptions(true, false, false, false);

    private final Settings settings;
    private final Client client;
    private final ClusterService clusterService;
    private final ThreadPool threadPool;
    // Shared execution-state tracker; cleanup is skipped while any policy executes
    private final EnrichPolicyLocks enrichPolicyLocks;

    // volatile: flipped from cluster-state/applier threads on master election changes
    private volatile boolean isMaster = false;
    // Handle to the next scheduled maintenance run; cancelled when no longer master
    private volatile Scheduler.Cancellable cancellable;
    // Single permit: at most one maintenance pass in progress at a time
    private final Semaphore maintenanceLock = new Semaphore(1);
|
||||
|
||||
    /**
     * @param settings          node settings (source of the cleanup period)
     * @param client            client used to list and delete enrich indices
     * @param clusterService    provides cluster state and master-listener registration
     * @param threadPool        used to schedule the periodic maintenance runs
     * @param enrichPolicyLocks shared execution-state tracker, consulted so cleanup never races a policy execution
     */
    EnrichPolicyMaintenanceService(Settings settings, Client client, ClusterService clusterService, ThreadPool threadPool,
                                   EnrichPolicyLocks enrichPolicyLocks) {
        this.settings = settings;
        this.client = client;
        this.clusterService = clusterService;
        this.threadPool = threadPool;
        this.enrichPolicyLocks = enrichPolicyLocks;
    }
|
||||
|
||||
    /**
     * Registers this service as a local-node-master listener so that maintenance only runs on
     * the elected master node.
     */
    void initialize() {
        clusterService.addLocalNodeMasterListener(this);
    }
|
||||
|
||||
    /**
     * Called when this node is elected master: marks the service active and schedules the first
     * maintenance run, unless a run is already scheduled.
     */
    @Override
    public void onMaster() {
        if (cancellable == null || cancellable.isCancelled()) {
            isMaster = true;
            scheduleNext();
            // Stop maintenance before the node shuts down.
            // NOTE(review): a fresh LifecycleListener is registered on every master election and
            // never removed, so repeated elections accumulate listeners — confirm this is intended.
            clusterService.addLifecycleListener(new LifecycleListener() {
                @Override
                public void beforeStop() {
                    offMaster();
                }
            });
        }
    }
|
||||
|
||||
    /**
     * Called when this node is no longer master (or the node is stopping): deactivates the
     * service and cancels any pending maintenance run.
     */
    @Override
    public void offMaster() {
        if (cancellable != null && cancellable.isCancelled() == false) {
            isMaster = false;
            cancellable.cancel();
        }
    }
|
||||
|
||||
    /**
     * Thread pool on which master-change notifications for this listener are delivered.
     */
    @Override
    public String executorName() {
        return ThreadPool.Names.GENERIC;
    }
|
||||
|
||||
private void scheduleNext() {
|
||||
if (isMaster) {
|
||||
try {
|
||||
TimeValue waitTime = EnrichPlugin.ENRICH_CLEANUP_PERIOD.get(settings);
|
||||
cancellable = threadPool.schedule(this::execute, waitTime, ThreadPool.Names.GENERIC);
|
||||
} catch (EsRejectedExecutionException e) {
|
||||
if (e.isExecutorShutdown()) {
|
||||
logger.debug("Failed to schedule next [enrich] maintenance task; Shutting down", e);
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
logger.debug("No longer master; Skipping next scheduled [enrich] maintenance task");
|
||||
}
|
||||
}
|
||||
|
||||
private void execute() {
|
||||
logger.debug("Triggering scheduled [enrich] maintenance task");
|
||||
if (isMaster) {
|
||||
maybeCleanUpEnrichIndices();
|
||||
scheduleNext();
|
||||
} else {
|
||||
logger.debug("No longer master; Skipping next scheduled [enrich] maintenance task");
|
||||
}
|
||||
}
|
||||
|
||||
private void maybeCleanUpEnrichIndices() {
|
||||
if (maintenanceLock.tryAcquire()) {
|
||||
cleanUpEnrichIndices();
|
||||
} else {
|
||||
logger.debug("Previous [enrich] maintenance task still in progress; Skipping this execution");
|
||||
}
|
||||
}
|
||||
|
||||
    /**
     * Releases the maintenance lock taken by maybeCleanUpEnrichIndices(), allowing the next
     * scheduled pass to run. Must be called exactly once per started cleanup pass.
     */
    void concludeMaintenance() {
        maintenanceLock.release();
    }
|
||||
|
||||
    /**
     * Performs one cleanup pass: lists all enrich indices (by the enrich index name prefix) and
     * removes those that no longer belong to any current policy. The pass is abandoned — and
     * maintenance concluded — if any policy execution is in flight before or during the index
     * listing, since in-flight executions may be creating indices not yet published.
     */
    void cleanUpEnrichIndices() {
        final Map<String, EnrichPolicy> policies = EnrichStore.getPolicies(clusterService.state());
        GetIndexRequest indices = new GetIndexRequest()
            .indices(EnrichPolicy.ENRICH_INDEX_NAME_BASE + "*")
            .indicesOptions(IndicesOptions.lenientExpand());
        // Check that no enrich policies are being executed
        final EnrichPolicyLocks.EnrichPolicyExecutionState executionState = enrichPolicyLocks.captureExecutionState();
        if (executionState.isAnyPolicyInFlight() == false) {
            client.admin().indices().getIndex(indices, new ActionListener<GetIndexResponse>() {
                @Override
                public void onResponse(GetIndexResponse getIndexResponse) {
                    // Ensure that no enrich policy executions started while we were retrieving the snapshot of index data
                    // If executions were kicked off, we can't be sure that the indices we are about to process are a
                    // stable state of the system (they could be new indices created by a policy that hasn't been published yet).
                    if (enrichPolicyLocks.isSameState(executionState)) {
                        String[] removeIndices = Arrays.stream(getIndexResponse.getIndices())
                            .filter(indexName -> shouldRemoveIndex(getIndexResponse, policies, indexName))
                            .toArray(String[]::new);
                        // deleteIndices is defined later in this class (outside this view);
                        // presumably it calls concludeMaintenance() when deletions finish — confirm.
                        deleteIndices(removeIndices);
                    } else {
                        logger.debug("Skipping enrich index cleanup since enrich policy was executed while gathering indices");
                        concludeMaintenance();
                    }
                }

                @Override
                public void onFailure(Exception e) {
                    logger.error("Failed to get indices during enrich index maintenance task", e);
                    concludeMaintenance();
                }
            });
        } else {
            concludeMaintenance();
        }
    }
|
||||
|
||||
private boolean shouldRemoveIndex(GetIndexResponse getIndexResponse, Map<String, EnrichPolicy> policies, String indexName) {
|
||||
// Find the policy on the index
|
||||
logger.debug("Checking if should remove enrich index [{}]", indexName);
|
||||
ImmutableOpenMap<String, MappingMetaData> indexMapping = getIndexResponse.getMappings().get(indexName);
|
||||
MappingMetaData mappingMetaData = indexMapping.get(MapperService.SINGLE_MAPPING_NAME);
|
||||
Map<String, Object> mapping = mappingMetaData.getSourceAsMap();
|
||||
String policyName = ObjectPath.eval(MAPPING_POLICY_FIELD_PATH, mapping);
|
||||
// Check if index has a corresponding policy
|
||||
if (policyName == null || policies.containsKey(policyName) == false) {
|
||||
// No corresponding policy. Index should be marked for removal.
|
||||
logger.debug("Enrich index [{}] does not correspond to any existing policy. Found policy name [{}]", indexName, policyName);
|
||||
return true;
|
||||
}
|
||||
// Check if index is currently linked to an alias
|
||||
final String aliasName = EnrichPolicy.getBaseName(policyName);
|
||||
List<AliasMetaData> aliasMetadata = getIndexResponse.aliases().get(indexName);
|
||||
if (aliasMetadata == null) {
|
||||
logger.debug("Enrich index [{}] is not marked as a live index since it has no alias information", indexName);
|
||||
return true;
|
||||
}
|
||||
boolean hasAlias = aliasMetadata
|
||||
.stream()
|
||||
.anyMatch((aliasMetaData -> aliasMetaData.getAlias().equals(aliasName)));
|
||||
// Index is not currently published to the enrich alias. Should be marked for removal.
|
||||
if (hasAlias == false) {
|
||||
logger.debug("Enrich index [{}] is not marked as a live index since it lacks the alias [{}]", indexName, aliasName);
|
||||
return true;
|
||||
}
|
||||
logger.debug("Enrich index [{}] was spared since it is associated with the valid policy [{}] and references alias [{}]",
|
||||
indexName, policyName, aliasName);
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
 * Deletes the given stale enrich indices (if any) and releases the maintenance lock via
 * concludeMaintenance() once the deletion completes, regardless of outcome.
 */
private void deleteIndices(String[] removeIndices) {
    if (removeIndices.length != 0) {
        DeleteIndexRequest deleteIndices = new DeleteIndexRequest()
            .indices(removeIndices)
            // Another node (or a concurrent run) may already have removed an index; don't fail on that.
            .indicesOptions(IGNORE_UNAVAILABLE);
        client.admin().indices().delete(deleteIndices, new ActionListener<AcknowledgedResponse>() {
            @Override
            public void onResponse(AcknowledgedResponse acknowledgedResponse) {
                logger.debug("Completed deletion of stale enrich indices [{}]", () -> Arrays.toString(removeIndices));
                concludeMaintenance();
            }

            @Override
            public void onFailure(Exception e) {
                logger.error(() -> "Enrich maintenance task could not delete abandoned enrich indices [" +
                    Arrays.toString(removeIndices) + "]", e);
                concludeMaintenance();
            }
        });
    } else {
        concludeMaintenance();
    }
}
|
||||
}
|
|
@ -0,0 +1,94 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.UncheckedIOException;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ingest.PutPipelineRequest;
|
||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.ingest.IngestMetadata;
|
||||
import org.elasticsearch.ingest.PipelineConfiguration;
|
||||
|
||||
/**
|
||||
* Manages the definitions and lifecycle of the ingest pipeline used by the reindex operation within the Enrich Policy execution.
|
||||
*/
|
||||
public class EnrichPolicyReindexPipeline {

    /**
     * The current version of the pipeline definition. Used in the pipeline's name to differentiate from breaking changes
     * (separate from product version).
     */
    static final String CURRENT_PIPELINE_VERSION_NAME = "7";

    /**
     * The last version of the distribution that updated the pipelines definition.
     * TODO: This should be the version of ES that Enrich first ships in, which likely doesn't exist yet.
     */
    static final int ENRICH_PIPELINE_LAST_UPDATED_VERSION = Version.V_7_4_0.id;

    /**
     * @return the name of the internal pipeline used while reindexing source data into an enrich index;
     *         the definition version is baked into the name so incompatible versions never collide
     */
    static String pipelineName() {
        return "enrich-policy-reindex-" + CURRENT_PIPELINE_VERSION_NAME;
    }

    /**
     * Checks if the current version of the pipeline definition is installed in the cluster
     * @param clusterState The cluster state to check
     * @return true if a pipeline exists that is compatible with this version of Enrich, false otherwise
     */
    static boolean exists(ClusterState clusterState) {
        final IngestMetadata ingestMetadata = clusterState.getMetaData().custom(IngestMetadata.TYPE);
        // we ensure that we both have the pipeline and its version represents the current (or later) version
        if (ingestMetadata != null) {
            final PipelineConfiguration pipeline = ingestMetadata.getPipelines().get(pipelineName());
            if (pipeline != null) {
                // The stored definition carries a numeric "version" field (see currentEnrichPipelineDefinition).
                Object version = pipeline.getConfigAsMap().get("version");
                return version instanceof Number && ((Number) version).intValue() >= ENRICH_PIPELINE_LAST_UPDATED_VERSION;
            }
        }
        return false;
    }

    /**
     * Creates a pipeline with the current version's pipeline definition
     * @param client Client used to execute put pipeline
     * @param listener Callback used after pipeline has been created
     */
    public static void create(Client client, ActionListener<AcknowledgedResponse> listener) {
        final BytesReference pipeline = BytesReference.bytes(currentEnrichPipelineDefinition(XContentType.JSON));
        final PutPipelineRequest request = new PutPipelineRequest(pipelineName(), pipeline, XContentType.JSON);
        client.admin().cluster().putPipeline(request, listener);
    }

    // Renders the JSON definition of the sanitization pipeline. Its only processor removes the
    // _id field so documents reindexed from multiple source indices never collide on id.
    private static XContentBuilder currentEnrichPipelineDefinition(XContentType xContentType) {
        try {
            return XContentBuilder.builder(xContentType.xContent())
                .startObject()
                    .field("description", "This pipeline sanitizes documents that will be stored in enrich indices for ingest lookup " +
                        "purposes. It is an internal pipeline and should not be modified.")
                    .field("version", ENRICH_PIPELINE_LAST_UPDATED_VERSION)
                    .startArray("processors")
                        .startObject()
                            // remove the id from the document so that documents from multiple indices will always be unique.
                            .startObject("remove")
                                .field("field", "_id")
                            .endObject()
                        .endObject()
                    .endArray()
                .endObject();
        } catch (final IOException e) {
            throw new UncheckedIOException("Failed to create pipeline for enrich document sanitization", e);
        }
    }

}
|
|
@ -0,0 +1,470 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
|
||||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
|
||||
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
|
||||
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
|
||||
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
|
||||
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
|
||||
import org.elasticsearch.action.admin.indices.segments.IndexSegments;
|
||||
import org.elasticsearch.action.admin.indices.segments.IndexShardSegments;
|
||||
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse;
|
||||
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequest;
|
||||
import org.elasticsearch.action.admin.indices.segments.ShardSegments;
|
||||
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
|
||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.metadata.AliasMetaData;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.metadata.MappingMetaData;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.CheckedFunction;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.index.reindex.BulkByScrollResponse;
|
||||
import org.elasticsearch.index.reindex.ReindexAction;
|
||||
import org.elasticsearch.index.reindex.ReindexRequest;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
|
||||
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.UncheckedIOException;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.LongSupplier;
|
||||
|
||||
public class EnrichPolicyRunner implements Runnable {
|
||||
|
||||
private static final Logger logger = LogManager.getLogger(EnrichPolicyRunner.class);
|
||||
|
||||
static final String ENRICH_POLICY_NAME_FIELD_NAME = "enrich_policy_name";
|
||||
static final String ENRICH_POLICY_TYPE_FIELD_NAME = "enrich_policy_type";
|
||||
static final String ENRICH_MATCH_FIELD_NAME = "enrich_match_field";
|
||||
static final String ENRICH_README_FIELD_NAME = "enrich_readme";
|
||||
|
||||
static final String ENRICH_INDEX_README_TEXT = "This index is managed by Elasticsearch and should not be modified in any way.";
|
||||
|
||||
private final String policyName;
|
||||
private final EnrichPolicy policy;
|
||||
private final ExecuteEnrichPolicyTask task;
|
||||
private final ActionListener<ExecuteEnrichPolicyStatus> listener;
|
||||
private final ClusterService clusterService;
|
||||
private final Client client;
|
||||
private final IndexNameExpressionResolver indexNameExpressionResolver;
|
||||
private final LongSupplier nowSupplier;
|
||||
private final int fetchSize;
|
||||
private final int maxForceMergeAttempts;
|
||||
|
||||
EnrichPolicyRunner(String policyName, EnrichPolicy policy, ExecuteEnrichPolicyTask task,
|
||||
ActionListener<ExecuteEnrichPolicyStatus> listener, ClusterService clusterService, Client client,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, LongSupplier nowSupplier, int fetchSize,
|
||||
int maxForceMergeAttempts) {
|
||||
this.policyName = policyName;
|
||||
this.policy = policy;
|
||||
this.task = task;
|
||||
this.listener = listener;
|
||||
this.clusterService = clusterService;
|
||||
this.client = client;
|
||||
this.indexNameExpressionResolver = indexNameExpressionResolver;
|
||||
this.nowSupplier = nowSupplier;
|
||||
this.fetchSize = fetchSize;
|
||||
this.maxForceMergeAttempts = maxForceMergeAttempts;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
logger.info("Policy [{}]: Running enrich policy", policyName);
|
||||
task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.RUNNING));
|
||||
// Collect the source index information
|
||||
final String[] sourceIndices = policy.getIndices().toArray(new String[0]);
|
||||
logger.debug("Policy [{}]: Checking source indices [{}]", policyName, sourceIndices);
|
||||
GetIndexRequest getIndexRequest = new GetIndexRequest().indices(sourceIndices);
|
||||
client.admin().indices().getIndex(getIndexRequest, new ActionListener<GetIndexResponse>() {
|
||||
@Override
|
||||
public void onResponse(GetIndexResponse getIndexResponse) {
|
||||
validateMappings(getIndexResponse);
|
||||
prepareAndCreateEnrichIndex();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private Map<String, Object> getMappings(final GetIndexResponse getIndexResponse, final String sourceIndexName) {
|
||||
ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> mappings = getIndexResponse.mappings();
|
||||
ImmutableOpenMap<String, MappingMetaData> indexMapping = mappings.get(sourceIndexName);
|
||||
if (indexMapping.keys().size() == 0) {
|
||||
throw new ElasticsearchException(
|
||||
"Enrich policy execution for [{}] failed. No mapping available on source [{}] included in [{}]",
|
||||
policyName, sourceIndexName, policy.getIndices());
|
||||
}
|
||||
assert indexMapping.keys().size() == 1 : "Expecting only one type per index";
|
||||
MappingMetaData typeMapping = indexMapping.iterator().next().value;
|
||||
return typeMapping.sourceAsMap();
|
||||
}
|
||||
|
||||
private void validateMappings(final GetIndexResponse getIndexResponse) {
|
||||
String[] sourceIndices = getIndexResponse.getIndices();
|
||||
logger.debug("Policy [{}]: Validating [{}] source mappings", policyName, sourceIndices);
|
||||
for (String sourceIndex : sourceIndices) {
|
||||
Map<String, Object> mapping = getMappings(getIndexResponse, sourceIndex);
|
||||
// First ensure mapping is set
|
||||
if (mapping.get("properties") == null) {
|
||||
throw new ElasticsearchException(
|
||||
"Enrich policy execution for [{}] failed. Could not read mapping for source [{}] included by pattern [{}]",
|
||||
policyName, sourceIndex, policy.getIndices());
|
||||
}
|
||||
// Validate the key and values
|
||||
try {
|
||||
validateField(mapping, policy.getMatchField(), true);
|
||||
for (String valueFieldName : policy.getEnrichFields()) {
|
||||
validateField(mapping, valueFieldName, false);
|
||||
}
|
||||
} catch (ElasticsearchException e) {
|
||||
throw new ElasticsearchException(
|
||||
"Enrich policy execution for [{}] failed while validating field mappings for index [{}]",
|
||||
e, policyName, sourceIndex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void validateField(Map<?, ?> properties, String fieldName, boolean fieldRequired) {
|
||||
assert Strings.isEmpty(fieldName) == false: "Field name cannot be null or empty";
|
||||
String[] fieldParts = fieldName.split("\\.");
|
||||
StringBuilder parent = new StringBuilder();
|
||||
Map<?, ?> currentField = properties;
|
||||
boolean onRoot = true;
|
||||
for (String fieldPart : fieldParts) {
|
||||
// Ensure that the current field is of object type only (not a nested type or a non compound field)
|
||||
Object type = currentField.get("type");
|
||||
if (type != null && "object".equals(type) == false) {
|
||||
throw new ElasticsearchException(
|
||||
"Could not traverse mapping to field [{}]. The [{}] field must be regular object but was [{}].",
|
||||
fieldName,
|
||||
onRoot ? "root" : parent.toString(),
|
||||
type
|
||||
);
|
||||
}
|
||||
Map<?, ?> currentProperties = ((Map<?, ?>) currentField.get("properties"));
|
||||
if (currentProperties == null) {
|
||||
if (fieldRequired) {
|
||||
throw new ElasticsearchException(
|
||||
"Could not traverse mapping to field [{}]. Expected the [{}] field to have sub fields but none were configured.",
|
||||
fieldName,
|
||||
onRoot ? "root" : parent.toString()
|
||||
);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
}
|
||||
currentField = ((Map<?, ?>) currentProperties.get(fieldPart));
|
||||
if (currentField == null) {
|
||||
if (fieldRequired) {
|
||||
throw new ElasticsearchException(
|
||||
"Could not traverse mapping to field [{}]. Could not find the [{}] field under [{}]",
|
||||
fieldName,
|
||||
fieldPart,
|
||||
onRoot ? "root" : parent.toString()
|
||||
);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (onRoot) {
|
||||
onRoot = false;
|
||||
} else {
|
||||
parent.append(".");
|
||||
}
|
||||
parent.append(fieldPart);
|
||||
}
|
||||
}
|
||||
|
||||
private XContentBuilder resolveEnrichMapping(final EnrichPolicy policy) {
|
||||
// Currently the only supported policy type is EnrichPolicy.MATCH_TYPE, which is a keyword type
|
||||
final String keyType;
|
||||
final CheckedFunction<XContentBuilder, XContentBuilder, IOException> matchFieldMapping;
|
||||
if (EnrichPolicy.MATCH_TYPE.equals(policy.getType())) {
|
||||
matchFieldMapping = (builder) -> builder.field("type", "keyword").field("doc_values", false);
|
||||
// No need to also configure index_options, because keyword type defaults to 'docs'.
|
||||
} else if (EnrichPolicy.GEO_MATCH_TYPE.equals(policy.getType())) {
|
||||
matchFieldMapping = (builder) -> builder.field("type", "geo_shape");
|
||||
} else {
|
||||
throw new ElasticsearchException("Unrecognized enrich policy type [{}]", policy.getType());
|
||||
}
|
||||
|
||||
// Enable _source on enrich index. Explicitly mark key mapping type.
|
||||
try {
|
||||
XContentBuilder builder = JsonXContent.contentBuilder();
|
||||
builder = builder.startObject()
|
||||
.startObject(MapperService.SINGLE_MAPPING_NAME)
|
||||
.field("dynamic", false)
|
||||
.startObject("_source")
|
||||
.field("enabled", true)
|
||||
.endObject()
|
||||
.startObject("properties")
|
||||
.startObject(policy.getMatchField());
|
||||
builder = matchFieldMapping.apply(builder).endObject().endObject()
|
||||
.startObject("_meta")
|
||||
.field(ENRICH_README_FIELD_NAME, ENRICH_INDEX_README_TEXT)
|
||||
.field(ENRICH_POLICY_NAME_FIELD_NAME, policyName)
|
||||
.field(ENRICH_MATCH_FIELD_NAME, policy.getMatchField())
|
||||
.field(ENRICH_POLICY_TYPE_FIELD_NAME, policy.getType())
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
|
||||
return builder;
|
||||
} catch (IOException ioe) {
|
||||
throw new UncheckedIOException("Could not render enrich mapping", ioe);
|
||||
}
|
||||
}
|
||||
|
||||
private void prepareAndCreateEnrichIndex() {
|
||||
long nowTimestamp = nowSupplier.getAsLong();
|
||||
String enrichIndexName = EnrichPolicy.getBaseName(policyName) + "-" + nowTimestamp;
|
||||
Settings enrichIndexSettings = Settings.builder()
|
||||
.put("index.number_of_shards", 1)
|
||||
.put("index.number_of_replicas", 0)
|
||||
// No changes will be made to an enrich index after policy execution, so need to enable automatic refresh interval:
|
||||
.put("index.refresh_interval", -1)
|
||||
// This disables eager global ordinals loading for all fields:
|
||||
.put("index.warmer.enabled", false)
|
||||
.build();
|
||||
CreateIndexRequest createEnrichIndexRequest = new CreateIndexRequest(enrichIndexName, enrichIndexSettings);
|
||||
createEnrichIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, resolveEnrichMapping(policy));
|
||||
logger.debug("Policy [{}]: Creating new enrich index [{}]", policyName, enrichIndexName);
|
||||
client.admin().indices().create(createEnrichIndexRequest, new ActionListener<CreateIndexResponse>() {
|
||||
@Override
|
||||
public void onResponse(CreateIndexResponse createIndexResponse) {
|
||||
prepareReindexOperation(enrichIndexName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void prepareReindexOperation(final String destinationIndexName) {
|
||||
// Check to make sure that the enrich pipeline exists, and create it if it is missing.
|
||||
if (EnrichPolicyReindexPipeline.exists(clusterService.state()) == false) {
|
||||
EnrichPolicyReindexPipeline.create(client, new ActionListener<AcknowledgedResponse>() {
|
||||
@Override
|
||||
public void onResponse(AcknowledgedResponse acknowledgedResponse) {
|
||||
transferDataToEnrichIndex(destinationIndexName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
transferDataToEnrichIndex(destinationIndexName);
|
||||
}
|
||||
}
|
||||
|
||||
private void transferDataToEnrichIndex(final String destinationIndexName) {
|
||||
logger.debug("Policy [{}]: Transferring source data to new enrich index [{}]", policyName, destinationIndexName);
|
||||
// Filter down the source fields to just the ones required by the policy
|
||||
final Set<String> retainFields = new HashSet<>();
|
||||
retainFields.add(policy.getMatchField());
|
||||
retainFields.addAll(policy.getEnrichFields());
|
||||
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
|
||||
searchSourceBuilder.size(fetchSize);
|
||||
searchSourceBuilder.fetchSource(retainFields.toArray(new String[0]), new String[0]);
|
||||
if (policy.getQuery() != null) {
|
||||
searchSourceBuilder.query(QueryBuilders.wrapperQuery(policy.getQuery().getQuery()));
|
||||
}
|
||||
ReindexRequest reindexRequest = new ReindexRequest()
|
||||
.setDestIndex(destinationIndexName)
|
||||
.setSourceIndices(policy.getIndices().toArray(new String[0]));
|
||||
reindexRequest.getSearchRequest().source(searchSourceBuilder);
|
||||
reindexRequest.getDestination().source(new BytesArray(new byte[0]), XContentType.SMILE);
|
||||
reindexRequest.getDestination().routing("discard");
|
||||
reindexRequest.getDestination().setPipeline(EnrichPolicyReindexPipeline.pipelineName());
|
||||
client.execute(ReindexAction.INSTANCE, reindexRequest, new ActionListener<BulkByScrollResponse>() {
|
||||
@Override
|
||||
public void onResponse(BulkByScrollResponse bulkByScrollResponse) {
|
||||
// Do we want to fail the request if there were failures during the reindex process?
|
||||
if (bulkByScrollResponse.getBulkFailures().size() > 0) {
|
||||
listener.onFailure(new ElasticsearchException("Encountered bulk failures during reindex process"));
|
||||
} else if (bulkByScrollResponse.getSearchFailures().size() > 0) {
|
||||
listener.onFailure(new ElasticsearchException("Encountered search failures during reindex process"));
|
||||
} else {
|
||||
logger.info("Policy [{}]: Transferred [{}] documents to enrich index [{}]", policyName,
|
||||
bulkByScrollResponse.getCreated(), destinationIndexName);
|
||||
forceMergeEnrichIndex(destinationIndexName, 1);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void forceMergeEnrichIndex(final String destinationIndexName, final int attempt) {
|
||||
logger.debug("Policy [{}]: Force merging newly created enrich index [{}] (Attempt {}/{})", policyName, destinationIndexName,
|
||||
attempt, maxForceMergeAttempts);
|
||||
client.admin().indices().forceMerge(new ForceMergeRequest(destinationIndexName).maxNumSegments(1),
|
||||
new ActionListener<ForceMergeResponse>() {
|
||||
@Override
|
||||
public void onResponse(ForceMergeResponse forceMergeResponse) {
|
||||
refreshEnrichIndex(destinationIndexName, attempt);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void refreshEnrichIndex(final String destinationIndexName, final int attempt) {
|
||||
logger.debug("Policy [{}]: Refreshing enrich index [{}]", policyName, destinationIndexName);
|
||||
client.admin().indices().refresh(new RefreshRequest(destinationIndexName), new ActionListener<RefreshResponse>() {
|
||||
@Override
|
||||
public void onResponse(RefreshResponse refreshResponse) {
|
||||
ensureSingleSegment(destinationIndexName, attempt);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
protected void ensureSingleSegment(final String destinationIndexName, final int attempt) {
|
||||
client.admin().indices().segments(new IndicesSegmentsRequest(destinationIndexName), new ActionListener<IndicesSegmentResponse>() {
|
||||
@Override
|
||||
public void onResponse(IndicesSegmentResponse indicesSegmentResponse) {
|
||||
IndexSegments indexSegments = indicesSegmentResponse.getIndices().get(destinationIndexName);
|
||||
if (indexSegments == null) {
|
||||
throw new ElasticsearchException("Could not locate segment information for newly created index [{}]",
|
||||
destinationIndexName);
|
||||
}
|
||||
Map<Integer, IndexShardSegments> indexShards = indexSegments.getShards();
|
||||
assert indexShards.size() == 1 : "Expected enrich index to contain only one shard";
|
||||
ShardSegments[] shardSegments = indexShards.get(0).getShards();
|
||||
assert shardSegments.length == 1 : "Expected enrich index to contain no replicas at this point";
|
||||
ShardSegments primarySegments = shardSegments[0];
|
||||
if (primarySegments.getSegments().size() > 1) {
|
||||
int nextAttempt = attempt + 1;
|
||||
if (nextAttempt > maxForceMergeAttempts) {
|
||||
listener.onFailure(new ElasticsearchException(
|
||||
"Force merging index [{}] attempted [{}] times but did not result in one segment.",
|
||||
destinationIndexName, attempt, maxForceMergeAttempts));
|
||||
} else {
|
||||
logger.debug("Policy [{}]: Force merge result contains more than one segment [{}], retrying (attempt {}/{})",
|
||||
policyName, primarySegments.getSegments().size(), nextAttempt, maxForceMergeAttempts);
|
||||
forceMergeEnrichIndex(destinationIndexName, nextAttempt);
|
||||
}
|
||||
} else {
|
||||
// Force merge down to one segment successful
|
||||
setIndexReadOnly(destinationIndexName);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void setIndexReadOnly(final String destinationIndexName) {
|
||||
logger.debug("Policy [{}]: Setting new enrich index [{}] to be read only", policyName, destinationIndexName);
|
||||
UpdateSettingsRequest request = new UpdateSettingsRequest(destinationIndexName)
|
||||
.setPreserveExisting(true)
|
||||
.settings(Settings.builder()
|
||||
.put("index.auto_expand_replicas", "0-all")
|
||||
.put("index.blocks.write", "true"));
|
||||
client.admin().indices().updateSettings(request, new ActionListener<AcknowledgedResponse>() {
|
||||
@Override
|
||||
public void onResponse(AcknowledgedResponse acknowledgedResponse) {
|
||||
waitForIndexGreen(destinationIndexName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void waitForIndexGreen(final String destinationIndexName) {
|
||||
ClusterHealthRequest request = new ClusterHealthRequest(destinationIndexName).waitForGreenStatus();
|
||||
client.admin().cluster().health(request, new ActionListener<ClusterHealthResponse>() {
|
||||
@Override
|
||||
public void onResponse(ClusterHealthResponse clusterHealthResponse) {
|
||||
updateEnrichPolicyAlias(destinationIndexName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void updateEnrichPolicyAlias(final String destinationIndexName) {
|
||||
String enrichIndexBase = EnrichPolicy.getBaseName(policyName);
|
||||
logger.debug("Policy [{}]: Promoting new enrich index [{}] to alias [{}]", policyName, destinationIndexName, enrichIndexBase);
|
||||
GetAliasesRequest aliasRequest = new GetAliasesRequest(enrichIndexBase);
|
||||
String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterService.state(), aliasRequest);
|
||||
ImmutableOpenMap<String, List<AliasMetaData>> aliases =
|
||||
clusterService.state().metaData().findAliases(aliasRequest, concreteIndices);
|
||||
IndicesAliasesRequest aliasToggleRequest = new IndicesAliasesRequest();
|
||||
String[] indices = aliases.keys().toArray(String.class);
|
||||
if (indices.length > 0) {
|
||||
aliasToggleRequest.addAliasAction(IndicesAliasesRequest.AliasActions.remove().indices(indices).alias(enrichIndexBase));
|
||||
}
|
||||
aliasToggleRequest.addAliasAction(IndicesAliasesRequest.AliasActions.add().index(destinationIndexName).alias(enrichIndexBase));
|
||||
client.admin().indices().aliases(aliasToggleRequest, new ActionListener<AcknowledgedResponse>() {
|
||||
@Override
|
||||
public void onResponse(AcknowledgedResponse acknowledgedResponse) {
|
||||
logger.info("Policy [{}]: Policy execution complete", policyName);
|
||||
ExecuteEnrichPolicyStatus completeStatus = new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.COMPLETE);
|
||||
task.setStatus(completeStatus);
|
||||
listener.onResponse(completeStatus);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
|
@ -0,0 +1,78 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.metadata.AliasOrIndex;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.common.geo.ShapeRelation;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
import org.elasticsearch.ingest.ConfigurationUtils;
|
||||
import org.elasticsearch.ingest.Processor;
|
||||
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
final class EnrichProcessorFactory implements Processor.Factory, Consumer<ClusterState> {
|
||||
|
||||
static final String TYPE = "enrich";
|
||||
private final Client client;
|
||||
|
||||
volatile MetaData metaData;
|
||||
|
||||
EnrichProcessorFactory(Client client) {
|
||||
this.client = client;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Processor create(Map<String, Processor.Factory> processorFactories, String tag, Map<String, Object> config) throws Exception {
|
||||
String policyName = ConfigurationUtils.readStringProperty(TYPE, tag, config, "policy_name");
|
||||
String policyAlias = EnrichPolicy.getBaseName(policyName);
|
||||
AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(policyAlias);
|
||||
if (aliasOrIndex == null) {
|
||||
throw new IllegalArgumentException("no enrich index exists for policy with name [" + policyName + "]");
|
||||
}
|
||||
assert aliasOrIndex.isAlias();
|
||||
assert aliasOrIndex.getIndices().size() == 1;
|
||||
IndexMetaData imd = aliasOrIndex.getIndices().get(0);
|
||||
|
||||
String field = ConfigurationUtils.readStringProperty(TYPE, tag, config, "field");
|
||||
Map<String, Object> mappingAsMap = imd.mapping().sourceAsMap();
|
||||
String policyType =
|
||||
(String) XContentMapValues.extractValue("_meta." + EnrichPolicyRunner.ENRICH_POLICY_TYPE_FIELD_NAME, mappingAsMap);
|
||||
String matchField = (String) XContentMapValues.extractValue("_meta." + EnrichPolicyRunner.ENRICH_MATCH_FIELD_NAME, mappingAsMap);
|
||||
|
||||
boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, tag, config, "ignore_missing", false);
|
||||
boolean overrideEnabled = ConfigurationUtils.readBooleanProperty(TYPE, tag, config, "override", true);
|
||||
String targetField = ConfigurationUtils.readStringProperty(TYPE, tag, config, "target_field");
|
||||
int maxMatches = ConfigurationUtils.readIntProperty(TYPE, tag, config, "max_matches", 1);
|
||||
if (maxMatches < 1 || maxMatches > 128) {
|
||||
throw ConfigurationUtils.newConfigurationException(TYPE, tag, "max_matches", "should be between 1 and 128");
|
||||
}
|
||||
|
||||
switch (policyType) {
|
||||
case EnrichPolicy.MATCH_TYPE:
|
||||
return new MatchProcessor(tag, client, policyName, field, targetField, overrideEnabled, ignoreMissing, matchField,
|
||||
maxMatches);
|
||||
case EnrichPolicy.GEO_MATCH_TYPE:
|
||||
String relationStr = ConfigurationUtils.readStringProperty(TYPE, tag, config, "shape_relation", "intersects");
|
||||
ShapeRelation shapeRelation = ShapeRelation.getRelationByName(relationStr);
|
||||
return new GeoMatchProcessor(tag, client, policyName, field, targetField, overrideEnabled, ignoreMissing, matchField,
|
||||
maxMatches, shapeRelation);
|
||||
default:
|
||||
throw new IllegalArgumentException("unsupported policy type [" + policyType + "]");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void accept(ClusterState state) {
|
||||
metaData = state.getMetaData();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,165 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import org.elasticsearch.ResourceAlreadyExistsException;
|
||||
import org.elasticsearch.ResourceNotFoundException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.ClusterStateUpdateTask;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
|
||||
/**
|
||||
* Helper methods for access and storage of an enrich policy.
|
||||
*/
|
||||
public final class EnrichStore {

    // Utility class; not instantiable.
    private EnrichStore() {}

    /**
     * Adds a new enrich policy. If a policy with the given name already exists, the handler is
     * notified with a {@link ResourceAlreadyExistsException}; an existing policy is never
     * overwritten. This method can only be invoked on the elected master node.
     *
     * @param name The unique name of the policy
     * @param policy The policy to store
     * @param clusterService The cluster service used to submit the cluster state update
     * @param handler The handler that gets invoked with {@code null} once the policy has been
     *                stored, or with the exception when a failure occurred
     */
    public static void putPolicy(String name, EnrichPolicy policy, ClusterService clusterService, Consumer<Exception> handler) {
        assert clusterService.localNode().isMasterNode();

        if (Strings.isNullOrEmpty(name)) {
            throw new IllegalArgumentException("name is missing or empty");
        }
        if (policy == null) {
            throw new IllegalArgumentException("policy is missing");
        }
        // The policy name is used to create the enrich index name and
        // therefore a policy name has the same restrictions as an index name
        MetaDataCreateIndexService.validateIndexOrAliasName(name,
            (policyName, error) -> new IllegalArgumentException("Invalid policy name [" + policyName + "], " + error));
        if (name.toLowerCase(Locale.ROOT).equals(name) == false) {
            throw new IllegalArgumentException("Invalid policy name [" + name + "], must be lowercase");
        }
        // TODO: add policy validation

        // Stamp the current node version on the policy if none was provided, so the version the
        // policy was created with is persisted along with it.
        final EnrichPolicy finalPolicy;
        if (policy.getElasticsearchVersion() == null) {
            finalPolicy = new EnrichPolicy(
                policy.getType(),
                policy.getQuery(),
                policy.getIndices(),
                policy.getMatchField(),
                policy.getEnrichFields(),
                Version.CURRENT
            );
        } else {
            finalPolicy = policy;
        }
        updateClusterState(clusterService, handler, current -> {
            final Map<String, EnrichPolicy> policies = getPolicies(current);
            if (policies.get(name) != null) {
                // Thrown inside the cluster state update task; surfaces through the handler.
                throw new ResourceAlreadyExistsException("policy [{}] already exists", name);
            }
            policies.put(name, finalPolicy);
            return policies;
        });
    }

    /**
     * Removes an enrich policy from the policies in the cluster state. This method can only be
     * invoked on the elected master node.
     *
     * @param name The unique name of the policy
     * @param clusterService The cluster service used to submit the cluster state update
     * @param handler The handler that gets invoked with {@code null} once the policy has been
     *                deleted, or with the exception when a failure occurred (e.g. a
     *                {@link ResourceNotFoundException} when no policy with that name exists)
     */
    public static void deletePolicy(String name, ClusterService clusterService, Consumer<Exception> handler) {
        assert clusterService.localNode().isMasterNode();

        if (Strings.isNullOrEmpty(name)) {
            throw new IllegalArgumentException("name is missing or empty");
        }

        updateClusterState(clusterService, handler, current -> {
            final Map<String, EnrichPolicy> policies = getPolicies(current);
            if (policies.containsKey(name) == false) {
                throw new ResourceNotFoundException("policy [{}] not found", name);
            }

            policies.remove(name);
            return policies;
        });
    }

    /**
     * Gets an enrich policy for the provided name if exists or otherwise returns <code>null</code>.
     *
     * @param name The name of the policy to fetch
     * @param state The cluster state to read the policy from
     * @return enrich policy if exists or <code>null</code> otherwise
     */
    public static EnrichPolicy getPolicy(String name, ClusterState state) {
        if (Strings.isNullOrEmpty(name)) {
            throw new IllegalArgumentException("name is missing or empty");
        }

        return getPolicies(state).get(name);
    }

    /**
     * Gets all policies in the cluster.
     *
     * @param state the cluster state
     * @return a mutable Map of <code>policyName, EnrichPolicy</code> of the policies
     */
    public static Map<String, EnrichPolicy> getPolicies(ClusterState state) {
        final Map<String, EnrichPolicy> policies;
        final EnrichMetadata enrichMetadata = state.metaData().custom(EnrichMetadata.TYPE);
        if (enrichMetadata != null) {
            // Make a copy, because policies map inside custom metadata is read only:
            policies = new HashMap<>(enrichMetadata.getPolicies());
        } else {
            policies = new HashMap<>();
        }
        return policies;
    }

    /**
     * Submits a cluster state update task that replaces the {@link EnrichMetadata} custom metadata
     * with the policies produced by {@code function}. The handler is invoked with {@code null} on
     * success or with the failure otherwise; exceptions thrown by {@code function} are routed to
     * {@code onFailure} and thus to the handler as well.
     */
    private static void updateClusterState(ClusterService clusterService,
                                           Consumer<Exception> handler,
                                           Function<ClusterState, Map<String, EnrichPolicy>> function) {
        clusterService.submitStateUpdateTask("update-enrich-metadata", new ClusterStateUpdateTask() {

            @Override
            public ClusterState execute(ClusterState currentState) throws Exception {
                Map<String, EnrichPolicy> policies = function.apply(currentState);
                MetaData metaData = MetaData.builder(currentState.metaData())
                    .putCustom(EnrichMetadata.TYPE, new EnrichMetadata(policies))
                    .build();
                return ClusterState.builder(currentState)
                    .metaData(metaData)
                    .build();
            }

            @Override
            public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                handler.accept(null);
            }

            @Override
            public void onFailure(String source, Exception e) {
                handler.accept(e);
            }
        });
    }
}
|
|
@ -0,0 +1,31 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.tasks.TaskId;
|
||||
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus;
|
||||
|
||||
class ExecuteEnrichPolicyTask extends Task {
|
||||
|
||||
private volatile ExecuteEnrichPolicyStatus status;
|
||||
|
||||
ExecuteEnrichPolicyTask(long id, String type, String action, String description, TaskId parentTask,
|
||||
Map<String, String> headers) {
|
||||
super(id, type, action, description, parentTask, headers);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Status getStatus() {
|
||||
return status;
|
||||
}
|
||||
|
||||
void setStatus(ExecuteEnrichPolicyStatus status) {
|
||||
this.status = status;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,91 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
import org.elasticsearch.common.geo.ShapeRelation;
|
||||
import org.elasticsearch.geometry.Geometry;
|
||||
import org.elasticsearch.geometry.MultiPoint;
|
||||
import org.elasticsearch.geometry.Point;
|
||||
import org.elasticsearch.index.query.GeoShapeQueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.function.BiConsumer;
|
||||
|
||||
public final class GeoMatchProcessor extends AbstractEnrichProcessor {
|
||||
|
||||
private ShapeRelation shapeRelation;
|
||||
|
||||
GeoMatchProcessor(String tag,
|
||||
Client client,
|
||||
String policyName,
|
||||
String field,
|
||||
String targetField,
|
||||
boolean overrideEnabled,
|
||||
boolean ignoreMissing,
|
||||
String matchField,
|
||||
int maxMatches,
|
||||
ShapeRelation shapeRelation) {
|
||||
super(tag, client, policyName, field, targetField, ignoreMissing, overrideEnabled, matchField, maxMatches);
|
||||
this.shapeRelation = shapeRelation;
|
||||
}
|
||||
|
||||
/** used in tests **/
|
||||
GeoMatchProcessor(String tag,
|
||||
BiConsumer<SearchRequest, BiConsumer<SearchResponse, Exception>> searchRunner,
|
||||
String policyName,
|
||||
String field,
|
||||
String targetField,
|
||||
boolean overrideEnabled,
|
||||
boolean ignoreMissing,
|
||||
String matchField,
|
||||
int maxMatches, ShapeRelation shapeRelation) {
|
||||
super(tag, searchRunner, policyName, field, targetField, ignoreMissing, overrideEnabled, matchField, maxMatches);
|
||||
this.shapeRelation = shapeRelation;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public QueryBuilder getQueryBuilder(Object fieldValue) {
|
||||
List<Point> points = new ArrayList<>();
|
||||
if (fieldValue instanceof List) {
|
||||
List<Object> values = (List<Object>) fieldValue;
|
||||
if (values.size() == 2 && values.get(0) instanceof Number) {
|
||||
GeoPoint geoPoint = GeoUtils.parseGeoPoint(values, true);
|
||||
points.add(new Point(geoPoint.lon(), geoPoint.lat()));
|
||||
} else {
|
||||
for (Object value : values) {
|
||||
GeoPoint geoPoint = GeoUtils.parseGeoPoint(value, true);
|
||||
points.add(new Point(geoPoint.lon(), geoPoint.lat()));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
GeoPoint geoPoint = GeoUtils.parseGeoPoint(fieldValue, true);
|
||||
points.add(new Point(geoPoint.lon(), geoPoint.lat()));
|
||||
}
|
||||
final Geometry queryGeometry;
|
||||
if (points.isEmpty()) {
|
||||
throw new IllegalArgumentException("no geopoints found");
|
||||
} else if (points.size() == 1) {
|
||||
queryGeometry = points.get(0);
|
||||
} else {
|
||||
queryGeometry = new MultiPoint(points);
|
||||
}
|
||||
GeoShapeQueryBuilder shapeQuery = new GeoShapeQueryBuilder(matchField, queryGeometry);
|
||||
shapeQuery.relation(shapeRelation);
|
||||
return shapeQuery;
|
||||
}
|
||||
|
||||
public ShapeRelation getShapeRelation() {
|
||||
return shapeRelation;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,53 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.TermQueryBuilder;
|
||||
import org.elasticsearch.index.query.TermsQueryBuilder;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.function.BiConsumer;
|
||||
|
||||
public class MatchProcessor extends AbstractEnrichProcessor {
|
||||
|
||||
MatchProcessor(String tag,
|
||||
Client client,
|
||||
String policyName,
|
||||
String field,
|
||||
String targetField,
|
||||
boolean overrideEnabled,
|
||||
boolean ignoreMissing,
|
||||
String matchField,
|
||||
int maxMatches) {
|
||||
super(tag, client, policyName, field, targetField, ignoreMissing, overrideEnabled, matchField, maxMatches);
|
||||
}
|
||||
|
||||
/** used in tests **/
|
||||
MatchProcessor(String tag,
|
||||
BiConsumer<SearchRequest, BiConsumer<SearchResponse, Exception>> searchRunner,
|
||||
String policyName,
|
||||
String field,
|
||||
String targetField,
|
||||
boolean overrideEnabled,
|
||||
boolean ignoreMissing,
|
||||
String matchField,
|
||||
int maxMatches) {
|
||||
super(tag, searchRunner, policyName, field, targetField, ignoreMissing, overrideEnabled, matchField, maxMatches);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryBuilder getQueryBuilder(Object fieldValue) {
|
||||
if (fieldValue instanceof List) {
|
||||
return new TermsQueryBuilder(matchField, (List) fieldValue);
|
||||
} else {
|
||||
return new TermQueryBuilder(matchField, fieldValue);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,235 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich.action;
|
||||
|
||||
import org.apache.logging.log4j.util.BiConsumer;
|
||||
import org.elasticsearch.action.ActionType;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.search.MultiSearchRequest;
|
||||
import org.elasticsearch.action.search.MultiSearchResponse;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.HandledTransportAction;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.CoordinatorStats;
|
||||
import org.elasticsearch.xpack.enrich.EnrichPlugin;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.ArrayBlockingQueue;
|
||||
import java.util.concurrent.BlockingQueue;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
/**
|
||||
* An internal action to locally manage the load of the search requests that originate from the enrich processor.
|
||||
* This is because the enrich processor executes asynchronously and a bulk request could easily overload
|
||||
* the search tp.
|
||||
*/
|
||||
public class EnrichCoordinatorProxyAction extends ActionType<SearchResponse> {

    public static final EnrichCoordinatorProxyAction INSTANCE = new EnrichCoordinatorProxyAction();
    public static final String NAME = "indices:data/read/xpack/enrich/coordinate_lookups";

    private EnrichCoordinatorProxyAction() {
        super(NAME, SearchResponse::new);
    }

    /** Transport layer entry point: hands each incoming lookup search over to the {@link Coordinator}. */
    public static class TransportAction extends HandledTransportAction<SearchRequest, SearchResponse> {

        private final Coordinator coordinator;

        @Inject
        public TransportAction(TransportService transportService, ActionFilters actionFilters, Coordinator coordinator) {
            super(NAME, transportService, actionFilters, SearchRequest::new);
            this.coordinator = coordinator;
        }

        @Override
        protected void doExecute(Task task, SearchRequest request, ActionListener<SearchResponse> listener) {
            // Lookups are expected to arrive on the write thread pool, since ingest runs on it.
            assert Thread.currentThread().getName().contains(ThreadPool.Names.WRITE);
            coordinator.schedule(request, listener);
        }
    }

    /**
     * Batches individual enrich lookup searches into multi-search requests and bounds both the
     * batch size and the number of concurrent outgoing requests, so that enrich lookups cannot
     * overload the search thread pool.
     */
    public static class Coordinator {

        // Sends a MultiSearchRequest and reports (response, exception) back; pluggable for tests.
        final BiConsumer<MultiSearchRequest, BiConsumer<MultiSearchResponse, Exception>> lookupFunction;
        final int maxLookupsPerRequest;
        final int maxNumberOfConcurrentRequests;
        // Pending lookups; bounded, so producers block in schedule() when full (back pressure).
        final BlockingQueue<Slot> queue;
        final AtomicInteger remoteRequestsCurrent = new AtomicInteger(0);
        // Only incremented inside the synchronized coordinateLookups(); volatile so that
        // getStats() (called from another thread) observes a recent value.
        volatile long remoteRequestsTotal = 0;
        final AtomicLong executedSearchesTotal = new AtomicLong(0);

        public Coordinator(Client client, Settings settings) {
            this(
                lookupFunction(client),
                EnrichPlugin.COORDINATOR_PROXY_MAX_LOOKUPS_PER_REQUEST.get(settings),
                EnrichPlugin.COORDINATOR_PROXY_MAX_CONCURRENT_REQUESTS.get(settings),
                EnrichPlugin.COORDINATOR_PROXY_QUEUE_CAPACITY.get(settings)
            );
        }

        Coordinator(BiConsumer<MultiSearchRequest, BiConsumer<MultiSearchResponse, Exception>> lookupFunction,
                    int maxLookupsPerRequest, int maxNumberOfConcurrentRequests, int queueCapacity) {
            this.lookupFunction = lookupFunction;
            this.maxLookupsPerRequest = maxLookupsPerRequest;
            this.maxNumberOfConcurrentRequests = maxNumberOfConcurrentRequests;
            this.queue = new ArrayBlockingQueue<>(queueCapacity);
        }

        /**
         * Enqueues a lookup search and triggers the drain loop. Blocks if the queue is full.
         */
        void schedule(SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
            // Use put(...), because if queue is full then this method will wait until a free slot becomes available
            // The calling thread here is a write thread (write tp is used by ingest) and
            // this will create natural back pressure from the enrich processor.
            // If there are no write threads available then write requests with ingestion will fail with 429 error code.
            try {
                queue.put(new Slot(searchRequest, listener));
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new RuntimeException("unable to add item to queue", e);
            }
            coordinateLookups();
        }

        /** Snapshot of the coordinator counters for the stats API. */
        CoordinatorStats getStats(String nodeId) {
            return new CoordinatorStats(nodeId, queue.size(), remoteRequestsCurrent.get(), remoteRequestsTotal,
                executedSearchesTotal.get());
        }

        /**
         * Drains queued lookups into batched multi-search requests while there is concurrency
         * budget left. Synchronized: a single thread at a time drains the queue and updates
         * remoteRequestsTotal.
         */
        synchronized void coordinateLookups() {
            while (queue.isEmpty() == false &&
                remoteRequestsCurrent.get() < maxNumberOfConcurrentRequests) {

                final List<Slot> slots = new ArrayList<>();
                queue.drainTo(slots, maxLookupsPerRequest);
                final MultiSearchRequest multiSearchRequest = new MultiSearchRequest();
                slots.forEach(slot -> multiSearchRequest.add(slot.searchRequest));

                remoteRequestsCurrent.incrementAndGet();
                remoteRequestsTotal++;
                lookupFunction.accept(multiSearchRequest, (response, e) -> {
                    handleResponse(slots, response, e);
                });
            }
        }

        /**
         * Fans the multi-search response (or the overall failure) back out to the per-lookup
         * listeners, then re-runs the drain loop since a concurrency slot has been freed.
         * Exactly one of {@code response} / {@code e} is expected to be non-null.
         */
        void handleResponse(List<Slot> slots, MultiSearchResponse response, Exception e) {
            remoteRequestsCurrent.decrementAndGet();
            executedSearchesTotal.addAndGet(slots.size());

            if (response != null) {
                // Items come back in the same order the requests were added.
                assert slots.size() == response.getResponses().length;
                for (int i = 0; i < response.getResponses().length; i++) {
                    MultiSearchResponse.Item responseItem = response.getResponses()[i];
                    Slot slot = slots.get(i);

                    if (responseItem.isFailure()) {
                        slot.actionListener.onFailure(responseItem.getFailure());
                    } else {
                        slot.actionListener.onResponse(responseItem.getResponse());
                    }
                }
            } else if (e != null) {
                slots.forEach(slot -> slot.actionListener.onFailure(e));
            } else {
                throw new AssertionError("no response and no error");
            }

            // There may be room to for a new request now that numberOfOutstandingRequests has been decreased:
            coordinateLookups();
        }

        /** A queued lookup: the search request plus the listener to notify with its result. */
        static class Slot {

            final SearchRequest searchRequest;
            final ActionListener<SearchResponse> actionListener;

            Slot(SearchRequest searchRequest, ActionListener<SearchResponse> actionListener) {
                this.searchRequest = Objects.requireNonNull(searchRequest);
                this.actionListener = Objects.requireNonNull(actionListener);
            }
        }

        /**
         * Production lookup function: splits the batched request per enrich index (each search
         * targets a single index), executes one EnrichShardMultiSearchAction per index, and
         * recombines the per-index responses into one MultiSearchResponse preserving the original
         * slot order. The consumer is always called with a response (per-slot failures are
         * embedded as failed items), never with a top-level exception from this path.
         */
        static BiConsumer<MultiSearchRequest, BiConsumer<MultiSearchResponse, Exception>> lookupFunction(ElasticsearchClient client) {
            return (request, consumer) -> {
                int slot = 0;
                final Map<String, List<Tuple<Integer, SearchRequest>>> itemsPerIndex = new HashMap<>();
                for (SearchRequest searchRequest : request.requests()) {
                    // Group by target index; the original slot index is kept for reassembly.
                    List<Tuple<Integer, SearchRequest>> items =
                        itemsPerIndex.computeIfAbsent(searchRequest.indices()[0], k -> new ArrayList<>());
                    items.add(new Tuple<>(slot, searchRequest));
                    slot++;
                }

                // Completion counting: when all per-index responses (or failures) have arrived,
                // reduce and report once.
                final AtomicInteger counter = new AtomicInteger(0);
                final ConcurrentMap<String, Tuple<MultiSearchResponse, Exception>> shardResponses = new ConcurrentHashMap<>();
                for (Map.Entry<String, List<Tuple<Integer, SearchRequest>>> entry : itemsPerIndex.entrySet()) {
                    final String enrichIndexName = entry.getKey();
                    final List<Tuple<Integer, SearchRequest>> enrichIndexRequestsAndSlots = entry.getValue();
                    ActionListener<MultiSearchResponse> listener = ActionListener.wrap(
                        response -> {
                            shardResponses.put(enrichIndexName, new Tuple<>(response, null));
                            if (counter.incrementAndGet() == itemsPerIndex.size()) {
                                consumer.accept(reduce(request.requests().size(), itemsPerIndex, shardResponses), null);
                            }
                        },
                        e -> {
                            shardResponses.put(enrichIndexName, new Tuple<>(null, e));
                            if (counter.incrementAndGet() == itemsPerIndex.size()) {
                                consumer.accept(reduce(request.requests().size(), itemsPerIndex, shardResponses), null);
                            }
                        }
                    );

                    MultiSearchRequest mrequest = new MultiSearchRequest();
                    enrichIndexRequestsAndSlots.stream().map(Tuple::v2).forEach(mrequest::add);
                    client.execute(EnrichShardMultiSearchAction.INSTANCE, new EnrichShardMultiSearchAction.Request(mrequest), listener);
                }
            };
        }

        /**
         * Reassembles per-index multi-search responses into a single response, writing each item
         * (or each index-wide failure) back into its original slot.
         */
        static MultiSearchResponse reduce(int numRequest,
                                          Map<String, List<Tuple<Integer, SearchRequest>>> itemsPerIndex,
                                          Map<String, Tuple<MultiSearchResponse, Exception>> shardResponses) {
            MultiSearchResponse.Item[] items = new MultiSearchResponse.Item[numRequest];
            for (Map.Entry<String, Tuple<MultiSearchResponse, Exception>> rspEntry : shardResponses.entrySet()) {
                List<Tuple<Integer, SearchRequest>> reqSlots = itemsPerIndex.get(rspEntry.getKey());
                if (rspEntry.getValue().v1() != null) {
                    MultiSearchResponse shardResponse = rspEntry.getValue().v1();
                    for (int i = 0; i < shardResponse.getResponses().length; i++) {
                        int slot = reqSlots.get(i).v1();
                        items[slot] = shardResponse.getResponses()[i];
                    }
                } else if (rspEntry.getValue().v2() != null) {
                    // The whole per-index request failed: mark every slot of that index as failed.
                    Exception e = rspEntry.getValue().v2();
                    for (Tuple<Integer, SearchRequest> originSlot : reqSlots) {
                        items[originSlot.v1()] = new MultiSearchResponse.Item(null, e);
                    }
                } else {
                    throw new AssertionError();
                }
            }
            // NOTE(review): 1L appears to be a placeholder "took" value for the combined response
            // rather than a measured duration — confirm.
            return new MultiSearchResponse(items, 1L);
        }

    }

}
|
|
@ -0,0 +1,152 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich.action;
|
||||
|
||||
import org.elasticsearch.action.ActionType;
|
||||
import org.elasticsearch.action.FailedNodeException;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodeRequest;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodesRequest;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
|
||||
import org.elasticsearch.action.support.nodes.TransportNodesAction;
|
||||
import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.CoordinatorStats;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* This is an internal action that gather coordinator stats from each node with an ingest role in the cluster.
|
||||
* This action is only used via the {@link EnrichStatsAction}.
|
||||
*/
|
||||
public class EnrichCoordinatorStatsAction extends ActionType<EnrichCoordinatorStatsAction.Response> {

    public static final EnrichCoordinatorStatsAction INSTANCE = new EnrichCoordinatorStatsAction();
    public static final String NAME = "cluster:admin/xpack/enrich/coordinator_stats";

    private EnrichCoordinatorStatsAction() {
        super(NAME, Response::new);
    }

    // This always executes on all ingest nodes, hence no node ids need to be provided.
    public static class Request extends BaseNodesRequest<Request> {

        public Request() {
            super(new String[0]);
        }

        Request(StreamInput in) throws IOException {
            super(in);
        }
    }

    /** Per-node request; carries no payload of its own. */
    public static class NodeRequest extends BaseNodeRequest {

        NodeRequest() {}

        NodeRequest(StreamInput in) throws IOException {
            super(in);
        }

    }

    /** Aggregated response: one {@link NodeResponse} per ingest node, plus any node failures. */
    public static class Response extends BaseNodesResponse<NodeResponse> {

        Response(StreamInput in) throws IOException {
            super(in);
        }

        Response(ClusterName clusterName, List<NodeResponse> nodes, List<FailedNodeException> failures) {
            super(clusterName, nodes, failures);
        }

        @Override
        protected List<NodeResponse> readNodesFrom(StreamInput in) throws IOException {
            return in.readList(NodeResponse::new);
        }

        @Override
        protected void writeNodesTo(StreamOutput out, List<NodeResponse> nodes) throws IOException {
            out.writeList(nodes);
        }
    }

    /** A single node's coordinator stats. */
    public static class NodeResponse extends BaseNodeResponse {

        private final CoordinatorStats coordinatorStats;

        NodeResponse(DiscoveryNode node, CoordinatorStats coordinatorStats) {
            super(node);
            this.coordinatorStats = coordinatorStats;
        }

        NodeResponse(StreamInput in) throws IOException {
            super(in);
            this.coordinatorStats = new CoordinatorStats(in);
        }

        public CoordinatorStats getCoordinatorStats() {
            return coordinatorStats;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            // Keep the wire order: node header first (super), then the stats payload.
            super.writeTo(out);
            coordinatorStats.writeTo(out);
        }
    }

    /**
     * Fans the stats request out to all ingest nodes and collects each node's local
     * coordinator stats.
     */
    public static class TransportAction extends TransportNodesAction<Request, Response, NodeRequest, NodeResponse> {

        private final EnrichCoordinatorProxyAction.Coordinator coordinator;

        @Inject
        public TransportAction(ThreadPool threadPool, ClusterService clusterService, TransportService transportService,
                               ActionFilters actionFilters, EnrichCoordinatorProxyAction.Coordinator coordinator) {
            super(NAME, threadPool, clusterService, transportService, actionFilters, Request::new, NodeRequest::new,
                ThreadPool.Names.SAME, NodeResponse.class);
            this.coordinator = coordinator;
        }

        @Override
        protected void resolveRequest(Request request, ClusterState clusterState) {
            // Target every ingest node in the cluster; the request itself names no nodes.
            DiscoveryNode[] ingestNodes = clusterState.getNodes().getIngestNodes().values().toArray(DiscoveryNode.class);
            request.setConcreteNodes(ingestNodes);
        }

        @Override
        protected Response newResponse(Request request, List<NodeResponse> nodeResponses, List<FailedNodeException> failures) {
            return new Response(clusterService.getClusterName(), nodeResponses, failures);
        }

        @Override
        protected NodeRequest newNodeRequest(Request request) {
            return new NodeRequest();
        }

        @Override
        protected NodeResponse newNodeResponse(StreamInput in) throws IOException {
            return new NodeResponse(in);
        }

        @Override
        protected NodeResponse nodeOperation(NodeRequest request) {
            // Executed locally on each ingest node: read this node's coordinator counters.
            DiscoveryNode node = clusterService.localNode();
            return new NodeResponse(node, coordinator.getStats(node.getId()));
        }
    }

}
|
|
@ -0,0 +1,278 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich.action;
|
||||
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.ScoreDoc;
|
||||
import org.apache.lucene.search.Sort;
|
||||
import org.apache.lucene.search.SortField;
|
||||
import org.apache.lucene.search.TopDocs;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.ActionType;
|
||||
import org.elasticsearch.action.ValidateActions;
|
||||
import org.elasticsearch.action.search.MultiSearchRequest;
|
||||
import org.elasticsearch.action.search.MultiSearchResponse;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.action.search.ShardSearchFailure;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.single.shard.SingleShardRequest;
|
||||
import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.routing.GroupShardsIterator;
|
||||
import org.elasticsearch.cluster.routing.IndexRoutingTable;
|
||||
import org.elasticsearch.cluster.routing.Preference;
|
||||
import org.elasticsearch.cluster.routing.ShardIterator;
|
||||
import org.elasticsearch.cluster.routing.ShardsIterator;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.common.xcontent.DeprecationHandler;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.engine.Engine;
|
||||
import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.indices.IndicesService;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.search.SearchHits;
|
||||
import org.elasticsearch.search.SearchModule;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
|
||||
import org.elasticsearch.search.internal.InternalSearchResponse;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.UncheckedIOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* This is an internal action, that executes msearch requests for enrich indices in a more efficient manner.
|
||||
* Currently each search request inside a msearch request is executed as a separate search. If many search requests
|
||||
* are targeted to the same shards then there is quite some overhead in executing each search request as a separate
|
||||
* search (multiple search contexts, opening of multiple searchers).
|
||||
*
|
||||
* In case for the enrich processor, searches are always targeting the same single shard indices. This action
|
||||
* handles multi search requests targeting enrich indices more efficiently by executing them in a bulk using the same
|
||||
* searcher and query shard context.
|
||||
*
|
||||
* This action (plus some coordination logic in {@link EnrichCoordinatorProxyAction}) can be removed when msearch can
|
||||
* execute search requests targeted to the same shard more efficiently in a bulk like style.
|
||||
*
|
||||
* Note that this 'msearch' implementation only supports executing a query, pagination and source filtering.
|
||||
* Other search features are not supported, because the enrich processor isn't using these search features.
|
||||
*/
|
||||
public class EnrichShardMultiSearchAction extends ActionType<MultiSearchResponse> {

    public static final EnrichShardMultiSearchAction INSTANCE = new EnrichShardMultiSearchAction();
    private static final String NAME = "indices:data/read/shard_multi_search";

    private EnrichShardMultiSearchAction() {
        super(NAME, MultiSearchResponse::new);
    }

    // Single-shard request wrapping an entire msearch request. All inner searches must
    // target the same single-shard enrich index (asserted in the constructor and
    // enforced for the index name in validate()).
    public static class Request extends SingleShardRequest<Request> {

        private final MultiSearchRequest multiSearchRequest;

        public Request(MultiSearchRequest multiSearchRequest) {
            // The first inner request determines the concrete index of this single
            // shard request; the assertion below checks all requests agree on it.
            super(multiSearchRequest.requests().get(0).indices()[0]);
            this.multiSearchRequest = multiSearchRequest;
            assert multiSearchRequest.requests().stream()
                .map(SearchRequest::indices)
                .flatMap(Arrays::stream)
                .distinct()
                .count() == 1 : "action [" + NAME + "] cannot handle msearch request pointing to multiple indices";
            assert assertSearchSource();
        }

        // Deserialization constructor used by the transport layer.
        public Request(StreamInput in) throws IOException {
            super(in);
            multiSearchRequest = new MultiSearchRequest(in);
        }

        // Rejects any index that is not an enrich index, since this action bypasses the
        // normal search code path and is only safe for enrich indices.
        @Override
        public ActionRequestValidationException validate() {
            ActionRequestValidationException validationException = validateNonNullIndex();
            if (index.startsWith(EnrichPolicy.ENRICH_INDEX_NAME_BASE) == false) {
                validationException = ValidateActions.addValidationError("index [" + index + "] is not an enrich index",
                    validationException);
            }
            return validationException;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            multiSearchRequest.writeTo(out);
        }

        MultiSearchRequest getMultiSearchRequest() {
            return multiSearchRequest;
        }

        // Assertion-only check that each inner search uses only the supported features
        // (query, from, size, source filtering): unset the supported features on a copy
        // and verify nothing else remains compared to an empty source.
        private boolean assertSearchSource() {
            for (SearchRequest request : multiSearchRequest.requests()) {
                SearchSourceBuilder copy = copy(request.source());

                // validate that only a from, size, query and source filtering has been provided (other features are not supported):
                // (first unset, what is supported and then see if there is anything left)
                copy.query(null);
                copy.from(0);
                copy.size(10);
                copy.fetchSource(null);
                assert EMPTY_SOURCE.equals(copy) : "search request [" + Strings.toString(copy) +
                    "] is using features that is not supported";
            }
            return true;
        }

        // Deep-copies a search source via a serialization round trip so the assertion
        // above can mutate it freely. Only used from assertions, so the cost of building
        // a SearchModule per call is acceptable.
        private SearchSourceBuilder copy(SearchSourceBuilder source) {
            SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
            NamedWriteableRegistry registry = new NamedWriteableRegistry(searchModule.getNamedWriteables());
            try (BytesStreamOutput output = new BytesStreamOutput()) {
                source.writeTo(output);
                try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), registry)) {
                    return new SearchSourceBuilder(in);
                }
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }

        // Baseline for the feature check: from/size explicitly set to their defaults,
        // because SearchSourceBuilder cannot represent "not specified" (-1) via setters.
        private static final SearchSourceBuilder EMPTY_SOURCE = new SearchSourceBuilder()
            // can't set -1 to indicate not specified
            .from(0).size(10);
    }

    // Executes all inner searches against the one shard of the enrich index using a
    // single acquired searcher and query shard context, avoiding per-search overhead.
    public static class TransportAction extends TransportSingleShardAction<Request, MultiSearchResponse> {

        private final IndicesService indicesService;

        @Inject
        public TransportAction(ThreadPool threadPool, ClusterService clusterService, TransportService transportService,
                               ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                               IndicesService indicesService) {
            super(NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
                Request::new, ThreadPool.Names.SEARCH);
            this.indicesService = indicesService;
        }

        @Override
        protected Writeable.Reader<MultiSearchResponse> getResponseReader() {
            return MultiSearchResponse::new;
        }

        @Override
        protected boolean resolveIndex(Request request) {
            return true;
        }

        // Routes to the single shard of the enrich index, preferring a local copy.
        // Enrich indices are created with exactly one shard; anything else is a bug.
        @Override
        protected ShardsIterator shards(ClusterState state, InternalRequest request) {
            String index = request.concreteIndex();
            IndexRoutingTable indexRouting = state.routingTable().index(index);
            int numShards = indexRouting.shards().size();
            if (numShards != 1) {
                throw new IllegalStateException("index [" + index + "] should have 1 shard, but has " + numShards + " shards");
            }

            GroupShardsIterator<ShardIterator> result =
                clusterService.operationRouting().searchShards(state, new String[] {index}, null, Preference.LOCAL.type());
            return result.get(0);
        }

        // Runs every inner search in turn against one searcher: query rewrite + top-N
        // collection in doc-id order, then source loading and optional source filtering
        // per hit. Only query/from/size/_source are honored (see class javadoc).
        @Override
        protected MultiSearchResponse shardOperation(Request request, ShardId shardId) throws IOException {
            // NOTE(review): indexService may be null if the index was just removed
            // locally — presumably prevented by routing to an assigned shard; confirm.
            final IndexService indexService = indicesService.indexService(shardId.getIndex());
            final IndexShard indexShard = indicesService.getShardOrNull(shardId);
            try (Engine.Searcher searcher = indexShard.acquireSearcher("enrich_msearch")) {
                final FieldsVisitor visitor = new FieldsVisitor(true);
                final QueryShardContext context = indexService.newQueryShardContext(shardId.id(),
                    searcher, () -> {throw new UnsupportedOperationException();}, null);
                final MapperService mapperService = context.getMapperService();
                final Text typeText = mapperService.documentMapper().typeText();

                final MultiSearchResponse.Item[] items = new MultiSearchResponse.Item[request.multiSearchRequest.requests().size()];
                for (int i = 0; i < request.multiSearchRequest.requests().size(); i++) {
                    final SearchSourceBuilder searchSourceBuilder = request.multiSearchRequest.requests().get(i).source();

                    final QueryBuilder queryBuilder = searchSourceBuilder.query();
                    // assumes from/size are always set by the enrich processor
                    // (SearchSourceBuilder defaults them to -1 when unset) — TODO confirm
                    final int from = searchSourceBuilder.from();
                    final int size = searchSourceBuilder.size();
                    final FetchSourceContext fetchSourceContext = searchSourceBuilder.fetchSource();

                    final Query luceneQuery = queryBuilder.rewrite(context).toQuery(context);
                    // Collect from+size docs so pagination can be applied by the caller.
                    final int n = from + size;
                    final TopDocs topDocs = searcher.search(luceneQuery, n, new Sort(SortField.FIELD_DOC));

                    final SearchHit[] hits = new SearchHit[topDocs.scoreDocs.length];
                    for (int j = 0; j < topDocs.scoreDocs.length; j++) {
                        final ScoreDoc scoreDoc = topDocs.scoreDocs[j];

                        // Load stored fields (incl. _source) for the hit; the visitor is
                        // reused across hits and must be reset each iteration.
                        visitor.reset();
                        searcher.doc(scoreDoc.doc, visitor);
                        visitor.postProcess(mapperService);
                        final SearchHit hit = new SearchHit(scoreDoc.doc, visitor.uid().id(), typeText, Collections.emptyMap());
                        hit.sourceRef(filterSource(fetchSourceContext, visitor.source()));
                        hits[j] = hit;
                    }
                    items[i] = new MultiSearchResponse.Item(createSearchResponse(topDocs, hits), null);
                }
                return new MultiSearchResponse(items, 1L);
            }
        }

    }

    // Applies _source include/exclude filtering by re-serializing the source through an
    // XContentBuilder configured with the include/exclude sets. Returns the original
    // bytes untouched when no filtering was requested.
    private static BytesReference filterSource(FetchSourceContext fetchSourceContext, BytesReference source) throws IOException {
        if (fetchSourceContext.includes().length == 0 && fetchSourceContext.excludes().length == 0) {
            return source;
        }

        Set<String> includes = new HashSet<>(Arrays.asList(fetchSourceContext.includes()));
        Set<String> excludes = new HashSet<>(Arrays.asList(fetchSourceContext.excludes()));

        // SMILE is used on both sides; assumes enrich index sources are stored as
        // SMILE — TODO confirm against the enrich index creation code.
        XContentBuilder builder =
            new XContentBuilder(XContentType.SMILE.xContent(), new BytesStreamOutput(source.length()), includes, excludes);
        XContentParser sourceParser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
            DeprecationHandler.THROW_UNSUPPORTED_OPERATION, source, XContentType.SMILE);
        builder.copyCurrentStructure(sourceParser);
        return BytesReference.bytes(builder);
    }

    // Wraps the collected hits into a minimal SearchResponse (no aggs, no suggest,
    // single shard, zero took-time), which is all the enrich processor consumes.
    private static SearchResponse createSearchResponse(TopDocs topDocs, SearchHit[] hits) {
        SearchHits searchHits = new SearchHits(hits, topDocs.totalHits, 0);
        return new SearchResponse(
            new InternalSearchResponse(searchHits, null, null, null, false, null, 0),
            null, 1, 1, 0, 1L, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY
        );
    }

}
|
|
@ -0,0 +1,157 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich.action;
|
||||
|
||||
import org.elasticsearch.ElasticsearchStatusException;
|
||||
import org.elasticsearch.ResourceNotFoundException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.ingest.IngestService;
|
||||
import org.elasticsearch.ingest.PipelineConfiguration;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
|
||||
import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.enrich.AbstractEnrichProcessor;
|
||||
import org.elasticsearch.xpack.enrich.EnrichPolicyLocks;
|
||||
import org.elasticsearch.xpack.enrich.EnrichStore;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
// Master-node action that deletes an enrich policy plus all enrich indices created
// for it. Refuses to delete a policy that is still referenced by any ingest
// pipeline, and holds the policy lock for the duration so a concurrent execute
// cannot race with the delete.
public class TransportDeleteEnrichPolicyAction extends TransportMasterNodeAction<DeleteEnrichPolicyAction.Request, AcknowledgedResponse> {

    private final EnrichPolicyLocks enrichPolicyLocks;
    private final IngestService ingestService;
    private final Client client;
    // the most lenient we can get in order to not bomb out if no indices are found, which is a valid case
    // where a user creates and deletes a policy before running execute
    private static final IndicesOptions LENIENT_OPTIONS = IndicesOptions.fromOptions(true, true, true, true);


    @Inject
    public TransportDeleteEnrichPolicyAction(TransportService transportService,
                                             ClusterService clusterService,
                                             ThreadPool threadPool,
                                             ActionFilters actionFilters,
                                             IndexNameExpressionResolver indexNameExpressionResolver,
                                             Client client,
                                             EnrichPolicyLocks enrichPolicyLocks,
                                             IngestService ingestService) {
        super(DeleteEnrichPolicyAction.NAME, transportService, clusterService, threadPool, actionFilters,
            DeleteEnrichPolicyAction.Request::new, indexNameExpressionResolver);
        this.client = client;
        this.enrichPolicyLocks = enrichPolicyLocks;
        this.ingestService = ingestService;
    }

    @Override
    protected String executor() {
        return ThreadPool.Names.SAME;
    }

    // Legacy Streamable entry point; responses are read via read(StreamInput) instead.
    protected AcknowledgedResponse newResponse() {
        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
    }

    @Override
    protected AcknowledgedResponse read(StreamInput in) throws IOException {
        return new AcknowledgedResponse(in);
    }

    // Flow: verify the policy exists, take the policy lock, verify no pipeline uses the
    // policy, then delete the enrich indices followed by the policy itself. The lock is
    // released on every path (pipeline-in-use failure, delete success, delete failure).
    @Override
    protected void masterOperation(DeleteEnrichPolicyAction.Request request, ClusterState state,
                                   ActionListener<AcknowledgedResponse> listener) throws Exception {
        EnrichPolicy policy = EnrichStore.getPolicy(request.getName(), state); // ensure the policy exists first
        if (policy == null) {
            // Thrown before the lock is taken, so no release is needed here.
            throw new ResourceNotFoundException("policy [{}] not found", request.getName());
        }

        enrichPolicyLocks.lockPolicy(request.getName());
        try {
            // Scan all pipelines for enrich processors that reference this policy;
            // deleting a policy out from under a pipeline would break ingestion.
            List<PipelineConfiguration> pipelines = IngestService.getPipelines(state);
            List<String> pipelinesWithProcessors = new ArrayList<>();

            for (PipelineConfiguration pipelineConfiguration : pipelines) {
                List<AbstractEnrichProcessor> enrichProcessors =
                    ingestService.getProcessorsInPipeline(pipelineConfiguration.getId(), AbstractEnrichProcessor.class);
                for (AbstractEnrichProcessor processor : enrichProcessors) {
                    if (processor.getPolicyName().equals(request.getName())) {
                        pipelinesWithProcessors.add(pipelineConfiguration.getId());
                    }
                }
            }

            if (pipelinesWithProcessors.isEmpty() == false) {
                throw new ElasticsearchStatusException("Could not delete policy [{}] because a pipeline is referencing it {}",
                    RestStatus.CONFLICT, request.getName(), pipelinesWithProcessors);
            }
        } catch (Exception e) {
            // Release the lock before reporting any failure from the reference check.
            enrichPolicyLocks.releasePolicy(request.getName());
            listener.onFailure(e);
            return;
        }

        deleteIndicesAndPolicy(request.getName(), ActionListener.wrap(
            (response) -> {
                enrichPolicyLocks.releasePolicy(request.getName());
                listener.onResponse(response);
            },
            (exc) -> {
                enrichPolicyLocks.releasePolicy(request.getName());
                listener.onFailure(exc);
            }
        ));
    }

    // Deletes all enrich indices belonging to the policy (wildcard on the policy's base
    // name), then deletes the policy itself. Uses lenient indices options so the delete
    // succeeds when no enrich index was ever created (policy never executed).
    private void deleteIndicesAndPolicy(String name, ActionListener<AcknowledgedResponse> listener) {
        // delete all enrich indices for this policy
        DeleteIndexRequest deleteRequest = new DeleteIndexRequest()
            .indices(EnrichPolicy.getBaseName(name) + "-*")
            .indicesOptions(LENIENT_OPTIONS);

        client.admin().indices().delete(deleteRequest, ActionListener.wrap(
            (response) -> {
                if (response.isAcknowledged() == false) {
                    listener.onFailure(new ElasticsearchStatusException("Could not fetch indices to delete during policy delete of [{}]",
                        RestStatus.INTERNAL_SERVER_ERROR, name));
                } else {
                    deletePolicy(name, listener);
                }
            },
            (error) -> listener.onFailure(error)
        ));
    }

    // Removes the policy from cluster state via the enrich store.
    private void deletePolicy(String name, ActionListener<AcknowledgedResponse> listener) {
        EnrichStore.deletePolicy(name, clusterService, e -> {
            if (e == null) {
                listener.onResponse(new AcknowledgedResponse(true));
            } else {
                listener.onFailure(e);
            }
        });
    }

    @Override
    protected ClusterBlockException checkBlock(DeleteEnrichPolicyAction.Request request, ClusterState state) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
    }
}
|
|
@ -0,0 +1,97 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich.action;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.FailedNodeException;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.CoordinatorStats;
|
||||
import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction.Response.ExecutingPolicy;
|
||||
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
// Master-node action that assembles the enrich stats response: per-node coordinator
// stats fetched from all ingest nodes, plus the policy executions currently running
// as tasks on the local (master) node.
public class TransportEnrichStatsAction extends TransportMasterNodeAction<EnrichStatsAction.Request, EnrichStatsAction.Response> {

    private final Client client;

    @Inject
    public TransportEnrichStatsAction(TransportService transportService, ClusterService clusterService, ThreadPool threadPool,
                                      ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                                      Client client) {
        super(EnrichStatsAction.NAME, transportService, clusterService, threadPool, actionFilters,
            EnrichStatsAction.Request::new, indexNameExpressionResolver);
        this.client = client;
    }

    @Override
    protected String executor() {
        return ThreadPool.Names.SAME;
    }

    @Override
    protected EnrichStatsAction.Response read(StreamInput in) throws IOException {
        return new EnrichStatsAction.Response(in);
    }

    @Override
    protected void masterOperation(EnrichStatsAction.Request request,
                                   ClusterState state,
                                   ActionListener<EnrichStatsAction.Response> listener) throws Exception {
        EnrichCoordinatorStatsAction.Request statsRequest = new EnrichCoordinatorStatsAction.Request();
        ActionListener<EnrichCoordinatorStatsAction.Response> statsListener = ActionListener.wrap(
            response -> {
                if (response.hasFailures()) {
                    // Report failures even if some node level requests succeed:
                    // fold all node failures into one exception (first as primary,
                    // the rest as suppressed) so nothing is silently dropped.
                    Exception failure = null;
                    for (FailedNodeException nodeFailure : response.failures()) {
                        if (failure == null) {
                            failure = nodeFailure;
                        } else {
                            failure.addSuppressed(nodeFailure);
                        }
                    }
                    listener.onFailure(failure);
                    return;
                }

                // Sort by node id for a stable, deterministic response ordering.
                List<CoordinatorStats> coordinatorStats = response.getNodes().stream()
                    .map(EnrichCoordinatorStatsAction.NodeResponse::getCoordinatorStats)
                    .sorted(Comparator.comparing(CoordinatorStats::getNodeId))
                    .collect(Collectors.toList());
                // Policy executions run as tasks on the master node's task manager;
                // the task description carries the policy name.
                List<ExecutingPolicy> policyExecutionTasks = taskManager.getTasks().values().stream()
                    .filter(t -> t.getAction().equals(ExecuteEnrichPolicyAction.NAME))
                    .map(t -> t.taskInfo(clusterService.localNode().getId(), true))
                    .map(t -> new ExecutingPolicy(t.getDescription(), t))
                    .sorted(Comparator.comparing(ExecutingPolicy::getName))
                    .collect(Collectors.toList());
                listener.onResponse(new EnrichStatsAction.Response(policyExecutionTasks, coordinatorStats));
            },
            listener::onFailure
        );
        client.execute(EnrichCoordinatorStatsAction.INSTANCE, statsRequest, statsListener);
    }

    @Override
    protected ClusterBlockException checkBlock(EnrichStatsAction.Request request, ClusterState state) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
    }
}
|
|
@ -0,0 +1,88 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich.action;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.tasks.LoggingTaskListener;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.tasks.TaskId;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyStatus;
|
||||
import org.elasticsearch.xpack.enrich.EnrichPolicyExecutor;
|
||||
import org.elasticsearch.xpack.enrich.EnrichPolicyLocks;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class TransportExecuteEnrichPolicyAction
|
||||
extends TransportMasterNodeAction<ExecuteEnrichPolicyAction.Request, ExecuteEnrichPolicyAction.Response> {
|
||||
|
||||
private final EnrichPolicyExecutor executor;
|
||||
|
||||
@Inject
|
||||
public TransportExecuteEnrichPolicyAction(Settings settings,
|
||||
Client client,
|
||||
TransportService transportService,
|
||||
ClusterService clusterService,
|
||||
ThreadPool threadPool,
|
||||
ActionFilters actionFilters,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver,
|
||||
EnrichPolicyLocks enrichPolicyLocks) {
|
||||
super(ExecuteEnrichPolicyAction.NAME, transportService, clusterService, threadPool, actionFilters,
|
||||
ExecuteEnrichPolicyAction.Request::new, indexNameExpressionResolver);
|
||||
this.executor = new EnrichPolicyExecutor(settings, clusterService, client, transportService.getTaskManager(), threadPool,
|
||||
new IndexNameExpressionResolver(), enrichPolicyLocks, System::currentTimeMillis);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String executor() {
|
||||
return ThreadPool.Names.SAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ExecuteEnrichPolicyAction.Response read(StreamInput in) throws IOException {
|
||||
return new ExecuteEnrichPolicyAction.Response(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void masterOperation(ExecuteEnrichPolicyAction.Request request, ClusterState state,
|
||||
ActionListener<ExecuteEnrichPolicyAction.Response> listener) {
|
||||
if (request.isWaitForCompletion()) {
|
||||
executor.runPolicy(request, new ActionListener<ExecuteEnrichPolicyStatus>() {
|
||||
@Override
|
||||
public void onResponse(ExecuteEnrichPolicyStatus executionStatus) {
|
||||
listener.onResponse(new ExecuteEnrichPolicyAction.Response(executionStatus));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
Task executeTask = executor.runPolicy(request, LoggingTaskListener.instance());
|
||||
TaskId taskId = new TaskId(clusterService.localNode().getId(), executeTask.getId());
|
||||
listener.onResponse(new ExecuteEnrichPolicyAction.Response(taskId));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ClusterBlockException checkBlock(ExecuteEnrichPolicyAction.Request request, ClusterState state) {
|
||||
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,80 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich.action;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
|
||||
import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.enrich.EnrichStore;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
public class TransportGetEnrichPolicyAction extends TransportMasterNodeReadAction<GetEnrichPolicyAction.Request,
|
||||
GetEnrichPolicyAction.Response> {
|
||||
|
||||
@Inject
|
||||
public TransportGetEnrichPolicyAction(TransportService transportService,
|
||||
ClusterService clusterService,
|
||||
ThreadPool threadPool,
|
||||
ActionFilters actionFilters,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver) {
|
||||
super(GetEnrichPolicyAction.NAME, transportService, clusterService, threadPool, actionFilters,
|
||||
GetEnrichPolicyAction.Request::new, indexNameExpressionResolver);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String executor() {
|
||||
return ThreadPool.Names.SAME;
|
||||
}
|
||||
|
||||
protected GetEnrichPolicyAction.Response newResponse() {
|
||||
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected GetEnrichPolicyAction.Response read(StreamInput in) throws IOException {
|
||||
return new GetEnrichPolicyAction.Response(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void masterOperation(GetEnrichPolicyAction.Request request,
|
||||
ClusterState state,
|
||||
ActionListener<GetEnrichPolicyAction.Response> listener) throws Exception {
|
||||
Map<String, EnrichPolicy> policies;
|
||||
if (request.getNames() == null || request.getNames().isEmpty()) {
|
||||
policies = EnrichStore.getPolicies(state);
|
||||
} else {
|
||||
policies = new HashMap<>();
|
||||
for (String name: request.getNames()) {
|
||||
if (name.isEmpty() == false) {
|
||||
EnrichPolicy policy = EnrichStore.getPolicy(name, state);
|
||||
if (policy != null) {
|
||||
policies.put(name, policy);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
listener.onResponse(new GetEnrichPolicyAction.Response(policies));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ClusterBlockException checkBlock(GetEnrichPolicyAction.Request request, ClusterState state) {
|
||||
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,118 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich.action;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.license.XPackLicenseState;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.xpack.core.XPackSettings;
|
||||
import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction;
|
||||
import org.elasticsearch.xpack.core.security.SecurityContext;
|
||||
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction;
|
||||
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest;
|
||||
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse;
|
||||
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
|
||||
import org.elasticsearch.xpack.core.security.support.Exceptions;
|
||||
import org.elasticsearch.xpack.enrich.EnrichStore;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class TransportPutEnrichPolicyAction extends TransportMasterNodeAction<PutEnrichPolicyAction.Request, AcknowledgedResponse> {
|
||||
|
||||
private final XPackLicenseState licenseState;
|
||||
private final SecurityContext securityContext;
|
||||
private final Client client;
|
||||
|
||||
@Inject
|
||||
public TransportPutEnrichPolicyAction(Settings settings, TransportService transportService,
|
||||
ClusterService clusterService, ThreadPool threadPool, Client client,
|
||||
XPackLicenseState licenseState, ActionFilters actionFilters,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver) {
|
||||
super(PutEnrichPolicyAction.NAME, transportService, clusterService, threadPool, actionFilters,
|
||||
PutEnrichPolicyAction.Request::new, indexNameExpressionResolver);
|
||||
this.licenseState = licenseState;
|
||||
this.securityContext = XPackSettings.SECURITY_ENABLED.get(settings) ?
|
||||
new SecurityContext(settings, threadPool.getThreadContext()) : null;
|
||||
this.client = client;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String executor() {
|
||||
return ThreadPool.Names.SAME;
|
||||
}
|
||||
|
||||
protected AcknowledgedResponse newResponse() {
|
||||
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AcknowledgedResponse read(StreamInput in) throws IOException {
|
||||
return new AcknowledgedResponse(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void masterOperation(PutEnrichPolicyAction.Request request, ClusterState state,
|
||||
ActionListener<AcknowledgedResponse> listener) {
|
||||
|
||||
if (licenseState.isAuthAllowed()) {
|
||||
RoleDescriptor.IndicesPrivileges privileges = RoleDescriptor.IndicesPrivileges.builder()
|
||||
.indices(request.getPolicy().getIndices())
|
||||
.privileges("read")
|
||||
.build();
|
||||
|
||||
String username = securityContext.getUser().principal();
|
||||
|
||||
HasPrivilegesRequest privRequest = new HasPrivilegesRequest();
|
||||
privRequest.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]);
|
||||
privRequest.username(username);
|
||||
privRequest.clusterPrivileges(Strings.EMPTY_ARRAY);
|
||||
privRequest.indexPrivileges(privileges);
|
||||
|
||||
ActionListener<HasPrivilegesResponse> wrappedListener = ActionListener.wrap(
|
||||
r -> {
|
||||
if (r.isCompleteMatch()) {
|
||||
putPolicy(request, listener);
|
||||
} else {
|
||||
listener.onFailure(Exceptions.authorizationError("unable to store policy because no indices match with the " +
|
||||
"specified index patterns {}", request.getPolicy().getIndices(), username));
|
||||
}
|
||||
},
|
||||
listener::onFailure);
|
||||
client.execute(HasPrivilegesAction.INSTANCE, privRequest, wrappedListener);
|
||||
} else {
|
||||
putPolicy(request, listener);
|
||||
}
|
||||
}
|
||||
|
||||
private void putPolicy(PutEnrichPolicyAction.Request request, ActionListener<AcknowledgedResponse> listener ) {
|
||||
EnrichStore.putPolicy(request.getName(), request.getPolicy(), clusterService, e -> {
|
||||
if (e == null) {
|
||||
listener.onResponse(new AcknowledgedResponse(true));
|
||||
} else {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ClusterBlockException checkBlock(PutEnrichPolicyAction.Request request, ClusterState state) {
|
||||
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,33 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich.rest;
|
||||
|
||||
import org.elasticsearch.client.node.NodeClient;
|
||||
import org.elasticsearch.rest.BaseRestHandler;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.action.RestToXContentListener;
|
||||
import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class RestDeleteEnrichPolicyAction extends BaseRestHandler {
|
||||
|
||||
public RestDeleteEnrichPolicyAction(final RestController controller) {
|
||||
controller.registerHandler(RestRequest.Method.DELETE, "/_enrich/policy/{name}", this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "delete_enrich_policy";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException {
|
||||
final DeleteEnrichPolicyAction.Request request = new DeleteEnrichPolicyAction.Request(restRequest.param("name"));
|
||||
return channel -> client.execute(DeleteEnrichPolicyAction.INSTANCE, request, new RestToXContentListener<>(channel));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,34 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich.rest;
|
||||
|
||||
import org.elasticsearch.client.node.NodeClient;
|
||||
import org.elasticsearch.rest.BaseRestHandler;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.action.RestToXContentListener;
|
||||
import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class RestEnrichStatsAction extends BaseRestHandler {
|
||||
|
||||
public RestEnrichStatsAction(final RestController controller) {
|
||||
controller.registerHandler(RestRequest.Method.GET, "/_enrich/_stats", this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "enrich_stats";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException {
|
||||
final EnrichStatsAction.Request request = new EnrichStatsAction.Request();
|
||||
return channel -> client.execute(EnrichStatsAction.INSTANCE, request, new RestToXContentListener<>(channel));
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,35 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich.rest;
|
||||
|
||||
import org.elasticsearch.client.node.NodeClient;
|
||||
import org.elasticsearch.rest.BaseRestHandler;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.action.RestToXContentListener;
|
||||
import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class RestExecuteEnrichPolicyAction extends BaseRestHandler {
|
||||
|
||||
public RestExecuteEnrichPolicyAction(final RestController controller) {
|
||||
controller.registerHandler(RestRequest.Method.PUT, "/_enrich/policy/{name}/_execute", this);
|
||||
controller.registerHandler(RestRequest.Method.POST, "/_enrich/policy/{name}/_execute", this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "execute_enrich_policy";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException {
|
||||
final ExecuteEnrichPolicyAction.Request request = new ExecuteEnrichPolicyAction.Request(restRequest.param("name"));
|
||||
request.setWaitForCompletion(restRequest.paramAsBoolean("wait_for_completion", true));
|
||||
return channel -> client.execute(ExecuteEnrichPolicyAction.INSTANCE, request, new RestToXContentListener<>(channel));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,36 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich.rest;
|
||||
|
||||
import org.elasticsearch.client.node.NodeClient;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.rest.BaseRestHandler;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.action.RestToXContentListener;
|
||||
import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class RestGetEnrichPolicyAction extends BaseRestHandler {
|
||||
|
||||
public RestGetEnrichPolicyAction(final RestController controller) {
|
||||
controller.registerHandler(RestRequest.Method.GET, "/_enrich/policy/{name}", this);
|
||||
controller.registerHandler(RestRequest.Method.GET, "/_enrich/policy", this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "get_enrich_policy";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException {
|
||||
String[] names = Strings.splitStringByCommaToArray(restRequest.param("name"));
|
||||
final GetEnrichPolicyAction.Request request = new GetEnrichPolicyAction.Request(names);
|
||||
return channel -> client.execute(GetEnrichPolicyAction.INSTANCE, request, new RestToXContentListener<>(channel));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,40 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich.rest;
|
||||
|
||||
import org.elasticsearch.client.node.NodeClient;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.rest.BaseRestHandler;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.action.RestToXContentListener;
|
||||
import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class RestPutEnrichPolicyAction extends BaseRestHandler {
|
||||
|
||||
public RestPutEnrichPolicyAction(final RestController controller) {
|
||||
controller.registerHandler(RestRequest.Method.PUT, "/_enrich/policy/{name}", this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "put_enrich_policy";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException {
|
||||
final PutEnrichPolicyAction.Request request = createRequest(restRequest);
|
||||
return channel -> client.execute(PutEnrichPolicyAction.INSTANCE, request, new RestToXContentListener<>(channel));
|
||||
}
|
||||
|
||||
static PutEnrichPolicyAction.Request createRequest(RestRequest restRequest) throws IOException {
|
||||
try (XContentParser parser = restRequest.contentOrSourceParamParser()) {
|
||||
return PutEnrichPolicyAction.fromXContent(parser, restRequest.param("name"));
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,49 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.enrich;
|
||||
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
public abstract class AbstractEnrichTestCase extends ESSingleNodeTestCase {
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> getPlugins() {
|
||||
return Collections.singletonList(LocalStateEnrich.class);
|
||||
}
|
||||
|
||||
protected AtomicReference<Exception> saveEnrichPolicy(String name, EnrichPolicy policy,
|
||||
ClusterService clusterService) throws InterruptedException {
|
||||
CountDownLatch latch = new CountDownLatch(1);
|
||||
AtomicReference<Exception> error = new AtomicReference<>();
|
||||
EnrichStore.putPolicy(name, policy, clusterService, e -> {
|
||||
error.set(e);
|
||||
latch.countDown();
|
||||
});
|
||||
latch.await();
|
||||
return error;
|
||||
}
|
||||
|
||||
protected void deleteEnrichPolicy(String name, ClusterService clusterService) throws Exception {
|
||||
CountDownLatch latch = new CountDownLatch(1);
|
||||
AtomicReference<Exception> error = new AtomicReference<>();
|
||||
EnrichStore.deletePolicy(name, clusterService, e -> {
|
||||
error.set(e);
|
||||
latch.countDown();
|
||||
});
|
||||
latch.await();
|
||||
if (error.get() != null){
|
||||
throw error.get();
|
||||
}
|
||||
}
|
||||
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue