percolator: remove deprecated percolate and mpercolate apis

This commit is contained in:
Martijn van Groningen 2016-12-22 17:57:47 +01:00
parent 72ec3d2661
commit cb2333dacd
40 changed files with 152 additions and 5962 deletions


@ -36,6 +36,7 @@ way to reindex old indices is to use the `reindex` API.
* <<breaking_60_indices_changes>>
* <<breaking_60_scripting_changes>>
* <<breaking_60_ingest_changes>>
* <<breaking_60_percolator_changes>>
include::migrate_6_0/cat.asciidoc[]
@ -60,3 +61,5 @@ include::migrate_6_0/indices.asciidoc[]
include::migrate_6_0/scripting.asciidoc[]
include::migrate_6_0/ingest.asciidoc[]
include::migrate_6_0/percolator.asciidoc[]


@ -0,0 +1,6 @@
[[breaking_60_percolator_changes]]
=== Percolator changes
==== Deprecated percolate and mpercolate APIs have been removed
Instead, the `percolate` query should be used via either the search or msearch APIs.
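As a rough Java sketch of that replacement path (assuming a connected transport `client`, the 5.x-era `PercolateQueryBuilder(field, documentType, document)` constructor, and made-up index and field names):

// Percolate a document through the search API with the `percolate` query,
// instead of the removed percolate API.
BytesReference document = XContentFactory.jsonBuilder()
    .startObject().field("message", "hello world").endObject().bytes();
SearchResponse response = client.prepareSearch("my-index")
    .setQuery(new PercolateQueryBuilder("query", "my-doc-type", document))
    .get();
// Each search hit is a stored percolator query that matches the document.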


@ -35,11 +35,6 @@ There are several thread pools, but the important ones include:
queue_size of `50`. The maximum size for this pool
is `1 + # of available processors`.
`percolate`::
For percolate operations. Thread pool type is `fixed`
with a size of `# of available processors`,
queue_size of `1000`.
`snapshot`::
For snapshot/restore operations. Thread pool type is `scaling` with a
keep-alive of `5m` and a max of `min(5, (# of available processors)/2)`.


@ -129,6 +129,4 @@ include::search/explain.asciidoc[]
include::search/profile.asciidoc[]
include::search/percolate.asciidoc[]
include::search/field-stats.asciidoc[]


@ -1,6 +0,0 @@
[[search-percolate]]
== Percolator
deprecated[5.0.0,Percolate and multi percolate APIs are deprecated and have been replaced by the new <<query-dsl-percolate-query,`percolate` query>>]
For indices created on or after version 5.0.0-alpha1, the percolator automatically indexes the query terms with the percolator queries, which allows the percolator to percolate documents more quickly. It is advisable to reindex any pre-5.0.0 indices to take advantage of this optimization.


@ -1,44 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.action.Action;
import org.elasticsearch.client.ElasticsearchClient;
@Deprecated
public class MultiPercolateAction extends Action<MultiPercolateRequest, MultiPercolateResponse, MultiPercolateRequestBuilder> {
public static final MultiPercolateAction INSTANCE = new MultiPercolateAction();
public static final String NAME = "indices:data/read/mpercolate";
private MultiPercolateAction() {
super(NAME);
}
@Override
public MultiPercolateResponse newResponse() {
return new MultiPercolateResponse();
}
@Override
public MultiPercolateRequestBuilder newRequestBuilder(ElasticsearchClient client) {
return new MultiPercolateRequestBuilder(client, this);
}
}


@ -1,350 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.CompositeIndicesRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringArrayValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringValue;
/**
* A multi percolate request that encapsulates multiple {@link PercolateRequest} instances in a single api call.
*
* @deprecated Instead use multi search API with {@link PercolateQueryBuilder}
*/
@Deprecated
public class MultiPercolateRequest extends ActionRequest implements CompositeIndicesRequest {
private String[] indices;
private String documentType;
private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpenAndForbidClosed();
private List<PercolateRequest> requests = new ArrayList<>();
/**
* Embeds a percolate request to this multi percolate request
*/
public MultiPercolateRequest add(PercolateRequestBuilder requestBuilder) {
return add(requestBuilder.request());
}
/**
* Embeds a percolate request to this multi percolate request
*/
public MultiPercolateRequest add(PercolateRequest request) {
if (request.indices() == null && indices != null) {
request.indices(indices);
}
if (request.documentType() == null && documentType != null) {
request.documentType(documentType);
}
if (request.indicesOptions() == IndicesOptions.strictExpandOpenAndForbidClosed() && indicesOptions != IndicesOptions.strictExpandOpenAndForbidClosed()) {
request.indicesOptions(indicesOptions);
}
requests.add(request);
return this;
}
/**
* Embeds a percolate request whose request body is defined as raw bytes into this multi percolate request
*/
public MultiPercolateRequest add(byte[] data, int from, int length) throws Exception {
return add(new BytesArray(data, from, length), true);
}
/**
* Embeds a percolate request whose request body is defined as raw bytes into this multi percolate request
*/
public MultiPercolateRequest add(BytesReference data, boolean allowExplicitIndex) throws IOException {
XContent xContent = XContentFactory.xContent(data);
int from = 0;
int length = data.length();
byte marker = xContent.streamSeparator();
while (true) {
int nextMarker = findNextMarker(marker, from, data, length);
if (nextMarker == -1) {
break;
}
// support first line with \n
if (nextMarker == 0) {
from = nextMarker + 1;
continue;
}
PercolateRequest percolateRequest = new PercolateRequest();
if (indices != null) {
percolateRequest.indices(indices);
}
if (documentType != null) {
percolateRequest.documentType(documentType);
}
if (indicesOptions != IndicesOptions.strictExpandOpenAndForbidClosed()) {
percolateRequest.indicesOptions(indicesOptions);
}
// now parse the action
if (nextMarker - from > 0) {
// EMPTY is safe here because we don't call namedObject
try (XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, data.slice(from, nextMarker - from))) {
// Move to START_OBJECT; if token is null, the data is empty
XContentParser.Token token = parser.nextToken();
if (token != null) {
// Top level json object
assert token == XContentParser.Token.START_OBJECT;
token = parser.nextToken();
if (token != XContentParser.Token.FIELD_NAME) {
throw new ElasticsearchParseException("Expected field");
}
token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchParseException("expected start object");
}
String percolateAction = parser.currentName();
if ("percolate".equals(percolateAction)) {
parsePercolateAction(parser, percolateRequest, allowExplicitIndex);
} else if ("count".equals(percolateAction)) {
percolateRequest.onlyCount(true);
parsePercolateAction(parser, percolateRequest, allowExplicitIndex);
} else {
throw new ElasticsearchParseException("[{}] isn't a supported percolate operation", percolateAction);
}
}
}
}
// move pointers
from = nextMarker + 1;
// now for the body
nextMarker = findNextMarker(marker, from, data, length);
if (nextMarker == -1) {
break;
}
percolateRequest.source(data.slice(from, nextMarker - from));
// move pointers
from = nextMarker + 1;
add(percolateRequest);
}
return this;
}
private void parsePercolateAction(XContentParser parser, PercolateRequest percolateRequest, boolean allowExplicitIndex) throws IOException {
String globalIndex = indices != null && indices.length > 0 ? indices[0] : null;
Map<String, Object> header = parser.map();
if (header.containsKey("id")) {
GetRequest getRequest = new GetRequest(globalIndex);
percolateRequest.getRequest(getRequest);
for (Map.Entry<String, Object> entry : header.entrySet()) {
Object value = entry.getValue();
if ("id".equals(entry.getKey())) {
getRequest.id(nodeStringValue(value, null));
header.put("id", entry.getValue());
} else if ("index".equals(entry.getKey()) || "indices".equals(entry.getKey())) {
if (!allowExplicitIndex) {
throw new IllegalArgumentException("explicit index in multi percolate is not allowed");
}
getRequest.index(nodeStringValue(value, null));
} else if ("type".equals(entry.getKey())) {
getRequest.type(nodeStringValue(value, null));
} else if ("preference".equals(entry.getKey())) {
getRequest.preference(nodeStringValue(value, null));
} else if ("routing".equals(entry.getKey())) {
getRequest.routing(nodeStringValue(value, null));
} else if ("percolate_index".equals(entry.getKey()) || "percolate_indices".equals(entry.getKey()) || "percolateIndex".equals(entry.getKey()) || "percolateIndices".equals(entry.getKey())) {
percolateRequest.indices(nodeStringArrayValue(value));
} else if ("percolate_type".equals(entry.getKey()) || "percolateType".equals(entry.getKey())) {
percolateRequest.documentType(nodeStringValue(value, null));
} else if ("percolate_preference".equals(entry.getKey()) || "percolatePreference".equals(entry.getKey())) {
percolateRequest.preference(nodeStringValue(value, null));
} else if ("percolate_routing".equals(entry.getKey()) || "percolateRouting".equals(entry.getKey())) {
percolateRequest.routing(nodeStringValue(value, null));
}
}
// Setting values based on get request, if needed...
if ((percolateRequest.indices() == null || percolateRequest.indices().length == 0) && getRequest.index() != null) {
percolateRequest.indices(getRequest.index());
}
if (percolateRequest.documentType() == null && getRequest.type() != null) {
percolateRequest.documentType(getRequest.type());
}
if (percolateRequest.routing() == null && getRequest.routing() != null) {
percolateRequest.routing(getRequest.routing());
}
if (percolateRequest.preference() == null && getRequest.preference() != null) {
percolateRequest.preference(getRequest.preference());
}
} else {
for (Map.Entry<String, Object> entry : header.entrySet()) {
Object value = entry.getValue();
if ("index".equals(entry.getKey()) || "indices".equals(entry.getKey())) {
if (!allowExplicitIndex) {
throw new IllegalArgumentException("explicit index in multi percolate is not allowed");
}
percolateRequest.indices(nodeStringArrayValue(value));
} else if ("type".equals(entry.getKey())) {
percolateRequest.documentType(nodeStringValue(value, null));
} else if ("preference".equals(entry.getKey())) {
percolateRequest.preference(nodeStringValue(value, null));
} else if ("routing".equals(entry.getKey())) {
percolateRequest.routing(nodeStringValue(value, null));
}
}
}
percolateRequest.indicesOptions(IndicesOptions.fromMap(header, indicesOptions));
}
private int findNextMarker(byte marker, int from, BytesReference data, int length) {
for (int i = from; i < length; i++) {
if (data.get(i) == marker) {
return i;
}
}
return -1;
}
/**
* @return The list of already set percolate requests.
*/
public List<PercolateRequest> requests() {
return this.requests;
}
/**
* @return The {@link IndicesOptions} that is used as the default for all percolate requests.
*/
public IndicesOptions indicesOptions() {
return indicesOptions;
}
/**
* Sets the {@link IndicesOptions} for all percolate requests that don't have this set.
*
* Warning: This should be set before adding any percolate requests. Setting this after adding percolate requests
* will have no effect on any percolate requests already added.
*/
public MultiPercolateRequest indicesOptions(IndicesOptions indicesOptions) {
this.indicesOptions = indicesOptions;
return this;
}
/**
* @return The default indices for all percolate requests.
*/
public String[] indices() {
return indices;
}
/**
* Sets the default indices for any percolate request that doesn't have indices defined.
*
* Warning: This should be set before adding any percolate requests. Setting this after adding percolate requests
* will have no effect on any percolate requests already added.
*/
public MultiPercolateRequest indices(String... indices) {
this.indices = indices;
return this;
}
/**
* @return The default document type for all percolate requests
*/
public String documentType() {
return documentType;
}
/**
* Sets the default document type for any percolate request that doesn't have a document type set.
*
* Warning: This should be set before adding any percolate requests. Setting this after adding percolate requests
* will have no effect on any percolate requests already added.
*/
public MultiPercolateRequest documentType(String type) {
this.documentType = type;
return this;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (requests.isEmpty()) {
validationException = addValidationError("no requests added", validationException);
}
for (int i = 0; i < requests.size(); i++) {
ActionRequestValidationException ex = requests.get(i).validate();
if (ex != null) {
if (validationException == null) {
validationException = new ActionRequestValidationException();
}
validationException.addValidationErrors(ex.validationErrors());
}
}
return validationException;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
indices = in.readStringArray();
documentType = in.readOptionalString();
indicesOptions = IndicesOptions.readIndicesOptions(in);
int size = in.readVInt();
for (int i = 0; i < size; i++) {
PercolateRequest request = new PercolateRequest();
request.readFrom(in);
requests.add(request);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArrayNullable(indices);
out.writeOptionalString(documentType);
indicesOptions.writeIndicesOptions(out);
out.writeVInt(requests.size());
for (PercolateRequest request : requests) {
request.writeTo(out);
}
}
}
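For context, a minimal sketch (with made-up index, type, and field names) of the line-delimited body format that the `add(BytesReference, boolean)` method above parses: each pair of lines is a header object naming the `percolate` or `count` action, followed by the percolate source:

String body =
    "{\"percolate\": {\"index\": \"my-index\", \"type\": \"my-type\"}}\n" +
    "{\"doc\": {\"message\": \"hello world\"}}\n" +
    "{\"count\": {\"index\": \"my-index\", \"type\": \"my-type\"}}\n" +
    "{\"doc\": {\"message\": \"another document\"}}\n";
// allowExplicitIndex = true lets the per-request headers name their own index.
MultiPercolateRequest request = new MultiPercolateRequest().add(new BytesArray(body), true);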


@ -1,62 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.ElasticsearchClient;
/**
* A builder to ease defining a {@link MultiPercolateRequest} instance.
*
* @deprecated Instead use multi search API with {@link PercolateQueryBuilder}
*/
@Deprecated
public class MultiPercolateRequestBuilder extends ActionRequestBuilder<MultiPercolateRequest, MultiPercolateResponse, MultiPercolateRequestBuilder> {
public MultiPercolateRequestBuilder(ElasticsearchClient client, MultiPercolateAction action) {
super(client, action, new MultiPercolateRequest());
}
/**
* Bundles the specified percolate request to the multi percolate request.
*/
public MultiPercolateRequestBuilder add(PercolateRequest percolateRequest) {
request.add(percolateRequest);
return this;
}
/**
* Bundles the specified percolate request build to the multi percolate request.
*/
public MultiPercolateRequestBuilder add(PercolateRequestBuilder percolateRequestBuilder) {
request.add(percolateRequestBuilder);
return this;
}
/**
* Specifies how to globally ignore indices that are not available and how to deal with wildcard indices expressions.
* <p>
* Invoke this method before invoking {@link #add(PercolateRequestBuilder)}.
*/
public MultiPercolateRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) {
request.indicesOptions(indicesOptions);
return this;
}
}
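A hedged usage sketch, assuming a connected transport `client` and two already-built percolate requests; the builder is constructed directly against `MultiPercolateAction.INSTANCE`:

// Bundle two percolate requests into one round trip (deprecated path).
MultiPercolateResponse multiResponse =
    new MultiPercolateRequestBuilder(client, MultiPercolateAction.INSTANCE)
        .add(percolateRequestA)
        .add(percolateRequestB)
        .get();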


@ -1,187 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
/**
* Represents the response of a multi percolate request.
*
* Each item represents the response of a percolator request, and the items are returned in the same order as the
* percolator requests were defined in the multi percolate request.
*
* @deprecated Instead use multi search API with {@link PercolateQueryBuilder}
*/
@Deprecated
public class MultiPercolateResponse extends ActionResponse implements Iterable<MultiPercolateResponse.Item>, ToXContentObject {
private Item[] items;
MultiPercolateResponse(Item[] items) {
this.items = items;
}
MultiPercolateResponse() {
this.items = new Item[0];
}
@Override
public Iterator<Item> iterator() {
return Arrays.stream(items).iterator();
}
/**
* Same as {@link #getItems()}
*/
public Item[] items() {
return items;
}
/**
* @return the percolate responses as items.
*/
public Item[] getItems() {
return items;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.startArray(Fields.RESPONSES);
for (MultiPercolateResponse.Item item : items) {
if (item.isFailure()) {
builder.startObject();
ElasticsearchException.renderException(builder, params, item.getFailure());
builder.endObject();
} else {
item.getResponse().toXContent(builder, params);
}
}
builder.endArray();
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(items.length);
for (Item item : items) {
item.writeTo(out);
}
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
items = new Item[size];
for (int i = 0; i < items.length; i++) {
items[i] = new Item();
items[i].readFrom(in);
}
}
/**
* Encapsulates a single percolator response which may contain an error or the actual percolator response itself.
*/
public static class Item implements Streamable {
private PercolateResponse response;
private Exception exception;
Item(PercolateResponse response) {
this.response = response;
}
Item(Exception exception) {
this.exception = exception;
}
Item() {
}
/**
* @return The percolator response or <code>null</code> if there was an error.
*/
@Nullable
public PercolateResponse getResponse() {
return response;
}
/**
* @return An error description if there was an error or <code>null</code> if the percolate request was successful
*/
@Nullable
public String getErrorMessage() {
return exception == null ? null : exception.getMessage();
}
/**
* @return <code>true</code> if the percolator request that this item represents failed otherwise
* <code>false</code> is returned.
*/
public boolean isFailure() {
return exception != null;
}
public Exception getFailure() {
return exception;
}
@Override
public void readFrom(StreamInput in) throws IOException {
if (in.readBoolean()) {
response = new PercolateResponse();
response.readFrom(in);
} else {
exception = in.readException();
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
if (response != null) {
out.writeBoolean(true);
response.writeTo(out);
} else {
out.writeBoolean(false);
out.writeException(exception);
}
}
}
static final class Fields {
static final String RESPONSES = "responses";
static final String ERROR = "error";
}
}
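A short sketch of consuming this response, assuming `multiResponse` came back from a multi percolate call and a `logger` is available; items come back in the same order as the embedded requests:

for (MultiPercolateResponse.Item item : multiResponse) {
    if (item.isFailure()) {
        // The item carries the error instead of a PercolateResponse.
        logger.warn("percolate item failed: {}", item.getErrorMessage());
    } else {
        long matched = item.getResponse().getCount();
    }
}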


@ -1,44 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.action.Action;
import org.elasticsearch.client.ElasticsearchClient;
@Deprecated
public class PercolateAction extends Action<PercolateRequest, PercolateResponse, PercolateRequestBuilder> {
public static final PercolateAction INSTANCE = new PercolateAction();
public static final String NAME = "indices:data/read/percolate";
private PercolateAction() {
super(NAME);
}
@Override
public PercolateResponse newResponse() {
return new PercolateResponse();
}
@Override
public PercolateRequestBuilder newRequestBuilder(ElasticsearchClient client) {
return new PercolateRequestBuilder(client, this);
}
}


@ -1,284 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.action.ValidateActions.addValidationError;
/**
* A request to execute a percolate operation.
*
* @deprecated Instead use search API with {@link PercolateQueryBuilder}
*/
@Deprecated
public class PercolateRequest extends ActionRequest implements IndicesRequest.Replaceable {
protected String[] indices;
private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpenAndForbidClosed();
private String documentType;
private String routing;
private String preference;
private boolean onlyCount;
private GetRequest getRequest;
private BytesReference source;
public String[] indices() {
return indices;
}
public final PercolateRequest indices(String... indices) {
this.indices = indices;
return this;
}
public IndicesOptions indicesOptions() {
return indicesOptions;
}
public PercolateRequest indicesOptions(IndicesOptions indicesOptions) {
this.indicesOptions = indicesOptions;
return this;
}
/**
* Getter for {@link #documentType(String)}
*/
public String documentType() {
return documentType;
}
/**
* Sets the type of the document to percolate. This is important as it selects the mapping to be used to parse
* the document.
*/
public PercolateRequest documentType(String type) {
this.documentType = type;
return this;
}
/**
* Getter for {@link #routing(String)}
*/
public String routing() {
return routing;
}
/**
* A comma separated list of routing values to control the shards the search will be executed on.
*/
public PercolateRequest routing(String routing) {
this.routing = routing;
return this;
}
/**
* Getter for {@link #preference(String)}
*/
public String preference() {
return preference;
}
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
* <tt>_local</tt> to prefer local shards, <tt>_primary</tt> to execute only on primary shards, or
* a custom value, which guarantees that the same order will be used across different requests.
*/
public PercolateRequest preference(String preference) {
this.preference = preference;
return this;
}
/**
* Getter for {@link #getRequest(GetRequest)}
*/
public GetRequest getRequest() {
return getRequest;
}
/**
* This defines where to fetch the document to be percolated from, which is an alternative to defining the document
* to percolate in the request body.
*
* If this is defined then it will override the document specified in the request body.
*/
public PercolateRequest getRequest(GetRequest getRequest) {
this.getRequest = getRequest;
return this;
}
/**
* @return The request body in its raw form.
*/
public BytesReference source() {
return source;
}
/**
* Raw version of {@link #source(PercolateSourceBuilder)}
*/
public PercolateRequest source(Map document) throws ElasticsearchGenerationException {
return source(document, Requests.CONTENT_TYPE);
}
/**
* Raw version of {@link #source(PercolateSourceBuilder)}
*/
@SuppressWarnings("unchecked")
public PercolateRequest source(Map document, XContentType contentType) throws ElasticsearchGenerationException {
try {
XContentBuilder builder = XContentFactory.contentBuilder(contentType);
builder.map(document);
return source(builder);
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + document + "]", e);
}
}
/**
* Raw version of {@link #source(PercolateSourceBuilder)}
*/
public PercolateRequest source(String document) {
this.source = new BytesArray(document);
return this;
}
/**
* Raw version of {@link #source(PercolateSourceBuilder)}
*/
public PercolateRequest source(XContentBuilder documentBuilder) {
source = documentBuilder.bytes();
return this;
}
/**
* Raw version of {@link #source(PercolateSourceBuilder)}
*/
public PercolateRequest source(byte[] document) {
return source(document, 0, document.length);
}
/**
* Raw version of {@link #source(PercolateSourceBuilder)}
*/
public PercolateRequest source(byte[] source, int offset, int length) {
return source(new BytesArray(source, offset, length));
}
/**
* Raw version of {@link #source(PercolateSourceBuilder)}
*/
public PercolateRequest source(BytesReference source) {
this.source = source;
return this;
}
/**
* Sets the request body definition for this percolate request from a {@link PercolateSourceBuilder}.
*
* This is the preferred way to set the request body.
*/
public PercolateRequest source(PercolateSourceBuilder sourceBuilder) {
this.source = sourceBuilder.buildAsBytes(Requests.CONTENT_TYPE);
return this;
}
/**
* Getter for {@link #onlyCount(boolean)}
*/
public boolean onlyCount() {
return onlyCount;
}
/**
* Sets whether this percolate request should only count the number of percolator queries that match
* the document being percolated and not keep track of the actual queries that have matched.
*/
public PercolateRequest onlyCount(boolean onlyCount) {
this.onlyCount = onlyCount;
return this;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (documentType == null) {
validationException = addValidationError("type is missing", validationException);
}
if (source == null && getRequest == null) {
validationException = addValidationError("source or get is missing", validationException);
}
if (getRequest != null && getRequest.storedFields() != null) {
validationException = addValidationError("get stored fields option isn't supported via percolate request", validationException);
}
return validationException;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
indices = in.readStringArray();
indicesOptions = IndicesOptions.readIndicesOptions(in);
documentType = in.readString();
routing = in.readOptionalString();
preference = in.readOptionalString();
source = in.readBytesReference();
if (in.readBoolean()) {
getRequest = new GetRequest();
getRequest.readFrom(in);
}
onlyCount = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArrayNullable(indices);
indicesOptions.writeIndicesOptions(out);
out.writeString(documentType);
out.writeOptionalString(routing);
out.writeOptionalString(preference);
out.writeBytesReference(source);
if (getRequest != null) {
out.writeBoolean(true);
getRequest.writeTo(out);
} else {
out.writeBoolean(false);
}
out.writeBoolean(onlyCount);
}
}


@ -1,274 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import java.util.Map;
/**
* A builder to ease defining a percolate request.
*
* @deprecated Instead use search API with {@link PercolateQueryBuilder}
*/
@Deprecated
public class PercolateRequestBuilder extends ActionRequestBuilder<PercolateRequest, PercolateResponse, PercolateRequestBuilder> {
private PercolateSourceBuilder sourceBuilder;
public PercolateRequestBuilder(ElasticsearchClient client, PercolateAction action) {
super(client, action, new PercolateRequest());
}
public PercolateRequestBuilder setIndices(String... indices) {
request.indices(indices);
return this;
}
public PercolateRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) {
request.indicesOptions(indicesOptions);
return this;
}
/**
* Sets the type of the document to percolate. This is important as it selects the mapping to be used to parse
* the document.
*/
public PercolateRequestBuilder setDocumentType(String type) {
request.documentType(type);
return this;
}
/**
* A comma separated list of routing values to control the shards the search will be executed on.
*/
public PercolateRequestBuilder setRouting(String routing) {
request.routing(routing);
return this;
}
/**
* List of routing values to control the shards the search will be executed on.
*/
public PercolateRequestBuilder setRouting(String... routings) {
request.routing(Strings.arrayToCommaDelimitedString(routings));
return this;
}
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
* <tt>_local</tt> to prefer local shards, <tt>_primary</tt> to execute only on primary shards, or
* a custom value, which guarantees that the same order will be used across different requests.
*/
public PercolateRequestBuilder setPreference(String preference) {
request.preference(preference);
return this;
}
/**
* Enables percolating an existing document. Instead of specifying the source of the document to percolate, define
* a get request that will fetch a document and use its source.
*/
public PercolateRequestBuilder setGetRequest(GetRequest getRequest) {
request.getRequest(getRequest);
return this;
}
/**
* Whether to only return the total count and not keep track of the matches (count percolation).
*/
public PercolateRequestBuilder setOnlyCount(boolean onlyCount) {
request.onlyCount(onlyCount);
return this;
}
/**
* Delegates to {@link PercolateSourceBuilder#setSize(int)}
*/
public PercolateRequestBuilder setSize(int size) {
sourceBuilder().setSize(size);
return this;
}
/**
* Delegates to {@link PercolateSourceBuilder#setSort(boolean)}
*/
public PercolateRequestBuilder setSortByScore(boolean sort) {
sourceBuilder().setSort(sort);
return this;
}
/**
* Delegates to {@link PercolateSourceBuilder#addSort(SortBuilder)}
*/
public PercolateRequestBuilder addSort(SortBuilder<?> sort) {
sourceBuilder().addSort(sort);
return this;
}
/**
* Delegates to {@link PercolateSourceBuilder#setTrackScores(boolean)}
*/
public PercolateRequestBuilder setScore(boolean score) {
sourceBuilder().setTrackScores(score);
return this;
}
/**
* Delegates to {@link PercolateSourceBuilder#setDoc(PercolateSourceBuilder.DocBuilder)}
*/
public PercolateRequestBuilder setPercolateDoc(PercolateSourceBuilder.DocBuilder docBuilder) {
sourceBuilder().setDoc(docBuilder);
return this;
}
/**
* Delegates to {@link PercolateSourceBuilder#setQueryBuilder(QueryBuilder)}
*/
public PercolateRequestBuilder setPercolateQuery(QueryBuilder queryBuilder) {
sourceBuilder().setQueryBuilder(queryBuilder);
return this;
}
/**
* Delegates to {@link PercolateSourceBuilder#setHighlightBuilder(HighlightBuilder)}
*/
public PercolateRequestBuilder setHighlightBuilder(HighlightBuilder highlightBuilder) {
sourceBuilder().setHighlightBuilder(highlightBuilder);
return this;
}
/**
* Delegates to
* {@link PercolateSourceBuilder#addAggregation(AggregationBuilder)}
*/
public PercolateRequestBuilder addAggregation(AggregationBuilder aggregationBuilder) {
sourceBuilder().addAggregation(aggregationBuilder);
return this;
}
/**
* Delegates to
* {@link PercolateSourceBuilder#addAggregation(PipelineAggregationBuilder)}
*/
public PercolateRequestBuilder addAggregation(PipelineAggregationBuilder aggregationBuilder) {
sourceBuilder().addAggregation(aggregationBuilder);
return this;
}
/**
* Sets the percolate request definition directly on the request. This will
* overwrite any definitions set by any of the delegate methods.
*/
public PercolateRequestBuilder setSource(PercolateSourceBuilder source) {
sourceBuilder = source;
return this;
}
/**
* Raw variant of {@link #setSource(PercolateSourceBuilder)}
*/
public PercolateRequestBuilder setSource(Map<String, Object> source) {
request.source(source);
return this;
}
/**
* Raw variant of {@link #setSource(PercolateSourceBuilder)}
*/
public PercolateRequestBuilder setSource(Map<String, Object> source, XContentType contentType) {
request.source(source, contentType);
return this;
}
/**
* Raw variant of {@link #setSource(PercolateSourceBuilder)}
*/
public PercolateRequestBuilder setSource(String source) {
request.source(source);
return this;
}
/**
* Raw variant of {@link #setSource(PercolateSourceBuilder)}
*/
public PercolateRequestBuilder setSource(XContentBuilder sourceBuilder) {
request.source(sourceBuilder);
return this;
}
/**
* Raw variant of {@link #setSource(PercolateSourceBuilder)}
*/
public PercolateRequestBuilder setSource(BytesReference source) {
request.source(source);
return this;
}
/**
* Raw variant of {@link #setSource(PercolateSourceBuilder)}
*/
public PercolateRequestBuilder setSource(byte[] source) {
request.source(source);
return this;
}
/**
* Raw variant of {@link #setSource(PercolateSourceBuilder)}
*/
public PercolateRequestBuilder setSource(byte[] source, int offset, int length) {
request.source(source, offset, length);
return this;
}
private PercolateSourceBuilder sourceBuilder() {
if (sourceBuilder == null) {
sourceBuilder = new PercolateSourceBuilder();
}
return sourceBuilder;
}
@Override
public PercolateRequest request() {
if (sourceBuilder != null) {
request.source(sourceBuilder);
}
return request;
}
@Override
protected PercolateRequest beforeExecute(PercolateRequest request) {
if (sourceBuilder != null) {
request.source(sourceBuilder);
}
return request;
}
}
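A hedged sketch of how the deprecated single percolate call was typically issued with this builder, assuming a connected transport `client` and made-up index, type, and field names:

// Percolate an ad-hoc document via the deprecated percolate API.
PercolateResponse response =
    new PercolateRequestBuilder(client, PercolateAction.INSTANCE)
        .setIndices("my-index")
        .setDocumentType("my-doc-type")
        .setPercolateDoc(PercolateSourceBuilder.docBuilder().setDoc("message", "hello world"))
        .get();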


@ -1,297 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.action.RestActions;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
* Encapsulates the response of a percolator request.
*
* @deprecated Instead use search API with {@link PercolateQueryBuilder}
*/
@Deprecated
public class PercolateResponse extends BroadcastResponse implements Iterable<PercolateResponse.Match>, ToXContentObject {
public static final Match[] EMPTY = new Match[0];
// PercolateQuery emits this score if no 'query' is defined in the percolate request
public static final float NO_SCORE = 0.0f;
private long tookInMillis;
private Match[] matches;
private long count;
private InternalAggregations aggregations;
PercolateResponse(int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures,
Match[] matches, long count, long tookInMillis, InternalAggregations aggregations) {
super(totalShards, successfulShards, failedShards, shardFailures);
if (tookInMillis < 0) {
throw new IllegalArgumentException("tookInMillis must be positive but was: " + tookInMillis);
}
this.tookInMillis = tookInMillis;
this.matches = matches;
this.count = count;
this.aggregations = aggregations;
}
PercolateResponse() {
}
/**
* How long the percolate took.
*/
public TimeValue getTook() {
return new TimeValue(tookInMillis);
}
/**
* How long the percolate took in milliseconds.
*/
public long getTookInMillis() {
return tookInMillis;
}
/**
* @return The queries that match with the document being percolated. This can return <code>null</code> if the request only asked for a count of matches.
*/
public Match[] getMatches() {
return this.matches;
}
/**
* @return The total number of queries that have matched with the document being percolated.
*/
public long getCount() {
return count;
}
/**
* @return Any aggregations that have been executed on the query metadata. This can return <code>null</code>.
*/
public InternalAggregations getAggregations() {
return aggregations;
}
@Override
public Iterator<Match> iterator() {
return Arrays.asList(matches).iterator();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
innerToXContent(builder, params);
builder.endObject();
return builder;
}
public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.TOOK, tookInMillis);
RestActions.buildBroadcastShardsHeader(builder, params, this);
builder.field(Fields.TOTAL, count);
if (matches != null) {
builder.startArray(Fields.MATCHES);
boolean justIds = "ids".equals(params.param("percolate_format"));
if (justIds) {
for (PercolateResponse.Match match : matches) {
builder.value(match.getId());
}
} else {
for (PercolateResponse.Match match : matches) {
builder.startObject();
builder.field(Fields._INDEX, match.getIndex());
builder.field(Fields._ID, match.getId());
float score = match.getScore();
if (score != NO_SCORE) {
builder.field(Fields._SCORE, match.getScore());
}
if (match.getHighlightFields().isEmpty() == false) {
builder.startObject(Fields.HIGHLIGHT);
for (HighlightField field : match.getHighlightFields().values()) {
field.toXContent(builder, params);
}
builder.endObject();
}
builder.endObject();
}
}
builder.endArray();
}
if (aggregations != null) {
aggregations.toXContent(builder, params);
}
return builder;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
tookInMillis = in.readVLong();
count = in.readVLong();
int size = in.readVInt();
if (size != -1) {
matches = new Match[size];
for (int i = 0; i < size; i++) {
matches[i] = new Match();
matches[i].readFrom(in);
}
}
aggregations = InternalAggregations.readOptionalAggregations(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVLong(tookInMillis);
out.writeVLong(count);
if (matches == null) {
out.writeVInt(-1);
} else {
out.writeVInt(matches.length);
for (Match match : matches) {
match.writeTo(out);
}
}
out.writeOptionalStreamable(aggregations);
}
/**
* Represents a query that has matched with the document that was percolated.
*/
public static class Match implements Streamable {
private Text index;
private Text id;
private float score;
private Map<String, HighlightField> hl;
/**
* Constructor only for internal usage.
*/
public Match(Text index, Text id, float score, Map<String, HighlightField> hl) {
this.id = id;
this.score = score;
this.index = index;
this.hl = hl;
}
/**
* Constructor only for internal usage.
*/
public Match(Text index, Text id, float score) {
this.id = id;
this.score = score;
this.index = index;
}
Match() {
}
/**
* @return The index that the matched percolator query resides in.
*/
public Text getIndex() {
return index;
}
/**
* @return The id of the matched percolator query.
*/
public Text getId() {
return id;
}
/**
* @return If a query was specified in the percolate request, this returns the score representing how well that
* query matched the metadata associated with the matching query; otherwise {@link Float#NaN} is returned.
*/
public float getScore() {
return score;
}
/**
* @return If highlighting was specified in the percolate request, this returns highlight snippets for each
* matching field in the document being percolated based on this query; otherwise <code>null</code> is returned.
*/
@Nullable
public Map<String, HighlightField> getHighlightFields() {
return hl;
}
@Override
public void readFrom(StreamInput in) throws IOException {
id = in.readText();
index = in.readText();
score = in.readFloat();
int size = in.readVInt();
if (size > 0) {
hl = new HashMap<>(size);
for (int j = 0; j < size; j++) {
hl.put(in.readString(), HighlightField.readHighlightField(in));
}
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeText(id);
out.writeText(index);
out.writeFloat(score);
if (hl != null) {
out.writeVInt(hl.size());
for (Map.Entry<String, HighlightField> entry : hl.entrySet()) {
out.writeString(entry.getKey());
entry.getValue().writeTo(out);
}
} else {
out.writeVInt(0);
}
}
}
static final class Fields {
static final String TOOK = "took";
static final String TOTAL = "total";
static final String MATCHES = "matches";
static final String _INDEX = "_index";
static final String _ID = "_id";
static final String _SCORE = "_score";
static final String HIGHLIGHT = "highlight";
}
}
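A sketch of reading the matches out of the response above, assuming the request was not count-only:

for (PercolateResponse.Match match : response) {
    // Each match identifies a stored percolator query that matched the document.
    String queryId = match.getId().string();
    float score = match.getScore(); // NO_SCORE unless a query was set on the request
}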


@ -1,266 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.ScoreSortBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Builder to create the percolate request body.
*
* @deprecated Instead use search API with {@link PercolateQueryBuilder}
*/
@Deprecated
public class PercolateSourceBuilder extends ToXContentToBytes {
private DocBuilder docBuilder;
private QueryBuilder queryBuilder;
private Integer size;
private List<SortBuilder<?>> sorts;
private Boolean trackScores;
private HighlightBuilder highlightBuilder;
private List<AggregationBuilder> aggregationBuilders;
private List<PipelineAggregationBuilder> pipelineAggregationBuilders;
/**
* Sets the document to run the percolate queries against.
*/
public PercolateSourceBuilder setDoc(DocBuilder docBuilder) {
this.docBuilder = docBuilder;
return this;
}
/**
* Sets a query to reduce the number of percolate queries to be evaluated and score the queries that match based
* on this query.
*/
public PercolateSourceBuilder setQueryBuilder(QueryBuilder queryBuilder) {
this.queryBuilder = queryBuilder;
return this;
}
/**
* Limits the maximum number of percolate query matches to be returned.
*/
public PercolateSourceBuilder setSize(int size) {
this.size = size;
return this;
}
/**
* Similar to {@link #setTrackScores(boolean)}, but also controls whether the matches are sorted by score descending.
*/
public PercolateSourceBuilder setSort(boolean sort) {
if (sort) {
addSort(new ScoreSortBuilder());
} else {
this.sorts = null;
}
return this;
}
/**
* Adds a sort builder. Only sorting by score desc is supported.
*
* By default the matching percolator queries are returned in an undefined order.
*/
public PercolateSourceBuilder addSort(SortBuilder<?> sort) {
if (sorts == null) {
sorts = new ArrayList<>();
}
sorts.add(sort);
return this;
}
/**
* Whether to compute a score for each match and include it in the response. The score is based on
* {@link #setQueryBuilder(QueryBuilder)}.
*/
public PercolateSourceBuilder setTrackScores(boolean trackScores) {
this.trackScores = trackScores;
return this;
}
/**
* Enables highlighting for the percolated document. For each matching percolator query, the percolated document is highlighted.
*/
public PercolateSourceBuilder setHighlightBuilder(HighlightBuilder highlightBuilder) {
this.highlightBuilder = highlightBuilder;
return this;
}
/**
* Add an aggregation definition.
*/
public PercolateSourceBuilder addAggregation(AggregationBuilder aggregationBuilder) {
if (aggregationBuilders == null) {
aggregationBuilders = new ArrayList<>();
}
aggregationBuilders.add(aggregationBuilder);
return this;
}
/**
* Add an aggregation definition.
*/
public PercolateSourceBuilder addAggregation(PipelineAggregationBuilder aggregationBuilder) {
if (pipelineAggregationBuilders == null) {
pipelineAggregationBuilders = new ArrayList<>();
}
pipelineAggregationBuilders.add(aggregationBuilder);
return this;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (docBuilder != null) {
docBuilder.toXContent(builder, params);
}
if (queryBuilder != null) {
builder.field("query");
queryBuilder.toXContent(builder, params);
}
if (size != null) {
builder.field("size", size);
}
if (sorts != null) {
builder.startArray("sort");
for (SortBuilder<?> sort : sorts) {
sort.toXContent(builder, params);
}
builder.endArray();
}
if (trackScores != null) {
builder.field("track_scores", trackScores);
}
if (highlightBuilder != null) {
builder.field(SearchSourceBuilder.HIGHLIGHT_FIELD.getPreferredName(), highlightBuilder);
}
if (aggregationBuilders != null || pipelineAggregationBuilders != null) {
builder.field("aggregations");
builder.startObject();
if (aggregationBuilders != null) {
for (AggregationBuilder aggregation : aggregationBuilders) {
aggregation.toXContent(builder, params);
}
}
if (pipelineAggregationBuilders != null) {
for (PipelineAggregationBuilder aggregation : pipelineAggregationBuilders) {
aggregation.toXContent(builder, params);
}
}
builder.endObject();
}
builder.endObject();
return builder;
}
/**
* @return A new {@link DocBuilder} instance.
*/
public static DocBuilder docBuilder() {
return new DocBuilder();
}
/**
* A builder for defining the document to be percolated in various ways.
*/
public static class DocBuilder implements ToXContent {
private BytesReference doc;
/**
* Sets the document to be percolated.
*/
public DocBuilder setDoc(BytesReference doc) {
this.doc = doc;
return this;
}
/**
* Sets the document to be percolated.
*/
public DocBuilder setDoc(String field, Object value) {
Map<String, Object> values = new HashMap<>(2);
values.put(field, value);
setDoc(values);
return this;
}
/**
* Sets the document to be percolated.
*/
public DocBuilder setDoc(String doc) {
this.doc = new BytesArray(doc);
return this;
}
/**
* Sets the document to be percolated.
*/
public DocBuilder setDoc(XContentBuilder doc) {
this.doc = doc.bytes();
return this;
}
/**
* Sets the document to be percolated.
*/
public DocBuilder setDoc(Map doc) {
return setDoc(doc, Requests.CONTENT_TYPE);
}
@SuppressWarnings("unchecked")
public DocBuilder setDoc(Map doc, XContentType contentType) {
try {
return setDoc(XContentFactory.contentBuilder(contentType).map(doc));
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + doc + "]", e);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.rawField("doc", doc);
}
}
}
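A sketch of building the percolate request body with this source builder (names are illustrative), with the JSON it roughly serializes to noted in comments:

PercolateSourceBuilder source = new PercolateSourceBuilder()
    .setDoc(PercolateSourceBuilder.docBuilder().setDoc("message", "hello world"))
    .setQueryBuilder(QueryBuilders.termQuery("priority", "high"))
    .setSize(10)           // return at most 10 matching queries
    .setTrackScores(true); // include a score per match, based on the query above
// Roughly serializes to:
// {"doc": {"message": "hello world"}, "query": {"term": {...}}, "size": 10, "track_scores": true}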


@ -19,26 +19,21 @@
package org.elasticsearch.percolator;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.rest.RestHandler;
import org.elasticsearch.search.fetch.FetchSubPhase;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static java.util.Collections.singletonList;
public class PercolatorPlugin extends Plugin implements MapperPlugin, ActionPlugin, SearchPlugin {
public class PercolatorPlugin extends Plugin implements MapperPlugin, SearchPlugin {
private final Settings settings;
@ -46,17 +41,6 @@ public class PercolatorPlugin extends Plugin implements MapperPlugin, ActionPlug
this.settings = settings; this.settings = settings;
} }
@Override
public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
return Arrays.asList(new ActionHandler<>(PercolateAction.INSTANCE, TransportPercolateAction.class),
new ActionHandler<>(MultiPercolateAction.INSTANCE, TransportMultiPercolateAction.class));
}
@Override
public List<Class<? extends RestHandler>> getRestHandlers() {
return Arrays.asList(RestPercolateAction.class, RestMultiPercolateAction.class);
}
@Override @Override
public List<QuerySpec<?>> getQueries() { public List<QuerySpec<?>> getQueries() {
return singletonList(new QuerySpec<>(PercolateQueryBuilder.NAME, PercolateQueryBuilder::new, PercolateQueryBuilder::fromXContent)); return singletonList(new QuerySpec<>(PercolateQueryBuilder.NAME, PercolateQueryBuilder::new, PercolateQueryBuilder::fromXContent));

View File

@ -1,66 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.GET;
import static org.elasticsearch.rest.RestRequest.Method.POST;
@Deprecated
public class RestMultiPercolateAction extends BaseRestHandler {
private final boolean allowExplicitIndex;
@Inject
public RestMultiPercolateAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(POST, "/_mpercolate", this);
controller.registerHandler(POST, "/{index}/_mpercolate", this);
controller.registerHandler(POST, "/{index}/{type}/_mpercolate", this);
controller.registerHandler(GET, "/_mpercolate", this);
controller.registerHandler(GET, "/{index}/_mpercolate", this);
controller.registerHandler(GET, "/{index}/{type}/_mpercolate", this);
this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings);
}
@Override
public RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException {
MultiPercolateRequest multiPercolateRequest = new MultiPercolateRequest();
multiPercolateRequest.indicesOptions(IndicesOptions.fromRequest(restRequest, multiPercolateRequest.indicesOptions()));
multiPercolateRequest.indices(Strings.splitStringByCommaToArray(restRequest.param("index")));
multiPercolateRequest.documentType(restRequest.param("type"));
multiPercolateRequest.add(restRequest.contentOrSourceParam(), allowExplicitIndex);
return channel -> client.execute(MultiPercolateAction.INSTANCE, multiPercolateRequest, new RestToXContentListener<>(channel));
}
}

View File

@ -1,152 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestActions;
import org.elasticsearch.rest.action.RestToXContentListener;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.GET;
import static org.elasticsearch.rest.RestRequest.Method.POST;
@Deprecated
public class RestPercolateAction extends BaseRestHandler {
@Inject
public RestPercolateAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(GET, "/{index}/{type}/_percolate", this);
controller.registerHandler(POST, "/{index}/{type}/_percolate", this);
RestPercolateExistingDocHandler existingDocHandler = new RestPercolateExistingDocHandler(settings);
controller.registerHandler(GET, "/{index}/{type}/{id}/_percolate", existingDocHandler);
controller.registerHandler(POST, "/{index}/{type}/{id}/_percolate", existingDocHandler);
RestCountPercolateDocHandler countHandler = new RestCountPercolateDocHandler(settings);
controller.registerHandler(GET, "/{index}/{type}/_percolate/count", countHandler);
controller.registerHandler(POST, "/{index}/{type}/_percolate/count", countHandler);
RestCountPercolateExistingDocHandler countExistingDocHandler = new RestCountPercolateExistingDocHandler(settings);
controller.registerHandler(GET, "/{index}/{type}/{id}/_percolate/count", countExistingDocHandler);
controller.registerHandler(POST, "/{index}/{type}/{id}/_percolate/count", countExistingDocHandler);
}
private RestChannelConsumer parseDocPercolate(PercolateRequest percolateRequest, RestRequest restRequest, NodeClient client) {
percolateRequest.indices(Strings.splitStringByCommaToArray(restRequest.param("index")));
percolateRequest.documentType(restRequest.param("type"));
percolateRequest.routing(restRequest.param("routing"));
percolateRequest.preference(restRequest.param("preference"));
percolateRequest.source(restRequest.contentOrSourceParam());
percolateRequest.indicesOptions(IndicesOptions.fromRequest(restRequest, percolateRequest.indicesOptions()));
return channel -> executePercolate(client, percolateRequest, channel);
}
private RestChannelConsumer parseExistingDocPercolate(PercolateRequest percolateRequest, RestRequest restRequest, NodeClient client) {
String index = restRequest.param("index");
String type = restRequest.param("type");
percolateRequest.indices(Strings.splitStringByCommaToArray(restRequest.param("percolate_index", index)));
percolateRequest.documentType(restRequest.param("percolate_type", type));
GetRequest getRequest = new GetRequest(index, type,
restRequest.param("id"));
getRequest.routing(restRequest.param("routing"));
getRequest.preference(restRequest.param("preference"));
getRequest.refresh(restRequest.paramAsBoolean("refresh", getRequest.refresh()));
getRequest.realtime(restRequest.paramAsBoolean("realtime", getRequest.realtime()));
getRequest.version(RestActions.parseVersion(restRequest));
getRequest.versionType(VersionType.fromString(restRequest.param("version_type"), getRequest.versionType()));
percolateRequest.getRequest(getRequest);
percolateRequest.routing(restRequest.param("percolate_routing"));
percolateRequest.preference(restRequest.param("percolate_preference"));
percolateRequest.source(restRequest.contentOrSourceParam());
percolateRequest.indicesOptions(IndicesOptions.fromRequest(restRequest, percolateRequest.indicesOptions()));
return channel -> executePercolate(client, percolateRequest, channel);
}
private void executePercolate(final NodeClient client, final PercolateRequest percolateRequest, final RestChannel restChannel) {
client.execute(PercolateAction.INSTANCE, percolateRequest, new RestToXContentListener<>(restChannel));
}
@Override
public RestChannelConsumer prepareRequest(RestRequest restRequest, final NodeClient client) throws IOException {
PercolateRequest percolateRequest = new PercolateRequest();
return parseDocPercolate(percolateRequest, restRequest, client);
}
private final class RestCountPercolateDocHandler extends BaseRestHandler {
private RestCountPercolateDocHandler(Settings settings) {
super(settings);
}
@Override
public RestChannelConsumer prepareRequest(RestRequest restRequest, final NodeClient client) throws IOException {
PercolateRequest percolateRequest = new PercolateRequest();
percolateRequest.onlyCount(true);
return parseDocPercolate(percolateRequest, restRequest, client);
}
}
private final class RestPercolateExistingDocHandler extends BaseRestHandler {
RestPercolateExistingDocHandler(Settings settings) {
super(settings);
}
@Override
public RestChannelConsumer prepareRequest(RestRequest restRequest, final NodeClient client) throws IOException {
PercolateRequest percolateRequest = new PercolateRequest();
return parseExistingDocPercolate(percolateRequest, restRequest, client);
}
}
private final class RestCountPercolateExistingDocHandler extends BaseRestHandler {
RestCountPercolateExistingDocHandler(Settings settings) {
super(settings);
}
@Override
public RestChannelConsumer prepareRequest(RestRequest restRequest, final NodeClient client) throws IOException {
PercolateRequest percolateRequest = new PercolateRequest();
percolateRequest.onlyCount(true);
return parseExistingDocPercolate(percolateRequest, restRequest, client);
}
}
}

View File

@ -1,189 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.MultiGetItemResponse;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Deprecated
public class TransportMultiPercolateAction extends HandledTransportAction<MultiPercolateRequest, MultiPercolateResponse> {
private final Client client;
private final SearchRequestParsers searchRequestParsers;
private final NamedXContentRegistry xContentRegistry;
@Inject
public TransportMultiPercolateAction(Settings settings, ThreadPool threadPool, TransportService transportService,
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
Client client, SearchRequestParsers searchRequestParsers, NamedXContentRegistry xContentRegistry) {
super(settings, MultiPercolateAction.NAME, threadPool, transportService, actionFilters,
indexNameExpressionResolver, MultiPercolateRequest::new);
this.client = client;
this.searchRequestParsers = searchRequestParsers;
this.xContentRegistry = xContentRegistry;
}
@Override
protected void doExecute(MultiPercolateRequest request, ActionListener<MultiPercolateResponse> listener) {
List<Tuple<Integer, GetRequest>> getRequests = new ArrayList<>();
for (int i = 0; i < request.requests().size(); i++) {
GetRequest getRequest = request.requests().get(i).getRequest();
if (getRequest != null) {
getRequests.add(new Tuple<>(i, getRequest));
}
}
if (getRequests.isEmpty()) {
innerDoExecute(request, listener, Collections.emptyMap(), new HashMap<>());
} else {
MultiGetRequest multiGetRequest = new MultiGetRequest();
for (Tuple<Integer, GetRequest> tuple : getRequests) {
GetRequest getRequest = tuple.v2();
multiGetRequest.add(new MultiGetRequest.Item(getRequest.index(), getRequest.type(), getRequest.id()));
}
client.multiGet(multiGetRequest, new ActionListener<MultiGetResponse>() {
@Override
public void onResponse(MultiGetResponse response) {
Map<Integer, BytesReference> getResponseSources = new HashMap<>(response.getResponses().length);
Map<Integer, MultiPercolateResponse.Item> preFailures = new HashMap<>();
for (int i = 0; i < response.getResponses().length; i++) {
MultiGetItemResponse itemResponse = response.getResponses()[i];
int originalSlot = getRequests.get(i).v1();
if (itemResponse.isFailed()) {
preFailures.put(originalSlot, new MultiPercolateResponse.Item(itemResponse.getFailure().getFailure()));
} else {
if (itemResponse.getResponse().isExists()) {
getResponseSources.put(originalSlot, itemResponse.getResponse().getSourceAsBytesRef());
} else {
GetRequest getRequest = getRequests.get(i).v2();
preFailures.put(originalSlot, new MultiPercolateResponse.Item(new ResourceNotFoundException("percolate document [{}/{}/{}] doesn't exist", getRequest.index(), getRequest.type(), getRequest.id())));
}
}
}
innerDoExecute(request, listener, getResponseSources, preFailures);
}
@Override
public void onFailure(Exception e) {
listener.onFailure(e);
}
});
}
}
private void innerDoExecute(MultiPercolateRequest request, ActionListener<MultiPercolateResponse> listener, Map<Integer, BytesReference> getResponseSources, Map<Integer, MultiPercolateResponse.Item> preFailures) {
try {
MultiSearchRequest multiSearchRequest = createMultiSearchRequest(request, getResponseSources, preFailures);
if (multiSearchRequest.requests().isEmpty()) {
// we may have failed to turn all percolate requests into search requests,
// in that case just return the response...
listener.onResponse(
createMultiPercolateResponse(new MultiSearchResponse(new MultiSearchResponse.Item[0]), request, preFailures)
);
} else {
client.multiSearch(multiSearchRequest, new ActionListener<MultiSearchResponse>() {
@Override
public void onResponse(MultiSearchResponse response) {
try {
listener.onResponse(createMultiPercolateResponse(response, request, preFailures));
} catch (Exception e) {
onFailure(e);
}
}
@Override
public void onFailure(Exception e) {
listener.onFailure(e);
}
});
}
} catch (Exception e) {
listener.onFailure(e);
}
}
private MultiSearchRequest createMultiSearchRequest(MultiPercolateRequest multiPercolateRequest, Map<Integer, BytesReference> getResponseSources, Map<Integer, MultiPercolateResponse.Item> preFailures) throws IOException {
MultiSearchRequest multiSearchRequest = new MultiSearchRequest();
multiSearchRequest.indicesOptions(multiPercolateRequest.indicesOptions());
for (int i = 0; i < multiPercolateRequest.requests().size(); i++) {
if (preFailures.keySet().contains(i)) {
continue;
}
PercolateRequest percolateRequest = multiPercolateRequest.requests().get(i);
BytesReference docSource = getResponseSources.get(i);
try {
SearchRequest searchRequest = TransportPercolateAction.createSearchRequest(percolateRequest, docSource, xContentRegistry,
parseFieldMatcher);
multiSearchRequest.add(searchRequest);
} catch (Exception e) {
preFailures.put(i, new MultiPercolateResponse.Item(e));
}
}
return multiSearchRequest;
}
private MultiPercolateResponse createMultiPercolateResponse(MultiSearchResponse multiSearchResponse, MultiPercolateRequest request, Map<Integer, MultiPercolateResponse.Item> preFailures) {
int searchResponseIndex = 0;
MultiPercolateResponse.Item[] percolateItems = new MultiPercolateResponse.Item[request.requests().size()];
for (int i = 0; i < percolateItems.length; i++) {
if (preFailures.keySet().contains(i)) {
percolateItems[i] = preFailures.get(i);
} else {
MultiSearchResponse.Item searchItem = multiSearchResponse.getResponses()[searchResponseIndex++];
if (searchItem.isFailure()) {
percolateItems[i] = new MultiPercolateResponse.Item(searchItem.getFailure());
} else {
PercolateRequest percolateRequest = request.requests().get(i);
percolateItems[i] = new MultiPercolateResponse.Item(TransportPercolateAction.createPercolateResponse(searchItem.getResponse(), percolateRequest.onlyCount()));
}
}
}
return new MultiPercolateResponse(percolateItems);
}
}
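As a point of comparison, a hedged sketch of the msearch-based flow that replaces the transport action above: one search request carrying a `percolate` query per document. The `client`, the `documents` collection, the `queries` index and the `query` field are illustrative assumptions; the query constructor matches the one used in this class.
// Sketch only: the msearch equivalent of the removed multi-percolate flow.
MultiSearchRequestBuilder msearch = client.prepareMultiSearch();
for (BytesReference document : documents) {
    // one percolate query per document, same field/type convention as above
    msearch.add(client.prepareSearch("queries")
        .setQuery(new PercolateQueryBuilder("query", "type", document)));
}
MultiSearchResponse response = msearch.get();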

View File

@ -1,261 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@Deprecated
public class TransportPercolateAction extends HandledTransportAction<PercolateRequest, PercolateResponse> {
private final Client client;
private final SearchRequestParsers searchRequestParsers;
private final NamedXContentRegistry xContentRegistry;
@Inject
public TransportPercolateAction(Settings settings, ThreadPool threadPool, TransportService transportService,
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
Client client, SearchRequestParsers searchRequestParsers, NamedXContentRegistry xContentRegistry) {
super(settings, PercolateAction.NAME, threadPool, transportService, actionFilters,
indexNameExpressionResolver, PercolateRequest::new);
this.client = client;
this.searchRequestParsers = searchRequestParsers;
this.xContentRegistry = xContentRegistry;
}
@Override
protected void doExecute(PercolateRequest request, ActionListener<PercolateResponse> listener) {
if (request.getRequest() != null) {
client.get(request.getRequest(), new ActionListener<GetResponse>() {
@Override
public void onResponse(GetResponse getResponse) {
if (getResponse.isExists()) {
innerDoExecute(request, getResponse.getSourceAsBytesRef(), listener);
} else {
onFailure(new ResourceNotFoundException("percolate document [{}/{}/{}] doesn't exist",
request.getRequest().index(), request.getRequest().type(), request.getRequest().id()));
}
}
@Override
public void onFailure(Exception e) {
listener.onFailure(e);
}
});
} else {
innerDoExecute(request, null, listener);
}
}
private void innerDoExecute(PercolateRequest request, BytesReference docSource, ActionListener<PercolateResponse> listener) {
SearchRequest searchRequest;
try {
searchRequest = createSearchRequest(request, docSource, xContentRegistry, parseFieldMatcher);
} catch (IOException e) {
listener.onFailure(e);
return;
}
client.search(searchRequest, new ActionListener<SearchResponse>() {
@Override
public void onResponse(SearchResponse searchResponse) {
try {
listener.onResponse(createPercolateResponse(searchResponse, request.onlyCount()));
} catch (Exception e) {
onFailure(e);
}
}
@Override
public void onFailure(Exception e) {
listener.onFailure(e);
}
});
}
public static SearchRequest createSearchRequest(PercolateRequest percolateRequest, BytesReference documentSource,
NamedXContentRegistry xContentRegistry,
ParseFieldMatcher parseFieldMatcher)
throws IOException {
SearchRequest searchRequest = new SearchRequest();
if (percolateRequest.indices() != null) {
searchRequest.indices(percolateRequest.indices());
}
searchRequest.indicesOptions(percolateRequest.indicesOptions());
searchRequest.routing(percolateRequest.routing());
searchRequest.preference(percolateRequest.preference());
BytesReference querySource = null;
XContentBuilder searchSource = XContentFactory.jsonBuilder().startObject();
if (percolateRequest.source() != null && percolateRequest.source().length() > 0) {
try (XContentParser parser = XContentHelper.createParser(xContentRegistry, percolateRequest.source())) {
String currentFieldName = null;
XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new IllegalArgumentException("Unknown token [" + token + "]");
}
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("doc".equals(currentFieldName)) {
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.copyCurrentStructure(parser);
builder.flush();
documentSource = builder.bytes();
} else if ("query".equals(currentFieldName) || "filter".equals(currentFieldName)) {
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.copyCurrentStructure(parser);
builder.flush();
querySource = builder.bytes();
} else if ("sort".equals(currentFieldName)) {
searchSource.field("sort");
searchSource.copyCurrentStructure(parser);
} else if ("aggregations".equals(currentFieldName)) {
searchSource.field("aggregations");
searchSource.copyCurrentStructure(parser);
} else if ("highlight".equals(currentFieldName)) {
searchSource.field("highlight");
searchSource.copyCurrentStructure(parser);
} else {
throw new IllegalArgumentException("Unknown field [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_ARRAY) {
if ("sort".equals(currentFieldName)) {
searchSource.field("sort");
searchSource.copyCurrentStructure(parser);
} else {
throw new IllegalArgumentException("Unknown field [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if ("size".equals(currentFieldName)) {
if (percolateRequest.onlyCount()) {
throw new IllegalArgumentException("Cannot set size if onlyCount == true");
}
searchSource.field("size", parser.intValue());
} else if ("sort".equals(currentFieldName)) {
searchSource.field("sort", parser.text());
} else if ("track_scores".equals(currentFieldName) || "trackScores".equals(currentFieldName)) {
searchSource.field("track_scores", parser.booleanValue());
} else {
throw new IllegalArgumentException("Unknown field [" + currentFieldName + "]");
}
} else {
throw new IllegalArgumentException("Unknown token [" + token + "]");
}
}
}
}
if (percolateRequest.onlyCount()) {
searchSource.field("size", 0);
}
PercolateQueryBuilder percolateQueryBuilder =
new PercolateQueryBuilder("query", percolateRequest.documentType(), documentSource);
if (querySource != null) {
try (XContentParser parser = XContentHelper.createParser(xContentRegistry, querySource)) {
QueryParseContext queryParseContext = new QueryParseContext(parser, parseFieldMatcher);
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
boolQueryBuilder.must(queryParseContext.parseInnerQueryBuilder());
boolQueryBuilder.filter(percolateQueryBuilder);
searchSource.field("query", boolQueryBuilder);
}
} else {
// wrapping in a constant score query with boost 0 for bwc reasons.
// percolator api didn't emit scores before and never included scores
// for how well percolator queries matched with the document being percolated
searchSource.field("query", new ConstantScoreQueryBuilder(percolateQueryBuilder).boost(0f));
}
searchSource.endObject();
searchSource.flush();
BytesReference source = searchSource.bytes();
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(xContentRegistry, source)) {
QueryParseContext context = new QueryParseContext(parser, parseFieldMatcher);
searchSourceBuilder.parseXContent(context);
searchRequest.source(searchSourceBuilder);
return searchRequest;
}
}
public static PercolateResponse createPercolateResponse(SearchResponse searchResponse, boolean onlyCount) {
SearchHits hits = searchResponse.getHits();
PercolateResponse.Match[] matches;
if (onlyCount) {
matches = null;
} else {
matches = new PercolateResponse.Match[hits.getHits().length];
for (int i = 0; i < hits.getHits().length; i++) {
SearchHit hit = hits.getHits()[i];
matches[i] = new PercolateResponse.Match(new Text(hit.getIndex()),
new Text(hit.getId()), hit.getScore(), hit.getHighlightFields());
}
}
List<ShardOperationFailedException> shardFailures = new ArrayList<>(searchResponse.getShardFailures().length);
for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) {
shardFailures.add(new DefaultShardOperationFailedException(shardSearchFailure.index(), shardSearchFailure.shardId(),
shardSearchFailure.getCause()));
}
return new PercolateResponse(
searchResponse.getTotalShards(), searchResponse.getSuccessfulShards(), searchResponse.getFailedShards(), shardFailures,
matches, hits.getTotalHits(), searchResponse.getTookInMillis(), (InternalAggregations) searchResponse.getAggregations()
);
}
}
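For a single document, a rough sketch of what createSearchRequest() above boils down to when expressed directly against the search API. The `client`, the `queries` index and the `query` field are illustrative assumptions, and `documentSource` is the document bytes, as in the code above.
// Sketch only: the search-API equivalent of the removed percolate action,
// wrapped in constant_score with boost 0 just as createSearchRequest() does.
SearchResponse response = client.prepareSearch("queries")
    .setQuery(new ConstantScoreQueryBuilder(
        new PercolateQueryBuilder("query", "type", documentSource)).boost(0f))
    .get();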

View File

@ -1,414 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.Operator;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import static org.elasticsearch.percolator.PercolateSourceBuilder.docBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.smileBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.yamlBuilder;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.percolator.PercolatorTestUtil.convertFromTextArray;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.percolator.PercolatorTestUtil.assertMatchCount;
import static org.elasticsearch.percolator.PercolatorTestUtil.preparePercolate;
import static org.elasticsearch.percolator.PercolatorTestUtil.prepareMultiPercolate;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
public class MultiPercolatorIT extends ESIntegTestCase {
private static final String INDEX_NAME = "queries";
private static final String TYPE_NAME = "query";
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(PercolatorPlugin.class);
}
@Override
protected Collection<Class<? extends Plugin>> transportClientPlugins() {
return Collections.singleton(PercolatorPlugin.class);
}
public void testBasics() throws Exception {
assertAcked(prepareCreate(INDEX_NAME)
.addMapping(TYPE_NAME, "query", "type=percolator")
.addMapping("type", "field1", "type=text"));
ensureGreen();
logger.info("--> register a queries");
client().prepareIndex(INDEX_NAME, TYPE_NAME, "1")
.setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject())
.execute().actionGet();
client().prepareIndex(INDEX_NAME, TYPE_NAME, "2")
.setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject())
.execute().actionGet();
client().prepareIndex(INDEX_NAME, TYPE_NAME, "3")
.setSource(jsonBuilder().startObject().field("query", boolQuery()
.must(matchQuery("field1", "b"))
.must(matchQuery("field1", "c"))
).endObject())
.execute().actionGet();
client().prepareIndex(INDEX_NAME, TYPE_NAME, "4")
.setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject())
.execute().actionGet();
refresh();
MultiPercolateResponse response = prepareMultiPercolate(client())
.add(preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject())))
.add(preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(yamlBuilder().startObject().field("field1", "c").endObject())))
.add(preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(smileBuilder().startObject().field("field1", "b c").endObject())))
.add(preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "d").endObject())))
.add(preparePercolate(client()) // non-existing doc, so this item becomes an error element
.setIndices(INDEX_NAME).setDocumentType("type")
.setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("5")))
.execute().actionGet();
MultiPercolateResponse.Item item = response.getItems()[0];
assertMatchCount(item.getResponse(), 2L);
assertThat(item.getResponse().getMatches(), arrayWithSize(2));
assertThat(item.getErrorMessage(), nullValue());
assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "4"));
item = response.getItems()[1];
assertThat(item.getErrorMessage(), nullValue());
assertMatchCount(item.getResponse(), 2L);
assertThat(item.getResponse().getMatches(), arrayWithSize(2));
assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContainingInAnyOrder("2", "4"));
item = response.getItems()[2];
assertThat(item.getErrorMessage(), nullValue());
assertMatchCount(item.getResponse(), 4L);
assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4"));
item = response.getItems()[3];
assertThat(item.getErrorMessage(), nullValue());
assertMatchCount(item.getResponse(), 1L);
assertThat(item.getResponse().getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContaining("4"));
item = response.getItems()[4];
assertThat(item.getResponse(), nullValue());
assertThat(item.getErrorMessage(), notNullValue());
assertThat(item.getErrorMessage(), containsString("[" + INDEX_NAME + "/type/5] doesn't exist"));
}
public void testWithRouting() throws Exception {
assertAcked(prepareCreate(INDEX_NAME)
.addMapping(TYPE_NAME, "query", "type=percolator")
.addMapping("type", "field1", "type=text"));
ensureGreen();
logger.info("--> register a queries");
client().prepareIndex(INDEX_NAME, TYPE_NAME, "1")
.setRouting("a")
.setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject())
.execute().actionGet();
client().prepareIndex(INDEX_NAME, TYPE_NAME, "2")
.setRouting("a")
.setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject())
.execute().actionGet();
client().prepareIndex(INDEX_NAME, TYPE_NAME, "3")
.setRouting("a")
.setSource(jsonBuilder().startObject().field("query", boolQuery()
.must(matchQuery("field1", "b"))
.must(matchQuery("field1", "c"))
).endObject())
.execute().actionGet();
client().prepareIndex(INDEX_NAME, TYPE_NAME, "4")
.setRouting("a")
.setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject())
.execute().actionGet();
refresh();
MultiPercolateResponse response = prepareMultiPercolate(client())
.add(preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("type")
.setRouting("a")
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject())))
.add(preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("type")
.setRouting("a")
.setPercolateDoc(docBuilder().setDoc(yamlBuilder().startObject().field("field1", "c").endObject())))
.add(preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("type")
.setRouting("a")
.setPercolateDoc(docBuilder().setDoc(smileBuilder().startObject().field("field1", "b c").endObject())))
.add(preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("type")
.setRouting("a")
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "d").endObject())))
.add(preparePercolate(client()) // non-existing doc, so this item becomes an error element
.setIndices(INDEX_NAME).setDocumentType("type")
.setRouting("a")
.setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("5")))
.execute().actionGet();
MultiPercolateResponse.Item item = response.getItems()[0];
assertMatchCount(item.getResponse(), 2L);
assertThat(item.getResponse().getMatches(), arrayWithSize(2));
assertThat(item.getErrorMessage(), nullValue());
assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "4"));
item = response.getItems()[1];
assertThat(item.getErrorMessage(), nullValue());
assertMatchCount(item.getResponse(), 2L);
assertThat(item.getResponse().getMatches(), arrayWithSize(2));
assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContainingInAnyOrder("2", "4"));
item = response.getItems()[2];
assertThat(item.getErrorMessage(), nullValue());
assertMatchCount(item.getResponse(), 4L);
assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4"));
item = response.getItems()[3];
assertThat(item.getErrorMessage(), nullValue());
assertMatchCount(item.getResponse(), 1L);
assertThat(item.getResponse().getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(item.getResponse().getMatches(), INDEX_NAME), arrayContaining("4"));
item = response.getItems()[4];
assertThat(item.getResponse(), nullValue());
assertThat(item.getErrorMessage(), notNullValue());
assertThat(item.getErrorMessage(), containsString("[" + INDEX_NAME + "/type/5] doesn't exist"));
}
public void testExistingDocsOnly() throws Exception {
prepareCreate(INDEX_NAME).addMapping(TYPE_NAME, "query", "type=percolator").get();
int numQueries = randomIntBetween(50, 100);
logger.info("--> register a queries");
for (int i = 0; i < numQueries; i++) {
client().prepareIndex(INDEX_NAME, TYPE_NAME, Integer.toString(i))
.setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject())
.execute().actionGet();
}
client().prepareIndex(INDEX_NAME, "type", "1")
.setSource(jsonBuilder().startObject().field("field", "a").endObject())
.execute().actionGet();
refresh();
MultiPercolateRequestBuilder builder = prepareMultiPercolate(client());
int numPercolateRequest = randomIntBetween(50, 100);
for (int i = 0; i < numPercolateRequest; i++) {
builder.add(
preparePercolate(client())
.setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("1"))
.setIndices(INDEX_NAME).setDocumentType("type")
.setSize(numQueries)
);
}
MultiPercolateResponse response = builder.execute().actionGet();
assertThat(response.items().length, equalTo(numPercolateRequest));
for (MultiPercolateResponse.Item item : response) {
assertThat(item.isFailure(), equalTo(false));
assertMatchCount(item.getResponse(), numQueries);
assertThat(item.getResponse().getMatches().length, equalTo(numQueries));
}
// Non-existing doc
builder = prepareMultiPercolate(client());
for (int i = 0; i < numPercolateRequest; i++) {
builder.add(
preparePercolate(client())
.setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2"))
.setIndices(INDEX_NAME).setDocumentType("type").setSize(numQueries)
);
}
response = builder.execute().actionGet();
assertThat(response.items().length, equalTo(numPercolateRequest));
for (MultiPercolateResponse.Item item : response) {
assertThat(item.isFailure(), equalTo(true));
assertThat(item.getErrorMessage(), containsString("doesn't exist"));
assertThat(item.getResponse(), nullValue());
}
// One existing doc
builder = prepareMultiPercolate(client());
for (int i = 0; i < numPercolateRequest; i++) {
builder.add(
preparePercolate(client())
.setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2"))
.setIndices(INDEX_NAME).setDocumentType("type").setSize(numQueries)
);
}
builder.add(
preparePercolate(client())
.setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("1"))
.setIndices(INDEX_NAME).setDocumentType("type").setSize(numQueries)
);
response = builder.execute().actionGet();
assertThat(response.items().length, equalTo(numPercolateRequest + 1));
assertThat(response.items()[numPercolateRequest].isFailure(), equalTo(false));
assertMatchCount(response.items()[numPercolateRequest].getResponse(), numQueries);
assertThat(response.items()[numPercolateRequest].getResponse().getMatches().length, equalTo(numQueries));
}
public void testWithDocsOnly() throws Exception {
prepareCreate(INDEX_NAME).addMapping(TYPE_NAME, "query", "type=percolator").get();
ensureGreen();
int numQueries = randomIntBetween(50, 100);
logger.info("--> register a queries");
for (int i = 0; i < numQueries; i++) {
client().prepareIndex(INDEX_NAME, TYPE_NAME, Integer.toString(i))
.setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject())
.execute().actionGet();
}
refresh();
MultiPercolateRequestBuilder builder = prepareMultiPercolate(client());
int numPercolateRequest = randomIntBetween(50, 100);
for (int i = 0; i < numPercolateRequest; i++) {
builder.add(
preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("type")
.setSize(numQueries)
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field", "a").endObject())));
}
MultiPercolateResponse response = builder.execute().actionGet();
assertThat(response.items().length, equalTo(numPercolateRequest));
for (MultiPercolateResponse.Item item : response) {
assertThat(item.isFailure(), equalTo(false));
assertMatchCount(item.getResponse(), numQueries);
assertThat(item.getResponse().getMatches().length, equalTo(numQueries));
}
// All illegal json
builder = prepareMultiPercolate(client());
for (int i = 0; i < numPercolateRequest; i++) {
builder.add(
preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("type")
.setSource("illegal json"));
}
response = builder.execute().actionGet();
assertThat(response.items().length, equalTo(numPercolateRequest));
for (MultiPercolateResponse.Item item : response) {
assertThat(item.isFailure(), equalTo(true));
assertThat(item.getFailure(), notNullValue());
}
// one valid request
builder = prepareMultiPercolate(client());
for (int i = 0; i < numPercolateRequest; i++) {
builder.add(
preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("type")
.setSource("illegal json"));
}
builder.add(
preparePercolate(client())
.setSize(numQueries)
.setIndices(INDEX_NAME).setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field", "a").endObject())));
response = builder.execute().actionGet();
assertThat(response.items().length, equalTo(numPercolateRequest + 1));
assertThat(response.items()[numPercolateRequest].isFailure(), equalTo(false));
assertMatchCount(response.items()[numPercolateRequest].getResponse(), numQueries);
assertThat(response.items()[numPercolateRequest].getResponse().getMatches().length, equalTo(numQueries));
}
public void testNestedMultiPercolation() throws IOException {
initNestedIndexAndPercolation();
MultiPercolateRequestBuilder mpercolate = prepareMultiPercolate(client());
mpercolate.add(preparePercolate(client()).setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getNotMatchingNestedDoc())).setIndices(INDEX_NAME).setDocumentType("company"));
mpercolate.add(preparePercolate(client()).setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getMatchingNestedDoc())).setIndices(INDEX_NAME).setDocumentType("company"));
MultiPercolateResponse response = mpercolate.get();
assertEquals(response.getItems()[0].getResponse().getMatches().length, 0);
assertEquals(response.getItems()[1].getResponse().getMatches().length, 1);
assertEquals(response.getItems()[1].getResponse().getMatches()[0].getId().string(), "Q");
}
void initNestedIndexAndPercolation() throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder();
mapping.startObject().startObject("properties").startObject("companyname").field("type", "text").endObject()
.startObject("employee").field("type", "nested").startObject("properties")
.startObject("name").field("type", "text").endObject().endObject().endObject().endObject()
.endObject();
assertAcked(client().admin().indices().prepareCreate(INDEX_NAME)
.addMapping(TYPE_NAME, "query", "type=percolator")
.addMapping("company", mapping));
ensureGreen(INDEX_NAME);
client().prepareIndex(INDEX_NAME, TYPE_NAME, "Q").setSource(jsonBuilder().startObject()
.field("query", QueryBuilders.nestedQuery("employee", QueryBuilders.matchQuery("employee.name", "virginia potts").operator(Operator.AND), ScoreMode.Avg)).endObject()).get();
refresh();
}
XContentBuilder getMatchingNestedDoc() throws IOException {
XContentBuilder doc = XContentFactory.jsonBuilder();
doc.startObject().field("companyname", "stark").startArray("employee")
.startObject().field("name", "virginia potts").endObject()
.startObject().field("name", "tony stark").endObject()
.endArray().endObject();
return doc;
}
XContentBuilder getNotMatchingNestedDoc() throws IOException {
XContentBuilder doc = XContentFactory.jsonBuilder();
doc.startObject().field("companyname", "notstark").startArray("employee")
.startObject().field("name", "virginia stark").endObject()
.startObject().field("name", "tony potts").endObject()
.endArray().endObject();
return doc;
}
}

View File

@ -1,193 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.StreamsUtils;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
public class MultiPercolatorRequestTests extends ESTestCase {
public void testParseBulkRequests() throws Exception {
byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/percolator/mpercolate1.json");
MultiPercolateRequest request = new MultiPercolateRequest().add(data, 0, data.length);
assertThat(request.requests().size(), equalTo(8));
PercolateRequest percolateRequest = request.requests().get(0);
assertThat(percolateRequest.indices()[0], equalTo("my-index1"));
assertThat(percolateRequest.documentType(), equalTo("my-type1"));
assertThat(percolateRequest.routing(), equalTo("my-routing-1"));
assertThat(percolateRequest.preference(), equalTo("_local"));
assertThat(percolateRequest.indicesOptions(), equalTo(IndicesOptions.strictExpandOpenAndForbidClosed()));
assertThat(percolateRequest.onlyCount(), equalTo(false));
assertThat(percolateRequest.getRequest(), nullValue());
assertThat(percolateRequest.source(), notNullValue());
Map sourceMap = createParser(JsonXContent.jsonXContent, percolateRequest.source()).map();
assertThat(sourceMap.get("doc"), equalTo((Object) MapBuilder.newMapBuilder().put("field1", "value1").map()));
percolateRequest = request.requests().get(1);
assertThat(percolateRequest.indices()[0], equalTo("my-index2"));
assertThat(percolateRequest.indices()[1], equalTo("my-index3"));
assertThat(percolateRequest.documentType(), equalTo("my-type1"));
assertThat(percolateRequest.routing(), equalTo("my-routing-1"));
assertThat(percolateRequest.preference(), equalTo("_local"));
assertThat(percolateRequest.indicesOptions(), equalTo(IndicesOptions.fromOptions(true, true, true, false, IndicesOptions.strictExpandOpenAndForbidClosed())));
assertThat(percolateRequest.onlyCount(), equalTo(false));
assertThat(percolateRequest.getRequest(), nullValue());
assertThat(percolateRequest.source(), notNullValue());
sourceMap = createParser(JsonXContent.jsonXContent, percolateRequest.source()).map();
assertThat(sourceMap.get("doc"), equalTo((Object) MapBuilder.newMapBuilder().put("field1", "value2").map()));
percolateRequest = request.requests().get(2);
assertThat(percolateRequest.indices()[0], equalTo("my-index4"));
assertThat(percolateRequest.indices()[1], equalTo("my-index5"));
assertThat(percolateRequest.documentType(), equalTo("my-type1"));
assertThat(percolateRequest.routing(), equalTo("my-routing-1"));
assertThat(percolateRequest.preference(), equalTo("_local"));
assertThat(percolateRequest.indicesOptions(), equalTo(IndicesOptions.fromOptions(false, true, true, true, IndicesOptions.strictExpandOpenAndForbidClosed())));
assertThat(percolateRequest.onlyCount(), equalTo(true));
assertThat(percolateRequest.getRequest(), nullValue());
assertThat(percolateRequest.source(), notNullValue());
sourceMap = createParser(JsonXContent.jsonXContent, percolateRequest.source()).map();
assertThat(sourceMap.get("doc"), equalTo((Object) MapBuilder.newMapBuilder().put("field1", "value3").map()));
percolateRequest = request.requests().get(3);
assertThat(percolateRequest.indices()[0], equalTo("my-index6"));
assertThat(percolateRequest.documentType(), equalTo("my-type1"));
assertThat(percolateRequest.routing(), equalTo("my-routing-1"));
assertThat(percolateRequest.preference(), equalTo("_local"));
assertThat(percolateRequest.indicesOptions(), equalTo(IndicesOptions.fromOptions(false, true, true, true, IndicesOptions.strictExpandOpenAndForbidClosed())));
assertThat(percolateRequest.onlyCount(), equalTo(false));
assertThat(percolateRequest.getRequest(), notNullValue());
assertThat(percolateRequest.getRequest().id(), equalTo("1"));
assertThat(percolateRequest.getRequest().type(), equalTo("my-type1"));
assertThat(percolateRequest.getRequest().index(), equalTo("my-index6"));
assertThat(percolateRequest.getRequest().routing(), equalTo("my-routing-1"));
assertThat(percolateRequest.getRequest().preference(), equalTo("_local"));
percolateRequest = request.requests().get(4);
assertThat(percolateRequest.indices()[0], equalTo("my-index7"));
assertThat(percolateRequest.documentType(), equalTo("my-type1"));
assertThat(percolateRequest.routing(), equalTo("my-routing-1"));
assertThat(percolateRequest.preference(), equalTo("_local"));
assertThat(percolateRequest.indicesOptions(), equalTo(IndicesOptions.strictExpandOpenAndForbidClosed()));
assertThat(percolateRequest.onlyCount(), equalTo(true));
assertThat(percolateRequest.getRequest(), notNullValue());
assertThat(percolateRequest.getRequest().id(), equalTo("2"));
assertThat(percolateRequest.getRequest().type(), equalTo("my-type1"));
assertThat(percolateRequest.getRequest().index(), equalTo("my-index7"));
assertThat(percolateRequest.getRequest().routing(), equalTo("my-routing-1"));
assertThat(percolateRequest.getRequest().preference(), equalTo("_local"));
percolateRequest = request.requests().get(5);
assertThat(percolateRequest.indices()[0], equalTo("my-index8"));
assertThat(percolateRequest.documentType(), equalTo("my-type1"));
assertThat(percolateRequest.routing(), equalTo("my-routing-1"));
assertThat(percolateRequest.preference(), equalTo("primary"));
assertThat(percolateRequest.indicesOptions(), equalTo(IndicesOptions.strictExpandOpenAndForbidClosed()));
assertThat(percolateRequest.onlyCount(), equalTo(false));
assertThat(percolateRequest.getRequest(), nullValue());
assertThat(percolateRequest.source(), notNullValue());
sourceMap = createParser(JsonXContent.jsonXContent, percolateRequest.source()).map();
assertThat(sourceMap.get("doc"), equalTo((Object) MapBuilder.newMapBuilder().put("field1", "value4").map()));
percolateRequest = request.requests().get(6);
assertThat(percolateRequest.indices()[0], equalTo("percolate-index1"));
assertThat(percolateRequest.documentType(), equalTo("other-type"));
assertThat(percolateRequest.routing(), equalTo("percolate-routing-1"));
assertThat(percolateRequest.preference(), equalTo("_local"));
assertThat(percolateRequest.getRequest(), notNullValue());
assertThat(percolateRequest.getRequest().indices()[0], equalTo("my-index9"));
assertThat(percolateRequest.getRequest().type(), equalTo("my-type1"));
assertThat(percolateRequest.getRequest().routing(), nullValue());
assertThat(percolateRequest.getRequest().preference(), nullValue());
assertThat(percolateRequest.indicesOptions(), equalTo(IndicesOptions.strictExpandOpenAndForbidClosed()));
assertThat(percolateRequest.onlyCount(), equalTo(false));
assertThat(percolateRequest.source(), notNullValue());
sourceMap = createParser(JsonXContent.jsonXContent, percolateRequest.source()).map();
assertThat(sourceMap.get("doc"), nullValue());
percolateRequest = request.requests().get(7);
assertThat(percolateRequest.indices()[0], equalTo("my-index10"));
assertThat(percolateRequest.documentType(), equalTo("my-type1"));
assertThat(percolateRequest.routing(), nullValue());
assertThat(percolateRequest.preference(), nullValue());
assertThat(percolateRequest.getRequest(), notNullValue());
assertThat(percolateRequest.getRequest().indices()[0], equalTo("my-index10"));
assertThat(percolateRequest.getRequest().type(), equalTo("my-type1"));
assertThat(percolateRequest.getRequest().routing(), nullValue());
assertThat(percolateRequest.getRequest().preference(), nullValue());
assertThat(percolateRequest.indicesOptions(), equalTo(IndicesOptions.fromOptions(false, false, true, false, IndicesOptions.strictExpandOpenAndForbidClosed())));
assertThat(percolateRequest.onlyCount(), equalTo(false));
assertThat(percolateRequest.source(), notNullValue());
sourceMap = createParser(JsonXContent.jsonXContent, percolateRequest.source()).map();
assertThat(sourceMap.get("doc"), nullValue());
}
public void testParseBulkRequestsDefaults() throws Exception {
byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/percolator/mpercolate2.json");
MultiPercolateRequest request = new MultiPercolateRequest();
request.indices("my-index1").documentType("my-type1").indicesOptions(IndicesOptions.lenientExpandOpen());
request.add(data, 0, data.length);
assertThat(request.requests().size(), equalTo(3));
PercolateRequest percolateRequest = request.requests().get(0);
assertThat(percolateRequest.indices()[0], equalTo("my-index1"));
assertThat(percolateRequest.documentType(), equalTo("my-type1"));
assertThat(percolateRequest.routing(), equalTo("my-routing-1"));
assertThat(percolateRequest.preference(), equalTo("_local"));
assertThat(percolateRequest.indicesOptions(), equalTo(IndicesOptions.lenientExpandOpen()));
assertThat(percolateRequest.onlyCount(), equalTo(false));
assertThat(percolateRequest.getRequest(), nullValue());
assertThat(percolateRequest.source(), notNullValue());
Map sourceMap = createParser(JsonXContent.jsonXContent, percolateRequest.source()).map();
assertThat(sourceMap.get("doc"), equalTo((Object) MapBuilder.newMapBuilder().put("field1", "value1").map()));
percolateRequest = request.requests().get(1);
assertThat(percolateRequest.indices()[0], equalTo("my-index1"));
assertThat(percolateRequest.documentType(), equalTo("my-type1"));
assertThat(percolateRequest.routing(), equalTo("my-routing-1"));
assertThat(percolateRequest.preference(), equalTo("_local"));
assertThat(percolateRequest.indicesOptions(), equalTo(IndicesOptions.lenientExpandOpen()));
assertThat(percolateRequest.onlyCount(), equalTo(false));
assertThat(percolateRequest.getRequest(), nullValue());
assertThat(percolateRequest.source(), notNullValue());
sourceMap = createParser(JsonXContent.jsonXContent, percolateRequest.source()).map();
assertThat(sourceMap.get("doc"), equalTo((Object) MapBuilder.newMapBuilder().put("field1", "value2").map()));
percolateRequest = request.requests().get(2);
assertThat(percolateRequest.indices()[0], equalTo("my-index1"));
assertThat(percolateRequest.documentType(), equalTo("my-type1"));
assertThat(percolateRequest.indicesOptions(), equalTo(IndicesOptions.lenientExpandOpen()));
assertThat(percolateRequest.onlyCount(), equalTo(false));
assertThat(percolateRequest.getRequest(), nullValue());
assertThat(percolateRequest.source(), notNullValue());
sourceMap = createParser(JsonXContent.jsonXContent, percolateRequest.source()).map();
assertThat(sourceMap.get("doc"), equalTo((Object) MapBuilder.newMapBuilder().put("field1", "value3").map()));
}
}
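
The bulk mpercolate format parsed by these tests maps onto a single msearch request carrying one `percolate` query per document. A minimal sketch, assuming a 5.x+ Java `Client` handle and reusing the placeholder index, type and field names from the tests above; the wrapper class and method names are illustrative only:

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

import java.io.IOException;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.percolator.PercolateQueryBuilder;

public class MultiPercolateMigrationSketch {
    // Sketch only: index, type and field names are placeholders, not part of the codebase.
    // Issues one search per document; each search runs the percolator queries registered
    // in the "query" field of my-index1 against the embedded document.
    static MultiSearchResponse percolateTwoDocs(Client client) throws IOException {
        return client.prepareMultiSearch()
                .add(client.prepareSearch("my-index1")
                        .setQuery(new PercolateQueryBuilder("query", "my-type1",
                                jsonBuilder().startObject().field("field1", "value1").endObject().bytes())))
                .add(client.prepareSearch("my-index1")
                        .setQuery(new PercolateQueryBuilder("query", "my-type1",
                                jsonBuilder().startObject().field("field1", "value2").endObject().bytes())))
                .get();
    }
}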

View File

@ -1,293 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.percolator.PercolateSourceBuilder.docBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.percolator.PercolatorTestUtil.assertMatchCount;
import static org.elasticsearch.percolator.PercolatorTestUtil.preparePercolate;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
public class PercolatorAggregationsIT extends ESIntegTestCase {
private static final String INDEX_NAME = "queries";
private static final String TYPE_NAME = "query";
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(PercolatorPlugin.class);
}
@Override
protected Collection<Class<? extends Plugin>> transportClientPlugins() {
return Collections.singleton(PercolatorPlugin.class);
}
// Just test the integration with facets and aggregations, not the facet and aggregation functionality!
public void testAggregations() throws Exception {
assertAcked(prepareCreate(INDEX_NAME)
.addMapping(TYPE_NAME, "query", "type=percolator")
.addMapping("type", "field1", "type=text", "field2", "type=keyword"));
ensureGreen();
int numQueries = scaledRandomIntBetween(250, 500);
int numUniqueQueries = between(1, numQueries / 2);
String[] values = new String[numUniqueQueries];
for (int i = 0; i < values.length; i++) {
values[i] = "value" + i;
}
int[] expectedCount = new int[numUniqueQueries];
logger.info("--> registering {} queries", numQueries);
for (int i = 0; i < numQueries; i++) {
String value = values[i % numUniqueQueries];
expectedCount[i % numUniqueQueries]++;
QueryBuilder queryBuilder = matchQuery("field1", value);
client().prepareIndex(INDEX_NAME, TYPE_NAME, Integer.toString(i))
.setSource(jsonBuilder().startObject().field("query", queryBuilder).field("field2", "b").endObject()).execute()
.actionGet();
}
refresh();
for (int i = 0; i < numQueries; i++) {
String value = values[i % numUniqueQueries];
PercolateRequestBuilder percolateRequestBuilder = preparePercolate(client())
.setIndices(INDEX_NAME)
.setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", value).endObject()));
SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values());
percolateRequestBuilder.addAggregation(AggregationBuilders.terms("a").field("field2").collectMode(aggCollectionMode));
if (randomBoolean()) {
percolateRequestBuilder.setPercolateQuery(matchAllQuery());
}
boolean countOnly = randomBoolean();
if (countOnly) {
percolateRequestBuilder.setOnlyCount(countOnly);
} else {
// can only set size if we also keep track of matches (i.e. countOnly == false)
if (randomBoolean()) {
percolateRequestBuilder.setScore(true).setSize(expectedCount[i % numUniqueQueries]);
} else {
percolateRequestBuilder.setSortByScore(true).setSize(numQueries);
}
}
PercolateResponse response = percolateRequestBuilder.execute().actionGet();
assertMatchCount(response, expectedCount[i % numUniqueQueries]);
if (!countOnly) {
assertThat(response.getMatches(), arrayWithSize(expectedCount[i % numUniqueQueries]));
}
List<Aggregation> aggregations = response.getAggregations().asList();
assertThat(aggregations.size(), equalTo(1));
assertThat(aggregations.get(0).getName(), equalTo("a"));
List<Terms.Bucket> buckets = new ArrayList<>(((Terms) aggregations.get(0)).getBuckets());
assertThat(buckets.size(), equalTo(1));
assertThat(buckets.get(0).getKeyAsString(), equalTo("b"));
assertThat(buckets.get(0).getDocCount(), equalTo((long) expectedCount[i % values.length]));
}
}
// Just test the integration with facets and aggregations, not the facet and aggregation functionality!
public void testAggregationsAndPipelineAggregations() throws Exception {
assertAcked(prepareCreate(INDEX_NAME)
.addMapping(TYPE_NAME, "query", "type=percolator")
.addMapping("type", "field1", "type=text", "field2", "type=keyword"));
ensureGreen();
int numQueries = scaledRandomIntBetween(250, 500);
int numUniqueQueries = between(1, numQueries / 2);
String[] values = new String[numUniqueQueries];
for (int i = 0; i < values.length; i++) {
values[i] = "value" + i;
}
int[] expectedCount = new int[numUniqueQueries];
logger.info("--> registering {} queries", numQueries);
for (int i = 0; i < numQueries; i++) {
String value = values[i % numUniqueQueries];
expectedCount[i % numUniqueQueries]++;
QueryBuilder queryBuilder = matchQuery("field1", value);
client().prepareIndex(INDEX_NAME, TYPE_NAME, Integer.toString(i))
.setSource(jsonBuilder().startObject().field("query", queryBuilder).field("field2", "b").endObject()).execute()
.actionGet();
}
refresh();
for (int i = 0; i < numQueries; i++) {
String value = values[i % numUniqueQueries];
PercolateRequestBuilder percolateRequestBuilder = preparePercolate(client())
.setIndices(INDEX_NAME)
.setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", value).endObject()));
SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values());
percolateRequestBuilder.addAggregation(AggregationBuilders.terms("a").field("field2").collectMode(aggCollectionMode));
if (randomBoolean()) {
percolateRequestBuilder.setPercolateQuery(matchAllQuery());
}
boolean countOnly = randomBoolean();
if (countOnly) {
percolateRequestBuilder.setOnlyCount(countOnly);
} else {
// can only set size if we also keep track of matches (i.e. countOnly == false)
if (randomBoolean()) {
percolateRequestBuilder.setScore(true).setSize(expectedCount[i % numUniqueQueries]);
} else {
percolateRequestBuilder.setSortByScore(true).setSize(numQueries);
}
}
percolateRequestBuilder.addAggregation(PipelineAggregatorBuilders.maxBucket("max_a", "a>_count"));
PercolateResponse response = percolateRequestBuilder.execute().actionGet();
assertMatchCount(response, expectedCount[i % numUniqueQueries]);
if (!countOnly) {
assertThat(response.getMatches(), arrayWithSize(expectedCount[i % numUniqueQueries]));
}
Aggregations aggregations = response.getAggregations();
assertThat(aggregations.asList().size(), equalTo(2));
Terms terms = aggregations.get("a");
assertThat(terms, notNullValue());
assertThat(terms.getName(), equalTo("a"));
List<Terms.Bucket> buckets = new ArrayList<>(terms.getBuckets());
assertThat(buckets.size(), equalTo(1));
assertThat(buckets.get(0).getKeyAsString(), equalTo("b"));
assertThat(buckets.get(0).getDocCount(), equalTo((long) expectedCount[i % values.length]));
InternalBucketMetricValue maxA = aggregations.get("max_a");
assertThat(maxA, notNullValue());
assertThat(maxA.getName(), equalTo("max_a"));
assertThat(maxA.value(), equalTo((double) expectedCount[i % values.length]));
assertThat(maxA.keys(), equalTo(new String[] { "b" }));
}
}
public void testSignificantAggs() throws Exception {
client().admin().indices().prepareCreate(INDEX_NAME)
.addMapping(TYPE_NAME, "query", "type=percolator")
.execute().actionGet();
ensureGreen();
PercolateRequestBuilder percolateRequestBuilder = preparePercolate(client()).setIndices(INDEX_NAME).setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "value").endObject()))
.addAggregation(AggregationBuilders.significantTerms("a").field("field2"));
PercolateResponse response = percolateRequestBuilder.get();
assertNoFailures(response);
}
public void testSingleShardAggregations() throws Exception {
assertAcked(prepareCreate(INDEX_NAME).setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1))
.addMapping(TYPE_NAME, "query", "type=percolator")
.addMapping("type", "field1", "type=text", "field2", "type=keyword"));
ensureGreen();
int numQueries = scaledRandomIntBetween(250, 500);
logger.info("--> registering {} queries", numQueries);
for (int i = 0; i < numQueries; i++) {
String value = "value0";
QueryBuilder queryBuilder = matchQuery("field1", value);
client().prepareIndex(INDEX_NAME, TYPE_NAME, Integer.toString(i))
.setSource(jsonBuilder().startObject().field("query", queryBuilder).field("field2", i % 3 == 0 ? "b" : "a").endObject())
.execute()
.actionGet();
}
refresh();
for (int i = 0; i < numQueries; i++) {
String value = "value0";
PercolateRequestBuilder percolateRequestBuilder = preparePercolate(client())
.setIndices(INDEX_NAME)
.setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", value).endObject()));
SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values());
percolateRequestBuilder.addAggregation(AggregationBuilders.terms("terms").field("field2").collectMode(aggCollectionMode)
.order(Order.term(true)).shardSize(2).size(1));
if (randomBoolean()) {
percolateRequestBuilder.setPercolateQuery(matchAllQuery());
}
boolean countOnly = randomBoolean();
if (countOnly) {
percolateRequestBuilder.setOnlyCount(countOnly);
} else {
// can only set size if we also keep track of matches (i.e. countOnly == false)
if (randomBoolean()) {
percolateRequestBuilder.setScore(true).setSize(numQueries);
} else {
percolateRequestBuilder.setSortByScore(true).setSize(numQueries);
}
}
percolateRequestBuilder.addAggregation(PipelineAggregatorBuilders.maxBucket("max_terms", "terms>_count"));
PercolateResponse response = percolateRequestBuilder.execute().actionGet();
assertMatchCount(response, numQueries);
if (!countOnly) {
assertThat(response.getMatches(), arrayWithSize(numQueries));
}
Aggregations aggregations = response.getAggregations();
assertThat(aggregations.asList().size(), equalTo(2));
Terms terms = aggregations.get("terms");
assertThat(terms, notNullValue());
assertThat(terms.getName(), equalTo("terms"));
List<Terms.Bucket> buckets = new ArrayList<>(terms.getBuckets());
assertThat(buckets.size(), equalTo(1));
assertThat(buckets.get(0).getKeyAsString(), equalTo("a"));
InternalBucketMetricValue maxA = aggregations.get("max_terms");
assertThat(maxA, notNullValue());
assertThat(maxA.getName(), equalTo("max_terms"));
assertThat(maxA.keys(), equalTo(new String[] { "a" }));
}
}
}
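
The aggregation integration exercised by this test carries over to the search API: aggregations are attached to a search request whose query is a `percolate` query. A minimal sketch, assuming a Java `Client` handle and the index, type and field names used in PercolatorAggregationsIT; the wrapper class and method names are illustrative only:

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

import java.io.IOException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.percolator.PercolateQueryBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;

public class PercolatorAggregationsMigrationSketch {
    // Sketch only: names are placeholders mirroring the test above.
    // Buckets the matching percolator queries by their "field2" value.
    static Terms aggregateMatchingQueries(Client client, String value) throws IOException {
        SearchResponse response = client.prepareSearch("queries")
                .setQuery(new PercolateQueryBuilder("query", "type",
                        jsonBuilder().startObject().field("field1", value).endObject().bytes()))
                .addAggregation(AggregationBuilders.terms("a").field("field2"))
                .get();
        return response.getAggregations().get("a");
    }
}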

View File

@ -21,7 +21,6 @@ package org.elasticsearch.percolator;
import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;

View File

@ -19,6 +19,8 @@
package org.elasticsearch.percolator; package org.elasticsearch.percolator;
import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.WriteRequest;
@ -48,6 +50,8 @@ import java.util.Map;
import java.util.function.Function; import java.util.function.Function;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.smileBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.yamlBuilder;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery; import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
@ -58,16 +62,13 @@ import static org.elasticsearch.index.query.QueryBuilders.spanNearQuery;
import static org.elasticsearch.index.query.QueryBuilders.spanNotQuery; import static org.elasticsearch.index.query.QueryBuilders.spanNotQuery;
import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery; import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.percolator.PercolateSourceBuilder.docBuilder;
import static org.elasticsearch.percolator.PercolatorTestUtil.assertMatchCount;
import static org.elasticsearch.percolator.PercolatorTestUtil.convertFromTextArray;
import static org.elasticsearch.percolator.PercolatorTestUtil.preparePercolate;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.IsNull.notNullValue;
public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { public class PercolatorQuerySearchIT extends ESSingleNodeTestCase {
@ -87,19 +88,16 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase {
public void testPercolateScriptQuery() throws IOException { public void testPercolateScriptQuery() throws IOException {
client().admin().indices().prepareCreate("index").addMapping("type", "query", "type=percolator").get(); client().admin().indices().prepareCreate("index").addMapping("type", "query", "type=percolator").get();
ensureGreen();
client().prepareIndex("index", "type", "1") client().prepareIndex("index", "type", "1")
.setSource(jsonBuilder().startObject().field("query", QueryBuilders.scriptQuery( .setSource(jsonBuilder().startObject().field("query", QueryBuilders.scriptQuery(
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "1==1", Collections.emptyMap()))).endObject()) new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "1==1", Collections.emptyMap()))).endObject())
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.execute().actionGet(); .execute().actionGet();
        PercolateResponse response = preparePercolate(client())
                .setIndices("index").setDocumentType("type")
                .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject()))
                .execute().actionGet();
        assertMatchCount(response, 1L);
        assertThat(response.getMatches(), arrayWithSize(1));
        assertThat(convertFromTextArray(response.getMatches(), "index"), arrayContainingInAnyOrder("1"));
        SearchResponse response = client().prepareSearch("index")
                .setQuery(new PercolateQueryBuilder("query", "type", jsonBuilder().startObject().field("field1", "b").endObject().bytes()))
                .get();
        assertHitCount(response, 1);
        assertSearchHits(response, "1");
} }
public void testPercolatorQuery() throws Exception { public void testPercolatorQuery() throws Exception {
@ -605,4 +603,82 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase {
assertHitCount(response, 0); assertHitCount(response, 0);
} }
public void testPercolatorQueryViaMultiSearch() throws Exception {
createIndex("test", client().admin().indices().prepareCreate("test")
.addMapping("type", "field1", "type=text")
.addMapping("queries", "query", "type=percolator")
);
client().prepareIndex("test", "queries", "1")
.setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "b")).field("a", "b").endObject())
.execute().actionGet();
client().prepareIndex("test", "queries", "2")
.setSource(jsonBuilder().startObject().field("query", matchQuery("field1", "c")).endObject())
.execute().actionGet();
client().prepareIndex("test", "queries", "3")
.setSource(jsonBuilder().startObject().field("query", boolQuery()
.must(matchQuery("field1", "b"))
.must(matchQuery("field1", "c"))
).endObject())
.execute().actionGet();
client().prepareIndex("test", "queries", "4")
.setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject())
.execute().actionGet();
client().prepareIndex("test", "type", "1")
.setSource(jsonBuilder().startObject().field("field1", "c").endObject())
.execute().actionGet();
client().admin().indices().prepareRefresh().get();
MultiSearchResponse response = client().prepareMultiSearch()
.add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query", "type",
jsonBuilder().startObject().field("field1", "b").endObject().bytes())))
.add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query", "type",
yamlBuilder().startObject().field("field1", "c").endObject().bytes())))
.add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query", "type",
smileBuilder().startObject().field("field1", "b c").endObject().bytes())))
.add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query", "type",
jsonBuilder().startObject().field("field1", "d").endObject().bytes())))
.add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query", "type", "test", "type", "1", null, null, null)))
.add(client().prepareSearch("test") // non existing doc, so error element
.setQuery(new PercolateQueryBuilder("query", "type", "test", "type", "2", null, null, null)))
.get();
MultiSearchResponse.Item item = response.getResponses()[0];
assertHitCount(item.getResponse(), 2L);
assertSearchHits(item.getResponse(), "1", "4");
assertThat(item.getFailureMessage(), nullValue());
item = response.getResponses()[1];
assertHitCount(item.getResponse(), 2L);
assertSearchHits(item.getResponse(), "2", "4");
assertThat(item.getFailureMessage(), nullValue());
item = response.getResponses()[2];
assertHitCount(item.getResponse(), 4L);
assertSearchHits(item.getResponse(), "1", "2", "3", "4");
assertThat(item.getFailureMessage(), nullValue());
item = response.getResponses()[3];
assertHitCount(item.getResponse(), 1L);
assertSearchHits(item.getResponse(), "4");
assertThat(item.getFailureMessage(), nullValue());
item = response.getResponses()[4];
assertHitCount(item.getResponse(), 2L);
assertSearchHits(item.getResponse(), "2", "4");
assertThat(item.getFailureMessage(), nullValue());
item = response.getResponses()[5];
assertThat(item.getResponse(), nullValue());
assertThat(item.getFailureMessage(), notNullValue());
assertThat(item.getFailureMessage(), equalTo("all shards failed"));
assertThat(ExceptionsHelper.unwrapCause(item.getFailure().getCause()).getMessage(),
containsString("[test/type/2] couldn't be found"));
}
} }

View File

@ -1,60 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.Strings;
import org.junit.Assert;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertVersionSerializable;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.formatShardStatus;
/** Static method pulled out of PercolatorIT, used by other tests */
public class PercolatorTestUtil extends Assert {
public static PercolateRequestBuilder preparePercolate(ElasticsearchClient client) {
return new PercolateRequestBuilder(client, PercolateAction.INSTANCE);
}
public static MultiPercolateRequestBuilder prepareMultiPercolate(ElasticsearchClient client) {
return new MultiPercolateRequestBuilder(client, MultiPercolateAction.INSTANCE);
}
public static void assertMatchCount(PercolateResponse percolateResponse, long expectedHitCount) {
if (percolateResponse.getCount() != expectedHitCount) {
fail("Count is " + percolateResponse.getCount() + " but " + expectedHitCount + " was expected. " +
formatShardStatus(percolateResponse));
}
assertVersionSerializable(percolateResponse);
}
public static String[] convertFromTextArray(PercolateResponse.Match[] matches, String index) {
if (matches.length == 0) {
return Strings.EMPTY_ARRAY;
}
String[] strings = new String[matches.length];
for (int i = 0; i < matches.length; i++) {
assertEquals(index, matches[i].getIndex().string());
strings[i] = matches[i].getId().string();
}
return strings;
}
}

View File

@ -1,16 +0,0 @@
{"percolate" : {"index" : "my-index1", "type" : "my-type1", "routing" : "my-routing-1", "preference" : "_local", "ignore_unavailable" : false}}
{"doc" : {"field1" : "value1"}}
{"percolate" : {"indices" : ["my-index2", "my-index3"], "type" : "my-type1", "routing" : "my-routing-1", "preference" : "_local", "ignore_unavailable" : true}}
{"doc" : {"field1" : "value2"}}
{"count" : {"indices" : ["my-index4", "my-index5"], "type" : "my-type1", "routing" : "my-routing-1", "preference" : "_local", "expand_wildcards" : "open,closed"}}
{"doc" : {"field1" : "value3"}}
{"percolate" : {"id" : "1", "index" : "my-index6", "type" : "my-type1", "routing" : "my-routing-1", "preference" : "_local", "expand_wildcards" : ["open", "closed"]}}
{}
{"count" : {"id" : "2", "index" : "my-index7", "type" : "my-type1", "routing" : "my-routing-1", "preference" : "_local"}}
{}
{"percolate" : {"index" : "my-index8", "type" : "my-type1", "routing" : "my-routing-1", "preference" : "primary"}}
{"doc" : {"field1" : "value4"}}
{"percolate" : {"id" : "3", "index" : "my-index9", "type" : "my-type1", "percolate_index": "percolate-index1", "percolate_type": "other-type", "percolate_preference": "_local", "percolate_routing": "percolate-routing-1"}}
{}
{"percolate" : {"id" : "4", "index" : "my-index10", "type" : "my-type1", "allow_no_indices": false, "expand_wildcards" : ["open"]}}
{}

View File

@ -1,6 +0,0 @@
{"percolate" : {"routing" : "my-routing-1", "preference" : "_local"}}
{"doc" : {"field1" : "value1"}}
{"percolate" : {"index" : "my-index1", "type" : "my-type1", "routing" : "my-routing-1", "preference" : "_local", "ignore_unavailable" : true}}
{"doc" : {"field1" : "value2"}}
{"percolate" : {}}
{"doc" : {"field1" : "value3"}}

View File

@ -0,0 +1,50 @@
---
"Test percolator basics via rest":
- do:
indices.create:
index: queries_index
body:
mappings:
queries:
properties:
query:
type: percolator
test_type:
properties:
foo:
type: keyword
- do:
index:
index: queries_index
type: queries
id: test_percolator
body:
query:
match_all: {}
- do:
indices.refresh: {}
- do:
search:
body:
- query:
percolate:
document_type: test_type
field: query
document:
foo: bar
- match: { hits.total: 1 }
- do:
msearch:
body:
- index: queries_index
- query:
percolate:
document_type: test_type
field: query
document:
foo: bar
- match: { responses.0.hits.total: 1 }

View File

@ -1,56 +0,0 @@
---
"Basic multi-percolate":
- do:
indices.create:
index: percolator_index
body:
mappings:
queries:
properties:
query:
type: percolator
- do:
index:
index: percolator_index
type: my_type
id: 1
body: {foo: bar}
- do:
index:
index: percolator_index
type: queries
id: test_percolator
body:
query:
match_all: {}
- do:
indices.refresh: {}
- do:
mpercolate:
body:
- percolate:
index: percolator_index
type: my_type
- doc:
foo: bar
- percolate:
index: percolator_index1
type: my_type
- doc:
foo: bar
- percolate:
index: percolator_index
type: my_type
id: 1
- {}
- match: { responses.0.total: 1 }
- match: { responses.1.error.root_cause.0.type: index_not_found_exception }
- match: { responses.1.error.root_cause.0.reason: "/no.such.index/" }
- match: { responses.1.error.root_cause.0.index: percolator_index1 }
- match: { responses.2.total: 1 }

View File

@ -1,46 +0,0 @@
---
"Basic percolation tests":
- do:
indices.create:
index: test_index
body:
mappings:
queries:
properties:
query:
type: percolator
- do:
index:
index: test_index
type: queries
id: test_percolator
body:
query:
match_all: {}
- do:
indices.refresh: {}
- do:
percolate:
index: test_index
type: test_type
body:
doc:
foo: bar
- match: {'total': 1}
- match: {'matches': [{_index: test_index, _id: test_percolator}]}
- do:
count_percolate:
index: test_index
type: test_type
body:
doc:
foo: bar
- is_false: matches
- match: {'total': 1}

View File

@ -1,118 +0,0 @@
---
"Percolate existing documents":
- do:
indices.create:
index: percolator_index
body:
mappings:
queries:
properties:
query:
type: percolator
- do:
index:
index: percolator_index
type: queries
id: test_percolator
body:
query:
match_all: {}
tag: tag1
- do:
index:
index: percolator_index
type: test_type
id: 1
body:
foo: bar
- do:
indices.create:
index: my_index
- do:
index:
index: my_index
type: my_type
id: 1
body:
foo: bar
- do:
indices.refresh: {}
- do:
percolate:
index: percolator_index
type: test_type
id: 1
- match: {'matches': [{_index: percolator_index, _id: test_percolator}]}
- do:
percolate:
index: my_index
type: my_type
id: 1
percolate_index: percolator_index
percolate_type: test_type
- match: {'matches': [{_index: percolator_index, _id: test_percolator}]}
- do:
index:
index: my_index
type: my_type
id: 1
body:
foo: bar
- do:
percolate:
index: my_index
type: my_type
id: 1
version: 2
percolate_index: percolator_index
percolate_type: test_type
- match: {'matches': [{_index: percolator_index, _id: test_percolator}]}
- do:
catch: conflict
percolate:
index: my_index
type: my_type
id: 1
version: 1
percolate_index: percolator_index
percolate_type: test_type
- do:
percolate:
index: percolator_index
type: test_type
id: 1
body:
filter:
term:
tag: non_existing_tag
- match: {'matches': []}
- do:
percolate:
index: percolator_index
type: test_type
id: 1
body:
filter:
term:
tag: tag1
- match: {'matches': [{_index: percolator_index, _id: test_percolator, _score: 0.2876821}]}
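
Percolating an already indexed document, which this removed API handled through the `percolate_index`/`percolate_type` parameters, is now expressed with the indexed-document form of the `percolate` query. A minimal sketch, assuming a Java `Client` handle; the index, type and id values are placeholders matching this YAML test, and the wrapper class is illustrative only:

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.percolator.PercolateQueryBuilder;

public class PercolateExistingDocSketch {
    // Sketch only: names are placeholders, not part of the codebase.
    // Fetches my_index/my_type/1 and runs it against the percolator queries registered
    // in the "query" field of percolator_index; the routing, preference and version of
    // the fetched document are optional and left null here.
    static SearchResponse percolateExistingDoc(Client client) {
        return client.prepareSearch("percolator_index")
                .setQuery(new PercolateQueryBuilder("query", "test_type",
                        "my_index", "my_type", "1", null, null, null))
                .get();
    }
}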

View File

@ -1,37 +0,0 @@
---
"Basic percolation tests on an empty cluster":
- do:
indices.create:
index: test_index
body:
mappings:
queries:
properties:
query:
type: percolator
- do:
indices.refresh: {}
- do:
percolate:
index: test_index
type: test_type
body:
doc:
foo: bar
- match: {'total': 0}
- match: {'matches': []}
- do:
count_percolate:
index: test_index
type: test_type
body:
doc:
foo: bar
- is_false: matches
- match: {'total': 0}

View File

@ -1,46 +0,0 @@
---
"Basic percolation highlight query test":
- do:
indices.create:
index: test_index
body:
mappings:
type_1:
properties:
foo:
type: text
queries:
properties:
query:
type: percolator
- do:
index:
index: test_index
type: queries
id: test_percolator
body:
query:
match:
foo: bar
- do:
indices.refresh: {}
- do:
percolate:
index: test_index
type: type_1
body:
doc:
foo: "bar foo"
size: 1
highlight:
fields:
foo:
highlight_query:
match:
foo: foo
- match: {'total': 1}
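
Highlighting while percolating, as in the removed test above, also moves to the search API: a highlighter attached to the search request is applied to the document embedded in the `percolate` query. A sketch under the same index and mapping assumptions (the custom `highlight_query` is omitted for brevity; class and method names are illustrative only):

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

import java.io.IOException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.percolator.PercolateQueryBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;

public class PercolateHighlightSketch {
    // Sketch only: names are placeholders mirroring the removed YAML test.
    // Each hit is a matching percolator query; its highlight carries the snippets of
    // the percolated document's "foo" field that the query matched.
    static SearchResponse percolateWithHighlight(Client client) throws IOException {
        return client.prepareSearch("test_index")
                .setQuery(new PercolateQueryBuilder("query", "type_1",
                        jsonBuilder().startObject().field("foo", "bar foo").endObject().bytes()))
                .highlighter(new HighlightBuilder().field("foo"))
                .setSize(1)
                .get();
    }
}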

View File

@ -1,117 +0,0 @@
---
setup:
- do:
indices.create:
index: nestedindex
body:
mappings:
company:
properties:
companyname:
type: text
employee:
type: nested
properties:
name:
type: text
queries:
properties:
query:
type: percolator
- do:
indices.refresh: {}
- do:
index:
index: nestedindex
type: "queries"
id: query
body: { "query": { "nested": { "path": "employee", "score_mode": "avg", "query": { "match": { "employee.name": { "query": "virginia potts", "operator": "and"} } } } } }
- do:
indices.refresh: {}
---
"Basic percolation tests on nested doc":
- do:
percolate:
index: nestedindex
type: company
body: { "doc": { "companyname": "stark", "employee": [ { "name": "virginia stark"}, { "name": "tony potts"} ] } }
- match: {'total': 0}
- do:
percolate:
index: nestedindex
type: company
body: { "doc": { "companyname": "stark", "employee": [ { "name": "virginia potts"}, { "name": "tony stark"} ] } }
- match: {'total': 1}
---
"Percolate existing docs":
- do:
index:
index: nestedindex
type: company
id: notmatching
body: { "companyname": "stark", "employee": [ { "name": "virginia stark"}, { "name": "tony potts"} ] }
- do:
index:
index: nestedindex
type: company
id: matching
body: { "companyname": "stark", "employee": [ { "name": "virginia potts"}, { "name": "tony stark"} ] }
- do:
indices.refresh: {}
- do:
percolate:
index: nestedindex
type: company
id: notmatching
- match: {'total': 0}
- do:
percolate:
index: nestedindex
type: company
id: matching
- match: {'total': 1}
---
"Test multi percolate":
- do:
mpercolate:
body:
- percolate: {"index": "nestedindex", "type": "company"}
- doc: { "companyname": "stark", "employee": [ { "name": "virginia stark"}, { "name": "tony potts"} ] }
- percolate: {"index": "nestedindex", "type": "company"}
- doc: { "companyname": "stark", "employee": [ { "name": "virginia potts"}, { "name": "tony stark"} ] }
- match: {'responses.0.total': 0}
- match: {'responses.1.total': 1}
- do:
mpercolate:
body:
- percolate: {"index": "nestedindex", "type": "company"}
- doc: { "companyname": "stark", "employee": [ { "name": "virginia potts"}, {"name": "tony stark"} ] }
- percolate: {"index": "nestedindex", "type": "company"}
- doc: { "companyname": "stark", "employee": [ { "name": "virginia stark"}, { "name": "tony potts"} ] }
- match: {'responses.0.total': 1}
- match: {'responses.1.total': 0}

View File

@ -1,72 +0,0 @@
{
"count_percolate": {
"documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/search-percolate.html",
"methods": ["GET", "POST"],
"url": {
"path": "/{index}/{type}/_percolate/count",
"paths": ["/{index}/{type}/_percolate/count", "/{index}/{type}/{id}/_percolate/count"],
"parts": {
"index": {
"type": "string",
"required": true,
"description": "The index of the document being count percolated."
},
"type": {
"type": "string",
"required": true,
"description": "The type of the document being count percolated."
},
"id": {
"type": "string",
"required": false,
"description": "Substitute the document in the request body with a document that is known by the specified id. On top of the id, the index and type parameter will be used to retrieve the document from within the cluster."
}
},
"params": {
"routing": {
"type": "list",
"description": "A comma-separated list of specific routing values"
},
"preference": {
"type": "string",
"description": "Specify the node or shard the operation should be performed on (default: random)"
},
"ignore_unavailable": {
"type": "boolean",
"description": "Whether specified concrete indices should be ignored when unavailable (missing or closed)"
},
"allow_no_indices": {
"type": "boolean",
"description": "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)"
},
"expand_wildcards": {
"type": "enum",
"options": ["open", "closed","none","all"],
"default": "open",
"description": "Whether to expand wildcard expression to concrete indices that are open, closed or both."
},
"percolate_index": {
"type": "string",
"description": "The index to count percolate the document into. Defaults to index."
},
"percolate_type": {
"type": "string",
"description": "The type to count percolate document into. Defaults to type."
},
"version": {
"type": "number",
"description": "Explicit version number for concurrency control"
},
"version_type": {
"type": "enum",
"options": ["internal", "external", "external_gte", "force"],
"description": "Specific version type"
}
}
},
"body": {
"description": "The count percolator request definition using the percolate DSL",
"required": false
}
}
}
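
The count-only variant has no dedicated replacement endpoint; a search request with a `percolate` query and `size` set to `0` yields the same number via the total hit count. A minimal sketch, assuming a Java `Client` handle and placeholder index, type and field names; the wrapper class is illustrative only:

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

import java.io.IOException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.percolator.PercolateQueryBuilder;

public class CountPercolateSketch {
    // Sketch only: names are placeholders, not part of the codebase.
    // Returns only the number of percolator queries matching the document;
    // setSize(0) skips fetching the matching query documents themselves.
    static long countMatchingQueries(Client client) throws IOException {
        SearchResponse response = client.prepareSearch("my-index")
                .setQuery(new PercolateQueryBuilder("query", "my-type",
                        jsonBuilder().startObject().field("field1", "value").endObject().bytes()))
                .setSize(0)
                .get();
        return response.getHits().getTotalHits();
    }
}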

View File

@ -17,7 +17,7 @@
}, },
"metric" : { "metric" : {
"type" : "list", "type" : "list",
"options" : ["_all", "completion", "docs", "fielddata", "query_cache", "flush", "get", "indexing", "merge", "percolate", "request_cache", "refresh", "search", "segments", "store", "warmer", "suggest"], "options" : ["_all", "completion", "docs", "fielddata", "query_cache", "flush", "get", "indexing", "merge", "request_cache", "refresh", "search", "segments", "store", "warmer", "suggest"],
"description" : "Limit the information returned the specific metrics." "description" : "Limit the information returned the specific metrics."
} }
}, },

View File

@ -1,41 +0,0 @@
{
"mpercolate": {
"documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/search-percolate.html",
"methods": ["GET", "POST"],
"url": {
"path": "/_mpercolate",
"paths": ["/_mpercolate", "/{index}/_mpercolate", "/{index}/{type}/_mpercolate"],
"parts": {
"index": {
"type": "string",
"description": "The index of the document being count percolated to use as default"
},
"type": {
"type" : "string",
"description" : "The type of the document being percolated to use as default."
}
},
"params": {
"ignore_unavailable": {
"type": "boolean",
"description": "Whether specified concrete indices should be ignored when unavailable (missing or closed)"
},
"allow_no_indices": {
"type": "boolean",
"description": "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)"
},
"expand_wildcards": {
"type": "enum",
"options": ["open", "closed","none","all"],
"default": "open",
"description": "Whether to expand wildcard expression to concrete indices that are open, closed or both."
}
}
},
"body": {
"description": "The percolate request definitions (header & body pair), separated by newlines",
"required": true,
"serialize" : "bulk"
}
}
}

View File

@ -20,7 +20,7 @@
}, },
"index_metric" : { "index_metric" : {
"type" : "list", "type" : "list",
"options" : ["_all", "completion", "docs", "fielddata", "query_cache", "flush", "get", "indexing", "merge", "percolate", "request_cache", "refresh", "search", "segments", "store", "warmer", "suggest"], "options" : ["_all", "completion", "docs", "fielddata", "query_cache", "flush", "get", "indexing", "merge", "request_cache", "refresh", "search", "segments", "store", "warmer", "suggest"],
"description" : "Limit the information returned for `indices` metric to the specific index metrics. Isn't used if `indices` (or `all`) metric isn't specified." "description" : "Limit the information returned for `indices` metric to the specific index metrics. Isn't used if `indices` (or `all`) metric isn't specified."
}, },
"node_id": { "node_id": {

View File

@ -1,85 +0,0 @@
{
"percolate": {
"documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/search-percolate.html",
"methods": ["GET", "POST"],
"url": {
"path": "/{index}/{type}/_percolate",
"paths": ["/{index}/{type}/_percolate", "/{index}/{type}/{id}/_percolate"],
"parts": {
"index": {
"type" : "string",
"required" : true,
"description" : "The index of the document being percolated."
},
"type": {
"type" : "string",
"required" : true,
"description" : "The type of the document being percolated."
},
"id": {
"type" : "string",
"required" : false,
"description" : "Substitute the document in the request body with a document that is known by the specified id. On top of the id, the index and type parameter will be used to retrieve the document from within the cluster."
}
},
"params": {
"routing": {
"type" : "list",
"description" : "A comma-separated list of specific routing values"
},
"preference": {
"type" : "string",
"description" : "Specify the node or shard the operation should be performed on (default: random)"
},
"ignore_unavailable": {
"type" : "boolean",
"description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)"
},
"allow_no_indices": {
"type" : "boolean",
"description" : "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)"
},
"expand_wildcards": {
"type" : "enum",
"options" : ["open","closed","none","all"],
"default" : "open",
"description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both."
},
"percolate_index": {
"type" : "string",
"description" : "The index to percolate the document into. Defaults to index."
},
"percolate_type": {
"type" : "string",
"description" : "The type to percolate document into. Defaults to type."
},
"percolate_routing": {
"type" : "string",
"description" : "The routing value to use when percolating the existing document."
},
"percolate_preference": {
"type" : "string",
"description" : "Which shard to prefer when executing the percolate request."
},
"percolate_format": {
"type" : "enum",
"options" : ["ids"],
"description" : "Return an array of matching query IDs instead of objects"
},
"version" : {
"type" : "number",
"description" : "Explicit version number for concurrency control"
},
"version_type": {
"type" : "enum",
"options" : ["internal", "external", "external_gte", "force"],
"description" : "Specific version type"
}
}
},
"body": {
"description" : "The percolator request definition using the percolate DSL",
"required" : false
}
}
}