Add analyze API to high-level rest client (#31577)

Alan Woodward 2018-07-03 15:57:02 +01:00 committed by GitHub
parent 093ea037b4
commit 1d114071da
11 changed files with 720 additions and 8 deletions

IndicesClient.java

@ -23,6 +23,8 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
@ -752,4 +754,32 @@ public final class IndicesClient {
restHighLevelClient.performRequestAsyncAndParseEntity(getIndexTemplatesRequest, RequestConverters::getTemplates,
options, GetIndexTemplatesResponse::fromXContent, listener, emptySet());
}
/**
 * Calls the analyze API
 *
 * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-analyze.html">Analyze API on elastic.co</a>
 *
 * @param request the request
 * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
 * @return the response
 * @throws IOException in case there is a problem sending the request or parsing back the response
 */
public AnalyzeResponse analyze(AnalyzeRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::analyze, options,
AnalyzeResponse::fromXContent, emptySet());
}
/**
* Asynchronously calls the analyze API
*
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-analyze.html">Analyze API on elastic.co</a>
*
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
*/
public void analyzeAsync(AnalyzeRequest request, RequestOptions options,
ActionListener<AnalyzeResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::analyze, options,
AnalyzeResponse::fromXContent, listener, emptySet());
}
}
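
For orientation, a minimal usage sketch of the two new methods. This assumes an already-configured `RestHighLevelClient` named `client`; the analyzer name is illustrative:

AnalyzeRequest request = new AnalyzeRequest()
    .text("Some text to analyze")
    .analyzer("standard");

// blocking call
AnalyzeResponse response = client.indices().analyze(request, RequestOptions.DEFAULT);
for (AnalyzeResponse.AnalyzeToken token : response.getTokens()) {
    System.out.println(token.getTerm() + " @ " + token.getPosition());
}

// non-blocking call
client.indices().analyzeAsync(request, RequestOptions.DEFAULT, new ActionListener<AnalyzeResponse>() {
    @Override
    public void onResponse(AnalyzeResponse resp) { /* inspect resp.getTokens() */ }

    @Override
    public void onFailure(Exception e) { /* handle failure */ }
});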

RequestConverters.java

@ -45,6 +45,7 @@ import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptReque
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
@ -1019,6 +1020,18 @@ final class RequestConverters {
return request;
}
static Request analyze(AnalyzeRequest request) throws IOException {
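// builds "/_analyze", or "/{index}/_analyze" when the request targets an index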
EndpointBuilder builder = new EndpointBuilder();
String index = request.index();
if (index != null) {
builder.addPathPart(index);
}
builder.addPathPartAsIs("_analyze");
Request req = new Request(HttpGet.METHOD_NAME, builder.build());
req.setEntity(createEntity(request, REQUEST_BODY_CONTENT_TYPE));
return req;
}
static Request getScript(GetStoredScriptRequest getStoredScriptRequest) {
String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(getStoredScriptRequest.id()).build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);

IndicesClientIT.java

@ -29,6 +29,8 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
@ -1278,4 +1280,20 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
new GetIndexTemplatesRequest().names("the-template-*"), client.indices()::getTemplate, client.indices()::getTemplateAsync));
assertThat(notFound.status(), equalTo(RestStatus.NOT_FOUND));
}
public void testAnalyze() throws Exception {
RestHighLevelClient client = highLevelClient();
AnalyzeRequest noindexRequest = new AnalyzeRequest().text("One two three").analyzer("english");
AnalyzeResponse noindexResponse = execute(noindexRequest, client.indices()::analyze, client.indices()::analyzeAsync);
assertThat(noindexResponse.getTokens(), hasSize(3));
AnalyzeRequest detailsRequest = new AnalyzeRequest().text("One two three").analyzer("english").explain(true);
AnalyzeResponse detailsResponse = execute(detailsRequest, client.indices()::analyze, client.indices()::analyzeAsync);
assertNotNull(detailsResponse.detail());
}
}

RequestConvertersTests.java

@ -47,6 +47,7 @@ import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
@ -2239,6 +2240,22 @@ public class RequestConvertersTests extends ESTestCase {
assertThat(request.getEntity(), nullValue());
}
public void testAnalyzeRequest() throws Exception {
AnalyzeRequest indexAnalyzeRequest = new AnalyzeRequest()
.text("Here is some text")
.index("test_index")
.analyzer("test_analyzer");
Request request = RequestConverters.analyze(indexAnalyzeRequest);
assertThat(request.getEndpoint(), equalTo("/test_index/_analyze"));
assertToXContentBody(indexAnalyzeRequest, request.getEntity());
AnalyzeRequest analyzeRequest = new AnalyzeRequest()
.text("more text")
.analyzer("test_analyzer");
assertThat(RequestConverters.analyze(analyzeRequest).getEndpoint(), equalTo("/_analyze"));
}
public void testGetScriptRequest() {
GetStoredScriptRequest getStoredScriptRequest = new GetStoredScriptRequest("x-script");
Map<String, String> expectedParams = new HashMap<>();

IndicesClientDocumentationIT.java

@ -27,6 +27,9 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.analyze.DetailAnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
@ -2317,4 +2320,127 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
public void testAnalyze() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
{
// tag::analyze-builtin-request
AnalyzeRequest request = new AnalyzeRequest();
request.text("Some text to analyze", "Some more text to analyze"); // <1>
request.analyzer("english"); // <2>
// end::analyze-builtin-request
}
{
// tag::analyze-custom-request
AnalyzeRequest request = new AnalyzeRequest();
request.text("<b>Some text to analyze</b>");
request.addCharFilter("html_strip"); // <1>
request.tokenizer("standard"); // <2>
request.addTokenFilter("lowercase"); // <3>
Map<String, Object> stopFilter = new HashMap<>();
stopFilter.put("type", "stop");
stopFilter.put("stopwords", new String[]{ "to" }); // <4>
request.addTokenFilter(stopFilter); // <5>
// end::analyze-custom-request
}
{
// tag::analyze-custom-normalizer-request
AnalyzeRequest request = new AnalyzeRequest();
request.text("<b>BaR</b>");
request.addTokenFilter("lowercase");
// end::analyze-custom-normalizer-request
// tag::analyze-request-explain
request.explain(true); // <1>
request.attributes("keyword", "type"); // <2>
// end::analyze-request-explain
// tag::analyze-request-sync
AnalyzeResponse response = client.indices().analyze(request, RequestOptions.DEFAULT);
// end::analyze-request-sync
// tag::analyze-response-tokens
List<AnalyzeResponse.AnalyzeToken> tokens = response.getTokens(); // <1>
// end::analyze-response-tokens
// tag::analyze-response-detail
DetailAnalyzeResponse detail = response.detail(); // <1>
// end::analyze-response-detail
assertNull(tokens);
assertNotNull(detail.tokenizer());
}
CreateIndexRequest req = new CreateIndexRequest("my_index");
CreateIndexResponse resp = client.indices().create(req, RequestOptions.DEFAULT);
assertTrue(resp.isAcknowledged());
PutMappingRequest pmReq = new PutMappingRequest()
.indices("my_index")
.type("_doc")
.source("my_field", "type=text,analyzer=english");
PutMappingResponse pmResp = client.indices().putMapping(pmReq, RequestOptions.DEFAULT);
assertTrue(pmResp.isAcknowledged());
{
// tag::analyze-index-request
AnalyzeRequest request = new AnalyzeRequest();
request.index("my_index"); // <1>
request.analyzer("my_analyzer"); // <2>
request.text("some text to analyze");
// end::analyze-index-request
// tag::analyze-execute-listener
ActionListener<AnalyzeResponse> listener = new ActionListener<AnalyzeResponse>() {
@Override
public void onResponse(AnalyzeResponse analyzeTokens) {
}
@Override
public void onFailure(Exception e) {
}
};
// end::analyze-execute-listener
// use the field's built-in analyzer in the test, since "my_analyzer" is not defined on the index
request = new AnalyzeRequest();
request.index("my_index");
request.field("my_field");
request.text("some text to analyze");
// Use a blocking listener in the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::analyze-request-async
client.indices().analyzeAsync(request, RequestOptions.DEFAULT, listener);
// end::analyze-request-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
{
// tag::analyze-index-normalizer-request
AnalyzeRequest request = new AnalyzeRequest();
request.index("my_index"); // <1>
request.normalizer("my_normalizer"); // <2>
request.text("some text to analyze");
// end::analyze-index-normalizer-request
}
{
// tag::analyze-field-request
AnalyzeRequest request = new AnalyzeRequest();
request.index("my_index");
request.field("my_field");
request.text("some text to analyze");
// end::analyze-field-request
}
}
}

indices/analyze.asciidoc

@ -0,0 +1,119 @@
[[java-rest-high-analyze]]
=== Analyze API
[[java-rest-high-analyze-request]]
==== Analyze Request
An `AnalyzeRequest` contains the text to analyze, and one of several options to
specify how the analysis should be performed.
The simplest version uses a built-in analyzer:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-builtin-request]
---------------------------------------------------
<1> The text to include. Multiple strings are treated as a multi-valued field
<2> A built-in analyzer
You can configure a custom analyzer:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-custom-request]
---------------------------------------------------
<1> Configure char filters
<2> Configure the tokenizer
<3> Add a built-in token filter
<4> Configuration for a custom token filter
<5> Add the custom token filter
You can also build a custom normalizer by including only char filters and
token filters:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-custom-normalizer-request]
---------------------------------------------------
You can analyze text using an analyzer defined in an existing index:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-index-request]
---------------------------------------------------
<1> The index containing the mappings
<2> The analyzer defined on this index to use
Or you can use a normalizer:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-index-normalizer-request]
---------------------------------------------------
<1> The index containing the mappings
<2> The normalizer defined on this index to use
You can analyze text using the mappings for a particular field in an index:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-field-request]
---------------------------------------------------
==== Optional arguments
The following arguments can also optionally be provided:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-request-explain]
---------------------------------------------------
<1> Setting `explain` to `true` adds further details to the response
<2> Setting `attributes` restricts the response to only the token attributes you
are interested in
[[java-rest-high-analyze-sync]]
==== Synchronous Execution
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-request-sync]
---------------------------------------------------
[[java-rest-high-analyze-async]]
==== Asynchronous Execution
The asynchronous execution of an analyze request requires both the `AnalyzeRequest`
instance and an `ActionListener` instance to be passed to the asynchronous method:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-request-async]
---------------------------------------------------
The asynchronous method does not block and returns immediately. Once the request
completes, the `ActionListener` is called back: `onResponse` is invoked if the
execution completed successfully, and `onFailure` if it failed.
A typical listener for `AnalyzeResponse` looks like:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-execute-listener]
---------------------------------------------------
[[java-rest-high-analyze-response]]
==== Analyze Response
The returned `AnalyzeResponse` allows you to retrieve details of the analysis as
follows:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-response-tokens]
---------------------------------------------------
<1> `AnalyzeToken` holds information about the individual tokens produced by analysis
If `explain` was set to `true`, then information is instead returned from the `detail()`
method:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-response-detail]
---------------------------------------------------
<1> `DetailAnalyzeResponse` holds more detailed information about tokens produced by
the various substeps in the analysis chain.
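
Putting it together, here is a minimal end-to-end sketch. It is illustrative rather
than one of the documented snippets above, and assumes an already-configured
`RestHighLevelClient` named `client`:

["source","java"]
---------------------------------------------------
AnalyzeRequest request = new AnalyzeRequest();
request.text("Some text to analyze");
request.analyzer("english");

AnalyzeResponse response = client.indices().analyze(request, RequestOptions.DEFAULT);
for (AnalyzeResponse.AnalyzeToken token : response.getTokens()) {
    String term = token.getTerm();      // the token text
    int position = token.getPosition(); // its position in the token stream
}
---------------------------------------------------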

High-level client docs index (asciidoc)

@ -88,6 +88,7 @@ Alias Management::
* <<java-rest-high-exists-alias>>
* <<java-rest-high-get-alias>>
include::indices/analyze.asciidoc[]
include::indices/create_index.asciidoc[]
include::indices/delete_index.asciidoc[]
include::indices/indices_exists.asciidoc[]

AnalyzeRequest.java

@ -26,6 +26,8 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
@ -42,7 +44,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
* A request to analyze a text associated with a specific index. Allows providing
* the actual analyzer name to perform the analysis with.
*/
public class AnalyzeRequest extends SingleShardRequest<AnalyzeRequest> implements ToXContentObject {
private String[] text;
@ -62,7 +64,7 @@ public class AnalyzeRequest extends SingleShardRequest<AnalyzeRequest> {
private String normalizer;
public static class NameOrDefinition implements Writeable, ToXContentFragment {
// exactly one of these two members is not null
public final String name;
public final Settings definition;
@ -102,6 +104,15 @@ public class AnalyzeRequest extends SingleShardRequest<AnalyzeRequest> {
Settings.writeSettingsToStream(definition, out);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
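// a built-in name is written as a bare string; a custom definition as a settings object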
if (definition == null) {
return builder.value(name);
}
return definition.toXContent(builder, params);
}
}
public AnalyzeRequest() {
@ -171,6 +182,7 @@ public class AnalyzeRequest extends SingleShardRequest<AnalyzeRequest> {
this.charFilters.add(new NameOrDefinition(charFilter));
return this;
}
public List<NameOrDefinition> charFilters() {
return this.charFilters;
}
@ -260,4 +272,36 @@ public class AnalyzeRequest extends SingleShardRequest<AnalyzeRequest> {
out.writeOptionalString(normalizer);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("text", text);
if (Strings.isNullOrEmpty(analyzer) == false) {
builder.field("analyzer", analyzer);
}
if (tokenizer != null) {
tokenizer.toXContent(builder, params);
}
if (tokenFilters.size() > 0) {
builder.field("filter", tokenFilters);
}
if (charFilters.size() > 0) {
builder.field("char_filter", charFilters);
}
if (Strings.isNullOrEmpty(field) == false) {
builder.field("field", field);
}
if (explain) {
builder.field("explain", true);
}
if (attributes.length > 0) {
builder.field("attributes", attributes);
}
if (Strings.isNullOrEmpty(normalizer) == false) {
builder.field("normalizer", normalizer);
}
return builder.endObject();
}
}
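
As a sanity check on the serialization above, a simple request (analyzer name
illustrative) produces the same body the REST _analyze endpoint accepts:

AnalyzeRequest request = new AnalyzeRequest()
    .text("quick brown fox")
    .analyzer("standard");
// request.toXContent(...) renders: {"text":["quick brown fox"],"analyzer":"standard"}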

AnalyzeResponse.java

@ -20,17 +20,27 @@ package org.elasticsearch.action.admin.indices.analyze;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeResponse.AnalyzeToken>, ToXContentObject {
@ -46,6 +56,25 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
AnalyzeToken() {
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AnalyzeToken that = (AnalyzeToken) o;
return startOffset == that.startOffset &&
endOffset == that.endOffset &&
position == that.position &&
positionLength == that.positionLength &&
Objects.equals(term, that.term) &&
Objects.equals(attributes, that.attributes) &&
Objects.equals(type, that.type);
}
@Override
public int hashCode() {
return Objects.hash(term, startOffset, endOffset, position, positionLength, attributes, type);
}
public AnalyzeToken(String term, int position, int startOffset, int endOffset, int positionLength,
String type, Map<String, Object> attributes) {
this.term = term;
@ -97,7 +126,8 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
builder.field(Fields.POSITION_LENGTH, positionLength);
}
if (attributes != null && !attributes.isEmpty()) {
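// copy into a TreeMap so attributes are emitted in a deterministic key order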
Map<String, Object> sortedAttributes = new TreeMap<>(attributes);
for (Map.Entry<String, Object> entity : sortedAttributes.entrySet()) {
builder.field(entity.getKey(), entity.getValue());
}
}
@ -111,6 +141,50 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
return analyzeToken;
}
public static AnalyzeToken fromXContent(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
String field = null;
String term = "";
int position = -1;
int startOffset = -1;
int endOffset = -1;
int positionLength = 1;
String type = "";
Map<String, Object> attributes = new HashMap<>();
for (XContentParser.Token t = parser.nextToken(); t != XContentParser.Token.END_OBJECT; t = parser.nextToken()) {
if (t == XContentParser.Token.FIELD_NAME) {
field = parser.currentName();
continue;
}
if (Fields.TOKEN.equals(field)) {
term = parser.text();
} else if (Fields.POSITION.equals(field)) {
position = parser.intValue();
} else if (Fields.START_OFFSET.equals(field)) {
startOffset = parser.intValue();
} else if (Fields.END_OFFSET.equals(field)) {
endOffset = parser.intValue();
} else if (Fields.POSITION_LENGTH.equals(field)) {
positionLength = parser.intValue();
} else if (Fields.TYPE.equals(field)) {
type = parser.text();
} else {
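// any field that is not a known token property is kept as a generic attribute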
if (t == XContentParser.Token.VALUE_STRING) {
attributes.put(field, parser.text());
} else if (t == XContentParser.Token.VALUE_NUMBER) {
attributes.put(field, parser.numberValue());
} else if (t == XContentParser.Token.VALUE_BOOLEAN) {
attributes.put(field, parser.booleanValue());
} else if (t == XContentParser.Token.START_OBJECT) {
attributes.put(field, parser.map());
} else if (t == XContentParser.Token.START_ARRAY) {
attributes.put(field, parser.list());
}
}
}
return new AnalyzeToken(term, position, startOffset, endOffset, positionLength, type, attributes);
}
@Override
public void readFrom(StreamInput in) throws IOException {
term = in.readString();
@ -125,8 +199,11 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
positionLength = 1;
}
} else {
positionLength = 1;
}
type = in.readOptionalString();
attributes = in.readMap();
}
@Override
@ -139,7 +216,7 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
out.writeOptionalVInt(positionLength > 1 ? positionLength : null);
}
out.writeOptionalString(type);
out.writeMapWithConsistentOrder(attributes);
}
}
@ -188,6 +265,17 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
return builder;
}
private static final ConstructingObjectParser<AnalyzeResponse, Void> PARSER = new ConstructingObjectParser<>("analyze_response",
true, args -> new AnalyzeResponse((List<AnalyzeToken>) args[0], (DetailAnalyzeResponse) args[1]));
static {
PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> AnalyzeToken.fromXContent(p), new ParseField(Fields.TOKENS));
PARSER.declareObject(optionalConstructorArg(), DetailAnalyzeResponse.PARSER, new ParseField(Fields.DETAIL));
}
public static AnalyzeResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
@ -196,6 +284,9 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
for (int i = 0; i < size; i++) {
tokens.add(AnalyzeToken.readAnalyzeToken(in));
}
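// treat an empty token list as absent, so wire and XContent round-trips compare equal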
if (tokens.size() == 0) {
tokens = null;
}
detail = in.readOptionalStreamable(DetailAnalyzeResponse::new);
}
@ -213,6 +304,25 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
out.writeOptionalStreamable(detail);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AnalyzeResponse that = (AnalyzeResponse) o;
return Objects.equals(detail, that.detail) &&
Objects.equals(tokens, that.tokens);
}
@Override
public int hashCode() {
return Objects.hash(detail, tokens);
}
@Override
public String toString() {
return Strings.toString(this, true, true);
}
static final class Fields {
static final String TOKENS = "tokens";
static final String TOKEN = "token";

DetailAnalyzeResponse.java

@ -20,27 +20,37 @@
package org.elasticsearch.action.admin.indices.analyze;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
private boolean customAnalyzer = false;
private AnalyzeTokenList analyzer;
private CharFilteredText[] charfilters;
private AnalyzeTokenList tokenizer;
private AnalyzeTokenList[] tokenfilters;
DetailAnalyzeResponse() {
}
public DetailAnalyzeResponse(AnalyzeTokenList analyzer) {
this(false, analyzer, null, null, null);
}
@ -66,6 +76,7 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
}
public DetailAnalyzeResponse analyzer(AnalyzeTokenList analyzer) {
this.customAnalyzer = false;
this.analyzer = analyzer;
return this;
}
@ -75,6 +86,7 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
}
public DetailAnalyzeResponse charfilters(CharFilteredText[] charfilters) {
this.customAnalyzer = true;
this.charfilters = charfilters;
return this;
}
@ -84,6 +96,7 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
}
public DetailAnalyzeResponse tokenizer(AnalyzeTokenList tokenizer) {
this.customAnalyzer = true;
this.tokenizer = tokenizer;
return this;
}
@ -93,10 +106,31 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
}
public DetailAnalyzeResponse tokenfilters(AnalyzeTokenList[] tokenfilters) {
this.customAnalyzer = true;
this.tokenfilters = tokenfilters;
return this;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DetailAnalyzeResponse that = (DetailAnalyzeResponse) o;
return customAnalyzer == that.customAnalyzer &&
Objects.equals(analyzer, that.analyzer) &&
Arrays.equals(charfilters, that.charfilters) &&
Objects.equals(tokenizer, that.tokenizer) &&
Arrays.equals(tokenfilters, that.tokenfilters);
}
@Override
public int hashCode() {
int result = Objects.hash(customAnalyzer, analyzer, tokenizer);
result = 31 * result + Arrays.hashCode(charfilters);
result = 31 * result + Arrays.hashCode(tokenfilters);
return result;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.CUSTOM_ANALYZER, customAnalyzer);
@ -131,6 +165,32 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
return builder;
}
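// ConstructingObjectParser collects repeated objects into Lists; this class stores arrays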
@SuppressWarnings("unchecked")
private static <T> T[] fromList(Class<T> clazz, List<T> list) {
if (list == null) {
return null;
}
return list.toArray((T[])Array.newInstance(clazz, 0));
}
static final ConstructingObjectParser<DetailAnalyzeResponse, Void> PARSER = new ConstructingObjectParser<>("detail",
true, args -> new DetailAnalyzeResponse((boolean) args[0], (AnalyzeTokenList) args[1],
fromList(CharFilteredText.class, (List<CharFilteredText>)args[2]),
(AnalyzeTokenList) args[3],
fromList(AnalyzeTokenList.class, (List<AnalyzeTokenList>)args[4])));
static {
PARSER.declareBoolean(constructorArg(), new ParseField(Fields.CUSTOM_ANALYZER));
PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.ANALYZER));
PARSER.declareObjectArray(optionalConstructorArg(), CharFilteredText.PARSER, new ParseField(Fields.CHARFILTERS));
PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENIZER));
PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENFILTERS));
}
public static DetailAnalyzeResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
static final class Fields {
static final String NAME = "name";
static final String FILTERED_TEXT = "filtered_text";
@ -195,6 +255,22 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
private String name;
private AnalyzeResponse.AnalyzeToken[] tokens;
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AnalyzeTokenList that = (AnalyzeTokenList) o;
return Objects.equals(name, that.name) &&
Arrays.equals(tokens, that.tokens);
}
@Override
public int hashCode() {
int result = Objects.hash(name);
result = 31 * result + Arrays.hashCode(tokens);
return result;
}
AnalyzeTokenList() {
}
@ -235,6 +311,20 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
return builder;
}
private static final ConstructingObjectParser<AnalyzeTokenList, Void> PARSER = new ConstructingObjectParser<>("token_list",
true, args -> new AnalyzeTokenList((String) args[0],
fromList(AnalyzeResponse.AnalyzeToken.class, (List<AnalyzeResponse.AnalyzeToken>)args[1])));
static {
PARSER.declareString(constructorArg(), new ParseField(Fields.NAME));
PARSER.declareObjectArray(constructorArg(), (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p),
new ParseField(AnalyzeResponse.Fields.TOKENS));
}
public static AnalyzeTokenList fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
@ -264,6 +354,7 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
public static class CharFilteredText implements Streamable, ToXContentObject {
private String name;
private String[] texts;
CharFilteredText() {
}
@ -293,6 +384,18 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
return builder;
}
private static final ConstructingObjectParser<CharFilteredText, Void> PARSER = new ConstructingObjectParser<>("char_filtered_text",
true, args -> new CharFilteredText((String) args[0], ((List<String>) args[1]).toArray(new String[0])));
static {
PARSER.declareString(constructorArg(), new ParseField(Fields.NAME));
PARSER.declareStringArray(constructorArg(), new ParseField(Fields.FILTERED_TEXT));
}
public static CharFilteredText fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
public static CharFilteredText readCharFilteredText(StreamInput in) throws IOException {
CharFilteredText text = new CharFilteredText();
text.readFrom(in);
@ -310,5 +413,21 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
out.writeString(name);
out.writeStringArray(texts);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CharFilteredText that = (CharFilteredText) o;
return Objects.equals(name, that.name) &&
Arrays.equals(texts, that.texts);
}
@Override
public int hashCode() {
int result = Objects.hash(name);
result = 31 * result + Arrays.hashCode(texts);
return result;
}
}
}

AnalyzeResponseTests.java

@ -0,0 +1,115 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.analyze;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractStreamableXContentTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
public class AnalyzeResponseTests extends AbstractStreamableXContentTestCase<AnalyzeResponse> {
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
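// random fields injected under "tokens." would be parsed back as token attributes and break equality checks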
return s -> s.contains("tokens.");
}
@Override
protected AnalyzeResponse doParseInstance(XContentParser parser) throws IOException {
return AnalyzeResponse.fromXContent(parser);
}
@Override
protected AnalyzeResponse createBlankInstance() {
return new AnalyzeResponse();
}
@Override
protected AnalyzeResponse createTestInstance() {
int tokenCount = randomIntBetween(1, 30);
AnalyzeResponse.AnalyzeToken[] tokens = new AnalyzeResponse.AnalyzeToken[tokenCount];
for (int i = 0; i < tokenCount; i++) {
tokens[i] = randomToken();
}
DetailAnalyzeResponse dar = null;
if (randomBoolean()) {
dar = new DetailAnalyzeResponse();
if (randomBoolean()) {
dar.charfilters(new DetailAnalyzeResponse.CharFilteredText[]{
new DetailAnalyzeResponse.CharFilteredText("my_charfilter", new String[]{"one two"})
});
}
dar.tokenizer(new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenizer", tokens));
if (randomBoolean()) {
dar.tokenfilters(new DetailAnalyzeResponse.AnalyzeTokenList[]{
new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_1", tokens),
new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_2", tokens)
});
}
return new AnalyzeResponse(null, dar);
}
return new AnalyzeResponse(Arrays.asList(tokens), null);
}
private AnalyzeResponse.AnalyzeToken randomToken() {
String token = randomAlphaOfLengthBetween(1, 20);
int position = randomIntBetween(0, 1000);
int startOffset = randomIntBetween(0, 1000);
int endOffset = randomIntBetween(0, 1000);
int posLength = randomIntBetween(1, 5);
String type = randomAlphaOfLengthBetween(1, 20);
Map<String, Object> extras = new HashMap<>();
if (randomBoolean()) {
int entryCount = randomInt(6);
for (int i = 0; i < entryCount; i++) {
switch (randomInt(6)) {
case 0:
case 1:
case 2:
case 3:
String key = randomAlphaOfLength(5);
String value = randomAlphaOfLength(10);
extras.put(key, value);
break;
case 4:
String objkey = randomAlphaOfLength(5);
Map<String, String> obj = new HashMap<>();
obj.put(randomAlphaOfLength(5), randomAlphaOfLength(10));
extras.put(objkey, obj);
break;
case 5:
String listkey = randomAlphaOfLength(5);
List<String> list = new ArrayList<>();
list.add(randomAlphaOfLength(4));
list.add(randomAlphaOfLength(6));
extras.put(listkey, list);
break;
}
}
}
return new AnalyzeResponse.AnalyzeToken(token, position, startOffset, endOffset, posLength, type, extras);
}
}