Create client-only AnalyzeRequest/AnalyzeResponse classes (#42197)

This commit clones the existing AnalyzeRequest/AnalyzeResponse classes
to the high-level REST client, and adjusts the request converters to use
these new classes.

This is a prerequisite to removing the Streamable interface from the internal
server version of these classes.
Alan Woodward 2019-06-03 09:16:54 +01:00
parent 929215c0d5
commit 2129d06643
32 changed files with 2129 additions and 1439 deletions
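
In client code, the change swaps the server request's fluent setters for static
factory methods on the new client-side class. A minimal before/after sketch,
drawn from the test updates further down in this diff:

// Before: server-side class with fluent setters
// import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
AnalyzeRequest request = new AnalyzeRequest().text("One two three").analyzer("english");

// After: client-only class with static factory methods
// import org.elasticsearch.client.indices.AnalyzeRequest;
AnalyzeRequest request = AnalyzeRequest.withGlobalAnalyzer("english", "One two three");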

View File

@@ -22,8 +22,6 @@ package org.elasticsearch.client;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
@@ -47,6 +45,8 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.core.ShardsAcknowledgedResponse;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.AnalyzeResponse;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.CreateIndexResponse;
import org.elasticsearch.client.indices.FreezeIndexRequest;

View File

@@ -26,7 +26,6 @@ import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
@@ -41,6 +40,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.FreezeIndexRequest;
import org.elasticsearch.client.indices.GetFieldMappingsRequest;

View File

@@ -33,7 +33,6 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.explain.ExplainRequest;
@@ -52,6 +51,7 @@ import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.core.MultiTermVectorsRequest;
import org.elasticsearch.client.core.TermVectorsRequest;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.security.RefreshPolicy;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Nullable;

View File

@@ -0,0 +1,343 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.indices;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* A request to analyze text
*/
public class AnalyzeRequest implements Validatable, ToXContentObject {
private String index;
private String[] text;
private String analyzer;
private NameOrDefinition tokenizer;
private final List<NameOrDefinition> tokenFilters = new ArrayList<>();
private final List<NameOrDefinition> charFilters = new ArrayList<>();
private String field;
private boolean explain = false;
private String[] attributes = Strings.EMPTY_ARRAY;
private String normalizer;
/**
* Analyzes text using a global analyzer
*/
public static AnalyzeRequest withGlobalAnalyzer(String analyzer, String... text) {
return new AnalyzeRequest(null, analyzer, null, null, text);
}
/**
* Analyzes text using a custom analyzer built from global components
*/
public static CustomAnalyzerBuilder buildCustomAnalyzer(String tokenizer) {
return new CustomAnalyzerBuilder(null, new NameOrDefinition(tokenizer));
}
/**
* Analyzes text using a custom analyzer built from global components
*/
public static CustomAnalyzerBuilder buildCustomAnalyzer(Map<String, Object> tokenizerSettings) {
return new CustomAnalyzerBuilder(null, new NameOrDefinition(tokenizerSettings));
}
/**
* Analyzes text using a custom analyzer built from components defined on an index
*/
public static CustomAnalyzerBuilder buildCustomAnalyzer(String index, String tokenizer) {
return new CustomAnalyzerBuilder(index, new NameOrDefinition(tokenizer));
}
/**
* Analyzes text using a custom analyzer built from components defined on an index
*/
public static CustomAnalyzerBuilder buildCustomAnalyzer(String index, Map<String, Object> tokenizerSettings) {
return new CustomAnalyzerBuilder(index, new NameOrDefinition(tokenizerSettings));
}
/**
* Analyzes text using a named analyzer on an index
*/
public static AnalyzeRequest withIndexAnalyzer(String index, String analyzer, String... text) {
return new AnalyzeRequest(index, analyzer, null, null, text);
}
/**
* Analyzes text using the analyzer defined on a specific field within an index
*/
public static AnalyzeRequest withField(String index, String field, String... text) {
return new AnalyzeRequest(index, null, null, field, text);
}
/**
* Analyzes text using a named normalizer on an index
*/
public static AnalyzeRequest withNormalizer(String index, String normalizer, String... text) {
return new AnalyzeRequest(index, null, normalizer, null, text);
}
/**
* Analyzes text using a custom normalizer built from global components
*/
public static CustomAnalyzerBuilder buildCustomNormalizer() {
return new CustomAnalyzerBuilder(null, null);
}
/**
* Analyzes text using a custom normalizer built from components defined on an index
*/
public static CustomAnalyzerBuilder buildCustomNormalizer(String index) {
return new CustomAnalyzerBuilder(index, null);
}
/**
* Helper class to build custom analyzer definitions
*/
public static class CustomAnalyzerBuilder {
final NameOrDefinition tokenizer;
final String index;
List<NameOrDefinition> charFilters = new ArrayList<>();
List<NameOrDefinition> tokenFilters = new ArrayList<>();
CustomAnalyzerBuilder(String index, NameOrDefinition tokenizer) {
this.tokenizer = tokenizer;
this.index = index;
}
public CustomAnalyzerBuilder addCharFilter(String name) {
charFilters.add(new NameOrDefinition(name));
return this;
}
public CustomAnalyzerBuilder addCharFilter(Map<String, Object> settings) {
charFilters.add(new NameOrDefinition(settings));
return this;
}
public CustomAnalyzerBuilder addTokenFilter(String name) {
tokenFilters.add(new NameOrDefinition(name));
return this;
}
public CustomAnalyzerBuilder addTokenFilter(Map<String, Object> settings) {
tokenFilters.add(new NameOrDefinition(settings));
return this;
}
public AnalyzeRequest build(String... text) {
return new AnalyzeRequest(index, tokenizer, charFilters, tokenFilters, text);
}
}
private AnalyzeRequest(String index, String analyzer, String normalizer, String field, String... text) {
this.index = index;
this.analyzer = analyzer;
this.normalizer = normalizer;
this.field = field;
this.text = text;
}
private AnalyzeRequest(String index, NameOrDefinition tokenizer, List<NameOrDefinition> charFilters,
List<NameOrDefinition> tokenFilters, String... text) {
this.index = index;
this.analyzer = null;
this.normalizer = null;
this.field = null;
this.tokenizer = tokenizer;
this.charFilters.addAll(charFilters);
this.tokenFilters.addAll(tokenFilters);
this.text = text;
}
static class NameOrDefinition implements ToXContentFragment {
// exactly one of these two members is not null
public final String name;
public final Settings definition;
NameOrDefinition(String name) {
this.name = Objects.requireNonNull(name);
this.definition = null;
}
NameOrDefinition(Settings settings) {
this.name = null;
this.definition = Objects.requireNonNull(settings);
}
NameOrDefinition(Map<String, ?> definition) {
this.name = null;
Objects.requireNonNull(definition);
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(definition);
this.definition = Settings.builder().loadFromSource(Strings.toString(builder), builder.contentType()).build();
} catch (IOException e) {
throw new IllegalArgumentException("Failed to parse [" + definition + "]", e);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (definition == null) {
return builder.value(name);
}
builder.startObject();
definition.toXContent(builder, params);
builder.endObject();
return builder;
}
}
/**
* Returns the index that the request should be executed against, or {@code null} if
* no index is specified
*/
public String index() {
return this.index;
}
/**
* Returns the text to be analyzed
*/
public String[] text() {
return this.text;
}
/**
* Returns the named analyzer used for analysis, if defined
*/
public String analyzer() {
return this.analyzer;
}
/**
* Returns the named normalizer used for analysis, if defined
*/
public String normalizer() {
return this.normalizer;
}
/**
* Returns the custom tokenizer used for analysis, if defined
*/
public NameOrDefinition tokenizer() {
return this.tokenizer;
}
/**
* Returns the custom token filters used for analysis, if defined
*/
public List<NameOrDefinition> tokenFilters() {
return this.tokenFilters;
}
/**
* Returns the custom character filters used for analysis, if defined
*/
public List<NameOrDefinition> charFilters() {
return this.charFilters;
}
/**
* Returns the field to take an Analyzer from, if defined
*/
public String field() {
return this.field;
}
/**
* Set whether or not detailed explanations of analysis should be returned
*/
public AnalyzeRequest explain(boolean explain) {
this.explain = explain;
return this;
}
public boolean explain() {
return this.explain;
}
public AnalyzeRequest attributes(String... attributes) {
if (attributes == null) {
throw new IllegalArgumentException("attributes must not be null");
}
this.attributes = attributes;
return this;
}
public String[] attributes() {
return this.attributes;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("text", text);
if (Strings.isNullOrEmpty(analyzer) == false) {
builder.field("analyzer", analyzer);
}
if (tokenizer != null) {
builder.field("tokenizer", tokenizer);
}
if (tokenFilters.size() > 0) {
builder.field("filter", tokenFilters);
}
if (charFilters.size() > 0) {
builder.field("char_filter", charFilters);
}
if (Strings.isNullOrEmpty(field) == false) {
builder.field("field", field);
}
if (explain) {
builder.field("explain", true);
}
if (attributes.length > 0) {
builder.field("attributes", attributes);
}
if (Strings.isNullOrEmpty(normalizer) == false) {
builder.field("normalizer", normalizer);
}
return builder.endObject();
}
}
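
The factory methods and builder above cover named analyzers, field analyzers,
normalizers, and fully custom chains. A short usage sketch (the index and
component names are hypothetical; needs java.util.HashMap and java.util.Map):

// Named analyzer defined on an index
AnalyzeRequest byAnalyzer = AnalyzeRequest.withIndexAnalyzer("my_index", "my_analyzer", "some text");

// Custom analyzer assembled from global components; explain(true) asks for per-stage token details
Map<String, Object> stopFilter = new HashMap<>();
stopFilter.put("type", "stop");
stopFilter.put("stopwords", new String[]{ "to" });
AnalyzeRequest custom = AnalyzeRequest.buildCustomAnalyzer("standard")
    .addCharFilter("html_strip")
    .addTokenFilter("lowercase")
    .addTokenFilter(stopFilter)
    .build("Some text to analyze")
    .explain(true);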

View File

@@ -0,0 +1,183 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.indices;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class AnalyzeResponse {
private static final String TOKENS = "tokens";
private static final String DETAIL = "detail";
public static class AnalyzeToken {
private String term;
private int startOffset;
private int endOffset;
private int position;
private int positionLength = 1;
private String type;
private final Map<String, Object> attributes = new HashMap<>();
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AnalyzeResponse.AnalyzeToken that = (AnalyzeResponse.AnalyzeToken) o;
return startOffset == that.startOffset &&
endOffset == that.endOffset &&
position == that.position &&
positionLength == that.positionLength &&
Objects.equals(term, that.term) &&
Objects.equals(attributes, that.attributes) &&
Objects.equals(type, that.type);
}
@Override
public int hashCode() {
return Objects.hash(term, startOffset, endOffset, position, positionLength, attributes, type);
}
public String getTerm() {
return this.term;
}
private void setTerm(String term) {
this.term = term;
}
public int getStartOffset() {
return this.startOffset;
}
private void setStartOffset(int startOffset) {
this.startOffset = startOffset;
}
public int getEndOffset() {
return this.endOffset;
}
private void setEndOffset(int endOffset) {
this.endOffset = endOffset;
}
public int getPosition() {
return this.position;
}
private void setPosition(int position) {
this.position = position;
}
public int getPositionLength() {
return this.positionLength;
}
private void setPositionLength(int positionLength) {
this.positionLength = positionLength;
}
public String getType() {
return this.type;
}
private void setType(String type) {
this.type = type;
}
public Map<String, Object> getAttributes() {
return this.attributes;
}
private void setAttribute(String key, Object value) {
this.attributes.put(key, value);
}
private static final ObjectParser<AnalyzeToken, Void> PARSER
= new ObjectParser<>("analyze_token", AnalyzeToken::setAttribute, AnalyzeToken::new);
static {
PARSER.declareString(AnalyzeToken::setTerm, new ParseField("token"));
PARSER.declareString(AnalyzeToken::setType, new ParseField("type"));
PARSER.declareInt(AnalyzeToken::setPosition, new ParseField("position"));
PARSER.declareInt(AnalyzeToken::setStartOffset, new ParseField("start_offset"));
PARSER.declareInt(AnalyzeToken::setEndOffset, new ParseField("end_offset"));
PARSER.declareInt(AnalyzeToken::setPositionLength, new ParseField("positionLength"));
}
public static AnalyzeToken fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
}
private final DetailAnalyzeResponse detail;
private final List<AnalyzeResponse.AnalyzeToken> tokens;
private AnalyzeResponse(List<AnalyzeResponse.AnalyzeToken> tokens, DetailAnalyzeResponse detail) {
this.tokens = tokens;
this.detail = detail;
}
public List<AnalyzeResponse.AnalyzeToken> getTokens() {
return this.tokens;
}
public DetailAnalyzeResponse detail() {
return this.detail;
}
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<AnalyzeResponse, Void> PARSER = new ConstructingObjectParser<>("analyze_response",
true, args -> new AnalyzeResponse((List<AnalyzeResponse.AnalyzeToken>) args[0], (DetailAnalyzeResponse) args[1]));
static {
PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeToken.PARSER, new ParseField(TOKENS));
PARSER.declareObject(optionalConstructorArg(), DetailAnalyzeResponse.PARSER, new ParseField(DETAIL));
}
public static AnalyzeResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AnalyzeResponse that = (AnalyzeResponse) o;
return Objects.equals(detail, that.detail) &&
Objects.equals(tokens, that.tokens);
}
@Override
public int hashCode() {
return Objects.hash(detail, tokens);
}
}
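
Because this response is built purely from XContent, it can be parsed from any
JSON shaped like a server _analyze reply. A minimal sketch, assuming the usual
org.elasticsearch.common.xcontent helpers (XContentType, NamedXContentRegistry,
DeprecationHandler) are on the classpath:

String json = "{\"tokens\":[{\"token\":\"one\",\"start_offset\":0,"
        + "\"end_offset\":3,\"type\":\"<ALPHANUM>\",\"position\":0}]}";
// createParser throws IOException; handle or declare it in real code
try (XContentParser parser = XContentType.JSON.xContent().createParser(
        NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
    AnalyzeResponse response = AnalyzeResponse.fromXContent(parser);
    assert "one".equals(response.getTokens().get(0).getTerm());
}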

View File

@@ -0,0 +1,214 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.indices;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DetailAnalyzeResponse {
private final boolean customAnalyzer;
private final AnalyzeTokenList analyzer;
private final CharFilteredText[] charfilters;
private final AnalyzeTokenList tokenizer;
private final AnalyzeTokenList[] tokenfilters;
private DetailAnalyzeResponse(boolean customAnalyzer,
AnalyzeTokenList analyzer,
List<CharFilteredText> charfilters,
AnalyzeTokenList tokenizer,
List<AnalyzeTokenList> tokenfilters) {
this.customAnalyzer = customAnalyzer;
this.analyzer = analyzer;
this.charfilters = charfilters == null ? null : charfilters.toArray(new CharFilteredText[]{});
this.tokenizer = tokenizer;
this.tokenfilters = tokenfilters == null ? null : tokenfilters.toArray(new AnalyzeTokenList[]{});
}
public AnalyzeTokenList analyzer() {
return this.analyzer;
}
public CharFilteredText[] charfilters() {
return this.charfilters;
}
public AnalyzeTokenList tokenizer() {
return tokenizer;
}
public AnalyzeTokenList[] tokenfilters() {
return tokenfilters;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DetailAnalyzeResponse that = (DetailAnalyzeResponse) o;
return customAnalyzer == that.customAnalyzer &&
Objects.equals(analyzer, that.analyzer) &&
Arrays.equals(charfilters, that.charfilters) &&
Objects.equals(tokenizer, that.tokenizer) &&
Arrays.equals(tokenfilters, that.tokenfilters);
}
@Override
public int hashCode() {
int result = Objects.hash(customAnalyzer, analyzer, tokenizer);
result = 31 * result + Arrays.hashCode(charfilters);
result = 31 * result + Arrays.hashCode(tokenfilters);
return result;
}
@SuppressWarnings("unchecked")
static final ConstructingObjectParser<DetailAnalyzeResponse, Void> PARSER = new ConstructingObjectParser<>("detail",
true, args -> new DetailAnalyzeResponse(
(boolean) args[0],
(AnalyzeTokenList) args[1],
(List<CharFilteredText>)args[2],
(AnalyzeTokenList) args[3],
(List<AnalyzeTokenList>)args[4]));
static {
PARSER.declareBoolean(constructorArg(), new ParseField("custom_analyzer"));
PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField("analyzer"));
PARSER.declareObjectArray(optionalConstructorArg(), CharFilteredText.PARSER, new ParseField("charfilters"));
PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField("tokenizer"));
PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField("tokenfilters"));
}
public static DetailAnalyzeResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
public static class AnalyzeTokenList {
private final String name;
private final AnalyzeResponse.AnalyzeToken[] tokens;
private static final String TOKENS = "tokens";
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AnalyzeTokenList that = (AnalyzeTokenList) o;
return Objects.equals(name, that.name) &&
Arrays.equals(tokens, that.tokens);
}
@Override
public int hashCode() {
int result = Objects.hash(name);
result = 31 * result + Arrays.hashCode(tokens);
return result;
}
public AnalyzeTokenList(String name, List<AnalyzeResponse.AnalyzeToken> tokens) {
this.name = name;
this.tokens = tokens.toArray(new AnalyzeResponse.AnalyzeToken[]{});
}
public String getName() {
return name;
}
public AnalyzeResponse.AnalyzeToken[] getTokens() {
return tokens;
}
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<AnalyzeTokenList, Void> PARSER = new ConstructingObjectParser<>("token_list",
true, args -> new AnalyzeTokenList((String) args[0],
(List<AnalyzeResponse.AnalyzeToken>)args[1]));
static {
PARSER.declareString(constructorArg(), new ParseField("name"));
PARSER.declareObjectArray(constructorArg(), (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p),
new ParseField("tokens"));
}
public static AnalyzeTokenList fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
}
public static class CharFilteredText {
private final String name;
private final String[] texts;
CharFilteredText(String name, String[] texts) {
this.name = name;
if (texts != null) {
this.texts = texts;
} else {
this.texts = Strings.EMPTY_ARRAY;
}
}
public String getName() {
return name;
}
public String[] getTexts() {
return texts;
}
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<CharFilteredText, Void> PARSER = new ConstructingObjectParser<>("char_filtered_text",
true, args -> new CharFilteredText((String) args[0], ((List<String>) args[1]).toArray(new String[0])));
static {
PARSER.declareString(constructorArg(), new ParseField("name"));
PARSER.declareStringArray(constructorArg(), new ParseField("filtered_text"));
}
public static CharFilteredText fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CharFilteredText that = (CharFilteredText) o;
return Objects.equals(name, that.name) &&
Arrays.equals(texts, that.texts);
}
@Override
public int hashCode() {
int result = Objects.hash(name);
result = 31 * result + Arrays.hashCode(texts);
return result;
}
}
}
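
When the originating request set explain(true), the parsed AnalyzeResponse
carries one of these detail objects instead of a flat token list. A sketch of
walking it, assuming response is an already-parsed AnalyzeResponse:

DetailAnalyzeResponse detail = response.detail();
if (detail != null && detail.tokenfilters() != null) {
    for (DetailAnalyzeResponse.AnalyzeTokenList stage : detail.tokenfilters()) {
        // each entry reports the token stream as it looked after that filter ran
        for (AnalyzeResponse.AnalyzeToken token : stage.getTokens()) {
            System.out.println(stage.getName() + " -> " + token.getTerm());
        }
    }
}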

View File

@@ -28,8 +28,6 @@ import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
@@ -58,6 +56,8 @@ import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.broadcast.BroadcastResponse;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.core.ShardsAcknowledgedResponse;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.AnalyzeResponse;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.CreateIndexResponse;
import org.elasticsearch.client.indices.FreezeIndexRequest;
@@ -1852,12 +1852,12 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
RestHighLevelClient client = highLevelClient();
AnalyzeRequest noindexRequest = new AnalyzeRequest().text("One two three").analyzer("english");
AnalyzeRequest noindexRequest = AnalyzeRequest.withGlobalAnalyzer("english", "One two three");
AnalyzeResponse noindexResponse = execute(noindexRequest, client.indices()::analyze, client.indices()::analyzeAsync);
assertThat(noindexResponse.getTokens(), hasSize(3));
AnalyzeRequest detailsRequest = new AnalyzeRequest().text("One two three").analyzer("english").explain(true);
AnalyzeRequest detailsRequest = AnalyzeRequest.withGlobalAnalyzer("english", "One two three").explain(true);
AnalyzeResponse detailsResponse = execute(detailsRequest, client.indices()::analyze, client.indices()::analyzeAsync);
assertNotNull(detailsResponse.detail());

View File

@ -29,7 +29,6 @@ import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
@ -45,6 +44,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeType;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.GetFieldMappingsRequest;
import org.elasticsearch.client.indices.GetIndexRequest;
@@ -86,18 +86,14 @@ import static org.hamcrest.Matchers.nullValue;
public class IndicesRequestConvertersTests extends ESTestCase {
public void testAnalyzeRequest() throws Exception {
AnalyzeRequest indexAnalyzeRequest = new AnalyzeRequest()
.text("Here is some text")
.index("test_index")
.analyzer("test_analyzer");
AnalyzeRequest indexAnalyzeRequest
= AnalyzeRequest.withIndexAnalyzer("test_index", "test_analyzer", "Here is some text");
Request request = IndicesRequestConverters.analyze(indexAnalyzeRequest);
assertThat(request.getEndpoint(), equalTo("/test_index/_analyze"));
RequestConvertersTests.assertToXContentBody(indexAnalyzeRequest, request.getEntity());
AnalyzeRequest analyzeRequest = new AnalyzeRequest()
.text("more text")
.analyzer("test_analyzer");
AnalyzeRequest analyzeRequest = AnalyzeRequest.withGlobalAnalyzer("test_analyzer", "more text");
assertThat(IndicesRequestConverters.analyze(analyzeRequest).getEndpoint(), equalTo("/_analyze"));
}

View File

@@ -32,7 +32,6 @@ import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkShardRequest;
import org.elasticsearch.action.delete.DeleteRequest;
@@ -57,6 +56,7 @@ import org.elasticsearch.client.RequestConverters.EndpointBuilder;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.core.MultiTermVectorsRequest;
import org.elasticsearch.client.core.TermVectorsRequest;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
@@ -1643,18 +1643,14 @@ public class RequestConvertersTests extends ESTestCase {
}
public void testAnalyzeRequest() throws Exception {
AnalyzeRequest indexAnalyzeRequest = new AnalyzeRequest()
.text("Here is some text")
.index("test_index")
.analyzer("test_analyzer");
AnalyzeRequest indexAnalyzeRequest
= AnalyzeRequest.withIndexAnalyzer("test_index", "test_analyzer", "Here is some text");
Request request = RequestConverters.analyze(indexAnalyzeRequest);
assertThat(request.getEndpoint(), equalTo("/test_index/_analyze"));
assertToXContentBody(indexAnalyzeRequest, request.getEntity());
AnalyzeRequest analyzeRequest = new AnalyzeRequest()
.text("more text")
.analyzer("test_analyzer");
AnalyzeRequest analyzeRequest = AnalyzeRequest.withGlobalAnalyzer("test_analyzer", "more text");
assertThat(RequestConverters.analyze(analyzeRequest).getEndpoint(), equalTo("/_analyze"));
}

View File

@@ -26,9 +26,6 @@ import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.analyze.DetailAnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
@@ -62,8 +59,11 @@ import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.SyncedFlushResponse;
import org.elasticsearch.client.core.ShardsAcknowledgedResponse;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.AnalyzeResponse;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.CreateIndexResponse;
import org.elasticsearch.client.indices.DetailAnalyzeResponse;
import org.elasticsearch.client.indices.FreezeIndexRequest;
import org.elasticsearch.client.indices.GetFieldMappingsRequest;
import org.elasticsearch.client.indices.GetFieldMappingsResponse;
@@ -2418,32 +2418,29 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
{
// tag::analyze-builtin-request
AnalyzeRequest request = new AnalyzeRequest();
request.text("Some text to analyze", "Some more text to analyze"); // <1>
request.analyzer("english"); // <2>
AnalyzeRequest request = AnalyzeRequest.withGlobalAnalyzer("english", // <1>
"Some text to analyze", "Some more text to analyze"); // <2>
// end::analyze-builtin-request
}
{
// tag::analyze-custom-request
AnalyzeRequest request = new AnalyzeRequest();
request.text("<b>Some text to analyze</b>");
request.addCharFilter("html_strip"); // <1>
request.tokenizer("standard"); // <2>
request.addTokenFilter("lowercase"); // <3>
Map<String, Object> stopFilter = new HashMap<>();
stopFilter.put("type", "stop");
stopFilter.put("stopwords", new String[]{ "to" }); // <4>
request.addTokenFilter(stopFilter); // <5>
stopFilter.put("stopwords", new String[]{ "to" }); // <1>
AnalyzeRequest request = AnalyzeRequest.buildCustomAnalyzer("standard") // <2>
.addCharFilter("html_strip") // <3>
.addTokenFilter("lowercase") // <4>
.addTokenFilter(stopFilter) // <5>
.build("<b>Some text to analyze</b>");
// end::analyze-custom-request
}
{
// tag::analyze-custom-normalizer-request
AnalyzeRequest request = new AnalyzeRequest();
request.text("<b>BaR</b>");
request.addTokenFilter("lowercase");
AnalyzeRequest request = AnalyzeRequest.buildCustomNormalizer()
.addTokenFilter("lowercase")
.build("<b>BaR</b>");
// end::analyze-custom-normalizer-request
// tag::analyze-request-explain
@@ -2484,10 +2481,11 @@
{
// tag::analyze-index-request
AnalyzeRequest request = new AnalyzeRequest();
request.index("my_index"); // <1>
request.analyzer("my_analyzer"); // <2>
request.text("some text to analyze");
AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer(
"my_index", // <1>
"my_analyzer", // <2>
"some text to analyze"
);
// end::analyze-index-request
// tag::analyze-execute-listener
@@ -2505,10 +2503,7 @@
// end::analyze-execute-listener
// use a built-in analyzer in the test
request = new AnalyzeRequest();
request.index("my_index");
request.field("my_field");
request.text("some text to analyze");
request = AnalyzeRequest.withField("my_index", "my_field", "some text to analyze");
// Use a blocking listener in the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
@@ -2522,19 +2517,17 @@
{
// tag::analyze-index-normalizer-request
AnalyzeRequest request = new AnalyzeRequest();
request.index("my_index"); // <1>
request.normalizer("my_normalizer"); // <2>
request.text("some text to analyze");
AnalyzeRequest request = AnalyzeRequest.withNormalizer(
"my_index", // <1>
"my_normalizer", // <2>
"some text to analyze"
);
// end::analyze-index-normalizer-request
}
{
// tag::analyze-field-request
AnalyzeRequest request = new AnalyzeRequest();
request.index("my_index");
request.field("my_field");
request.text("some text to analyze");
AnalyzeRequest request = AnalyzeRequest.withField("my_index", "my_field", "some text to analyze");
// end::analyze-field-request
}

View File

@@ -0,0 +1,69 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.indices;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
public class AnalyzeGlobalRequestTests extends AnalyzeRequestTests {
private static final Map<String, Object> charFilterConfig = new HashMap<>();
static {
charFilterConfig.put("type", "html_strip");
}
private static final Map<String, Object> tokenFilterConfig = new HashMap<>();
static {
tokenFilterConfig.put("type", "synonym");
}
@Override
protected AnalyzeRequest createClientTestInstance() {
int option = random().nextInt(3);
switch (option) {
case 0:
return AnalyzeRequest.withGlobalAnalyzer("my_analyzer", "some text", "some more text");
case 1:
return AnalyzeRequest.buildCustomAnalyzer("my_tokenizer")
.addCharFilter("my_char_filter")
.addCharFilter(charFilterConfig)
.addTokenFilter("my_token_filter")
.addTokenFilter(tokenFilterConfig)
.build("some text", "some more text");
case 2:
return AnalyzeRequest.buildCustomNormalizer()
.addCharFilter("my_char_filter")
.addCharFilter(charFilterConfig)
.addTokenFilter("my_token_filter")
.addTokenFilter(tokenFilterConfig)
.build("some text", "some more text");
}
throw new IllegalStateException("nextInt(3) has returned a value greater than 2");
}
@Override
protected AnalyzeAction.Request doParseToServerInstance(XContentParser parser) throws IOException {
return AnalyzeAction.Request.fromXContent(parser, null);
}
}

View File

@@ -0,0 +1,73 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.indices;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
public class AnalyzeIndexRequestTests extends AnalyzeRequestTests {
private static final Map<String, Object> charFilterConfig = new HashMap<>();
static {
charFilterConfig.put("type", "html_strip");
}
private static final Map<String, Object> tokenFilterConfig = new HashMap<>();
static {
tokenFilterConfig.put("type", "synonym");
}
@Override
protected AnalyzeRequest createClientTestInstance() {
int option = random().nextInt(5);
switch (option) {
case 0:
return AnalyzeRequest.withField("index", "field", "some text", "some more text");
case 1:
return AnalyzeRequest.withIndexAnalyzer("index", "my_analyzer", "some text", "some more text");
case 2:
return AnalyzeRequest.withNormalizer("index", "my_normalizer", "text", "more text");
case 3:
return AnalyzeRequest.buildCustomAnalyzer("index", "my_tokenizer")
.addCharFilter("my_char_filter")
.addCharFilter(charFilterConfig)
.addTokenFilter("my_token_filter")
.addTokenFilter(tokenFilterConfig)
.build("some text", "some more text");
case 4:
return AnalyzeRequest.buildCustomNormalizer("index")
.addCharFilter("my_char_filter")
.addCharFilter(charFilterConfig)
.addTokenFilter("my_token_filter")
.addTokenFilter(tokenFilterConfig)
.build("some text", "some more text");
}
throw new IllegalStateException("nextInt(5) has returned a value greater than 4");
}
@Override
protected AnalyzeAction.Request doParseToServerInstance(XContentParser parser) throws IOException {
return AnalyzeAction.Request.fromXContent(parser, "index");
}
}

View File

@@ -0,0 +1,54 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.indices;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.client.AbstractRequestTestCase;
public abstract class AnalyzeRequestTests extends AbstractRequestTestCase<AnalyzeRequest, AnalyzeAction.Request> {
@Override
protected void assertInstances(AnalyzeAction.Request serverInstance, AnalyzeRequest clientTestInstance) {
assertEquals(serverInstance.index(), clientTestInstance.index());
assertArrayEquals(serverInstance.text(), clientTestInstance.text());
assertEquals(serverInstance.analyzer(), clientTestInstance.analyzer());
assertEquals(serverInstance.normalizer(), clientTestInstance.normalizer());
assertEquals(serverInstance.charFilters().size(), clientTestInstance.charFilters().size());
for (int i = 0; i < serverInstance.charFilters().size(); i++) {
assertEquals(serverInstance.charFilters().get(i).name, clientTestInstance.charFilters().get(i).name);
assertEquals(serverInstance.charFilters().get(i).definition, clientTestInstance.charFilters().get(i).definition);
}
assertEquals(serverInstance.tokenFilters().size(), clientTestInstance.tokenFilters().size());
for (int i = 0; i < serverInstance.tokenFilters().size(); i++) {
assertEquals(serverInstance.tokenFilters().get(i).name, clientTestInstance.tokenFilters().get(i).name);
assertEquals(serverInstance.tokenFilters().get(i).definition, clientTestInstance.tokenFilters().get(i).definition);
}
if (serverInstance.tokenizer() != null) {
assertEquals(serverInstance.tokenizer().name, clientTestInstance.tokenizer().name);
assertEquals(serverInstance.tokenizer().definition, clientTestInstance.tokenizer().definition);
}
else {
assertNull(clientTestInstance.tokenizer());
}
assertEquals(serverInstance.field(), clientTestInstance.field());
assertEquals(serverInstance.explain(), clientTestInstance.explain());
assertArrayEquals(serverInstance.attributes(), clientTestInstance.attributes());
}
}
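
These assertions are driven by AbstractRequestTestCase, which is assumed here
to serialize a random client request to JSON and parse that back as a server
request before comparing, roughly like this sketch (not the harness's actual
code):

AnalyzeRequest clientInstance = createClientTestInstance();
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
clientInstance.toXContent(builder, ToXContent.EMPTY_PARAMS);
try (XContentParser parser = XContentType.JSON.xContent().createParser(
        NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
        Strings.toString(builder))) {
    AnalyzeAction.Request serverInstance = doParseToServerInstance(parser);
    assertInstances(serverInstance, clientInstance);
}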

View File

@@ -0,0 +1,174 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.indices;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class AnalyzeResponseTests extends AbstractResponseTestCase<AnalyzeAction.Response, AnalyzeResponse> {
@Override
protected AnalyzeAction.Response createServerTestInstance() {
int tokenCount = randomIntBetween(1, 30);
AnalyzeAction.AnalyzeToken[] tokens = new AnalyzeAction.AnalyzeToken[tokenCount];
for (int i = 0; i < tokenCount; i++) {
tokens[i] = randomToken();
}
if (randomBoolean()) {
AnalyzeAction.CharFilteredText[] charfilters = null;
AnalyzeAction.AnalyzeTokenList[] tokenfilters = null;
if (randomBoolean()) {
charfilters = new AnalyzeAction.CharFilteredText[]{
new AnalyzeAction.CharFilteredText("my_charfilter", new String[]{"one two"})
};
}
if (randomBoolean()) {
tokenfilters = new AnalyzeAction.AnalyzeTokenList[]{
new AnalyzeAction.AnalyzeTokenList("my_tokenfilter_1", tokens),
new AnalyzeAction.AnalyzeTokenList("my_tokenfilter_2", tokens)
};
}
AnalyzeAction.DetailAnalyzeResponse dar = new AnalyzeAction.DetailAnalyzeResponse(
charfilters,
new AnalyzeAction.AnalyzeTokenList("my_tokenizer", tokens),
tokenfilters);
return new AnalyzeAction.Response(null, dar);
}
return new AnalyzeAction.Response(Arrays.asList(tokens), null);
}
private AnalyzeAction.AnalyzeToken randomToken() {
String token = randomAlphaOfLengthBetween(1, 20);
int position = randomIntBetween(0, 1000);
int startOffset = randomIntBetween(0, 1000);
int endOffset = randomIntBetween(0, 1000);
int posLength = randomIntBetween(1, 5);
String type = randomAlphaOfLengthBetween(1, 20);
Map<String, Object> extras = new HashMap<>();
if (randomBoolean()) {
int entryCount = randomInt(6);
for (int i = 0; i < entryCount; i++) {
switch (randomInt(6)) {
case 0:
case 1:
case 2:
case 3:
String key = randomAlphaOfLength(5);
String value = randomAlphaOfLength(10);
extras.put(key, value);
break;
case 4:
String objkey = randomAlphaOfLength(5);
Map<String, String> obj = new HashMap<>();
obj.put(randomAlphaOfLength(5), randomAlphaOfLength(10));
extras.put(objkey, obj);
break;
case 5:
String listkey = randomAlphaOfLength(5);
List<String> list = new ArrayList<>();
list.add(randomAlphaOfLength(4));
list.add(randomAlphaOfLength(6));
extras.put(listkey, list);
break;
}
}
}
return new AnalyzeAction.AnalyzeToken(token, position, startOffset, endOffset, posLength, type, extras);
}
@Override
protected AnalyzeResponse doParseToClientInstance(XContentParser parser) throws IOException {
return AnalyzeResponse.fromXContent(parser);
}
@Override
protected void assertInstances(AnalyzeAction.Response serverTestInstance, AnalyzeResponse clientInstance) {
if (serverTestInstance.detail() != null) {
assertNotNull(clientInstance.detail());
assertInstances(serverTestInstance.detail(), clientInstance.detail());
}
else {
assertEquals(serverTestInstance.getTokens().size(), clientInstance.getTokens().size());
for (int i = 0; i < serverTestInstance.getTokens().size(); i++) {
assertEqualTokens(serverTestInstance.getTokens().get(i), clientInstance.getTokens().get(i));
}
}
}
private static void assertEqualTokens(AnalyzeAction.AnalyzeToken serverToken, AnalyzeResponse.AnalyzeToken clientToken) {
assertEquals(serverToken.getTerm(), clientToken.getTerm());
assertEquals(serverToken.getPosition(), clientToken.getPosition());
assertEquals(serverToken.getPositionLength(), clientToken.getPositionLength());
assertEquals(serverToken.getStartOffset(), clientToken.getStartOffset());
assertEquals(serverToken.getEndOffset(), clientToken.getEndOffset());
assertEquals(serverToken.getType(), clientToken.getType());
assertEquals(serverToken.getAttributes(), clientToken.getAttributes());
}
private static void assertInstances(AnalyzeAction.DetailAnalyzeResponse serverResponse, DetailAnalyzeResponse clientResponse) {
assertInstances(serverResponse.analyzer(), clientResponse.analyzer());
assertInstances(serverResponse.tokenizer(), clientResponse.tokenizer());
if (serverResponse.tokenfilters() == null) {
assertNull(clientResponse.tokenfilters());
}
else {
assertEquals(serverResponse.tokenfilters().length, clientResponse.tokenfilters().length);
for (int i = 0; i < serverResponse.tokenfilters().length; i++) {
assertInstances(serverResponse.tokenfilters()[i], clientResponse.tokenfilters()[i]);
}
}
if (serverResponse.charfilters() == null) {
assertNull(clientResponse.charfilters());
}
else {
assertEquals(serverResponse.charfilters().length, clientResponse.charfilters().length);
for (int i = 0; i < serverResponse.charfilters().length; i++) {
assertInstances(serverResponse.charfilters()[i], clientResponse.charfilters()[i]);
}
}
}
private static void assertInstances(AnalyzeAction.AnalyzeTokenList serverTokens,
DetailAnalyzeResponse.AnalyzeTokenList clientTokens) {
if (serverTokens == null) {
assertNull(clientTokens);
}
else {
assertEquals(serverTokens.getName(), clientTokens.getName());
assertEquals(serverTokens.getTokens().length, clientTokens.getTokens().length);
for (int i = 0; i < serverTokens.getTokens().length; i++) {
assertEqualTokens(serverTokens.getTokens()[i], clientTokens.getTokens()[i]);
}
}
}
private static void assertInstances(AnalyzeAction.CharFilteredText serverText, DetailAnalyzeResponse.CharFilteredText clientText) {
assertEquals(serverText.getName(), clientText.getName());
assertArrayEquals(serverText.getTexts(), clientText.getTexts());
}
}

View File

@@ -19,18 +19,18 @@ The simplest version uses a built-in analyzer:
---------------------------------------------------
include-tagged::{doc-tests-file}[{api}-builtin-request]
---------------------------------------------------
<1> The text to include. Multiple strings are treated as a multi-valued field
<2> A built-in analyzer
<1> A built-in analyzer
<2> The text to include. Multiple strings are treated as a multi-valued field
You can configure a custom analyzer:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests-file}[{api}-custom-request]
---------------------------------------------------
<1> Configure char filters
<1> Configuration for a custom tokenfilter
<2> Configure the tokenizer
<3> Add a built-in tokenfilter
<4> Configuration for a custom tokenfilter
<3> Configure char filters
<4> Add a built-in tokenfilter
<5> Add the custom tokenfilter
You can also build a custom normalizer, by including only charfilters and token filters:

View File

@@ -20,9 +20,35 @@
package org.elasticsearch.action.admin.indices.analyze;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.single.shard.SingleShardRequest;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
public class AnalyzeAction extends Action<AnalyzeResponse> {
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
import static org.elasticsearch.action.ValidateActions.addValidationError;
public class AnalyzeAction extends Action<AnalyzeAction.Response> {
public static final AnalyzeAction INSTANCE = new AnalyzeAction();
public static final String NAME = "indices:admin/analyze";
@@ -32,12 +58,802 @@ public class AnalyzeAction extends Action<AnalyzeResponse> {
}
@Override
public Writeable.Reader<AnalyzeResponse> getResponseReader() {
return AnalyzeResponse::new;
public Writeable.Reader<Response> getResponseReader() {
return Response::new;
}
@Override
public AnalyzeResponse newResponse() {
public Response newResponse() {
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}
/**
* A request to analyze text associated with a specific index. Allows providing
* the name of the analyzer to perform the analysis with.
*/
public static class Request extends SingleShardRequest<Request> {
private String[] text;
private String analyzer;
private NameOrDefinition tokenizer;
private final List<NameOrDefinition> tokenFilters = new ArrayList<>();
private final List<NameOrDefinition> charFilters = new ArrayList<>();
private String field;
private boolean explain = false;
private String[] attributes = Strings.EMPTY_ARRAY;
private String normalizer;
public static class NameOrDefinition implements Writeable {
// exactly one of these two members is not null
public final String name;
public final Settings definition;
NameOrDefinition(String name) {
this.name = Objects.requireNonNull(name);
this.definition = null;
}
NameOrDefinition(Map<String, ?> definition) {
this.name = null;
Objects.requireNonNull(definition);
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(definition);
this.definition = Settings.builder().loadFromSource(Strings.toString(builder), builder.contentType()).build();
} catch (IOException e) {
throw new IllegalArgumentException("Failed to parse [" + definition + "]", e);
}
}
NameOrDefinition(StreamInput in) throws IOException {
name = in.readOptionalString();
if (in.readBoolean()) {
definition = Settings.readSettingsFromStream(in);
} else {
definition = null;
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(name);
boolean isNotNullDefinition = this.definition != null;
out.writeBoolean(isNotNullDefinition);
if (isNotNullDefinition) {
Settings.writeSettingsToStream(definition, out);
}
}
public static NameOrDefinition fromXContent(XContentParser parser) throws IOException {
if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
return new NameOrDefinition(parser.text());
}
if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
return new NameOrDefinition(parser.map());
}
throw new XContentParseException(parser.getTokenLocation(),
"Expected [VALUE_STRING] or [START_OBJECT], got " + parser.currentToken());
}
}
public Request() {
}
/**
* Constructs a new analyzer request for the provided index.
*
* @param index The name of the index to run the analysis against
*/
public Request(String index) {
this.index(index);
}
public String[] text() {
return this.text;
}
public Request text(String... text) {
this.text = text;
return this;
}
public Request text(List<String> text) {
this.text = text.toArray(new String[]{});
return this;
}
public Request analyzer(String analyzer) {
this.analyzer = analyzer;
return this;
}
public String analyzer() {
return this.analyzer;
}
public Request tokenizer(String tokenizer) {
this.tokenizer = new NameOrDefinition(tokenizer);
return this;
}
public Request tokenizer(Map<String, ?> tokenizer) {
this.tokenizer = new NameOrDefinition(tokenizer);
return this;
}
public void tokenizer(NameOrDefinition tokenizer) {
this.tokenizer = tokenizer;
}
public NameOrDefinition tokenizer() {
return this.tokenizer;
}
public Request addTokenFilter(String tokenFilter) {
this.tokenFilters.add(new NameOrDefinition(tokenFilter));
return this;
}
public Request addTokenFilter(Map<String, ?> tokenFilter) {
this.tokenFilters.add(new NameOrDefinition(tokenFilter));
return this;
}
public void setTokenFilters(List<NameOrDefinition> tokenFilters) {
this.tokenFilters.addAll(tokenFilters);
}
public List<NameOrDefinition> tokenFilters() {
return this.tokenFilters;
}
public Request addCharFilter(Map<String, ?> charFilter) {
this.charFilters.add(new NameOrDefinition(charFilter));
return this;
}
public Request addCharFilter(String charFilter) {
this.charFilters.add(new NameOrDefinition(charFilter));
return this;
}
public void setCharFilters(List<NameOrDefinition> charFilters) {
this.charFilters.addAll(charFilters);
}
public List<NameOrDefinition> charFilters() {
return this.charFilters;
}
public Request field(String field) {
this.field = field;
return this;
}
public String field() {
return this.field;
}
public Request explain(boolean explain) {
this.explain = explain;
return this;
}
public boolean explain() {
return this.explain;
}
public Request attributes(String... attributes) {
if (attributes == null) {
throw new IllegalArgumentException("attributes must not be null");
}
this.attributes = attributes;
return this;
}
public void attributes(List<String> attributes) {
this.attributes = attributes.toArray(new String[]{});
}
public String[] attributes() {
return this.attributes;
}
public String normalizer() {
return this.normalizer;
}
public Request normalizer(String normalizer) {
this.normalizer = normalizer;
return this;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (text == null || text.length == 0) {
validationException = addValidationError("text is missing", validationException);
}
if ((index == null || index.length() == 0) && normalizer != null) {
validationException = addValidationError("index is required if normalizer is specified", validationException);
}
if (normalizer != null && (tokenizer != null || analyzer != null)) {
validationException
= addValidationError("tokenizer/analyze should be null if normalizer is specified", validationException);
}
return validationException;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
text = in.readStringArray();
analyzer = in.readOptionalString();
tokenizer = in.readOptionalWriteable(NameOrDefinition::new);
tokenFilters.addAll(in.readList(NameOrDefinition::new));
charFilters.addAll(in.readList(NameOrDefinition::new));
field = in.readOptionalString();
explain = in.readBoolean();
attributes = in.readStringArray();
normalizer = in.readOptionalString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArray(text);
out.writeOptionalString(analyzer);
out.writeOptionalWriteable(tokenizer);
out.writeList(tokenFilters);
out.writeList(charFilters);
out.writeOptionalString(field);
out.writeBoolean(explain);
out.writeStringArray(attributes);
out.writeOptionalString(normalizer);
}
public static Request fromXContent(XContentParser parser, String index) throws IOException {
Request request = new Request(index);
PARSER.parse(parser, request, null);
return request;
}
private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>("analyze_request", null);
static {
PARSER.declareStringArray(Request::text, new ParseField("text"));
PARSER.declareString(Request::analyzer, new ParseField("analyzer"));
PARSER.declareField(Request::tokenizer, (p, c) -> NameOrDefinition.fromXContent(p),
new ParseField("tokenizer"), ObjectParser.ValueType.OBJECT_OR_STRING);
PARSER.declareObjectArray(Request::setTokenFilters, (p, c) -> NameOrDefinition.fromXContent(p),
new ParseField("filter"));
PARSER.declareObjectArray(Request::setCharFilters, (p, c) -> NameOrDefinition.fromXContent(p),
new ParseField("char_filter"));
PARSER.declareString(Request::field, new ParseField("field"));
PARSER.declareBoolean(Request::explain, new ParseField("explain"));
PARSER.declareStringArray(Request::attributes, new ParseField("attributes"));
PARSER.declareString(Request::normalizer, new ParseField("normalizer"));
}
}
public static class Response extends ActionResponse implements ToXContentObject {
private final DetailAnalyzeResponse detail;
private final List<AnalyzeToken> tokens;
public Response(List<AnalyzeToken> tokens, DetailAnalyzeResponse detail) {
this.tokens = tokens;
this.detail = detail;
}
public Response(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
if (size > 0) {
tokens = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
tokens.add(new AnalyzeToken(in));
}
} else {
tokens = null;
}
detail = in.readOptionalWriteable(DetailAnalyzeResponse::new);
}
@Override
public void readFrom(StreamInput in) throws IOException {
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}
public List<AnalyzeToken> getTokens() {
return this.tokens;
}
public DetailAnalyzeResponse detail() {
return this.detail;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (tokens != null) {
builder.startArray(Fields.TOKENS);
for (AnalyzeToken token : tokens) {
token.toXContent(builder, params);
}
builder.endArray();
}
if (detail != null) {
builder.startObject(Fields.DETAIL);
detail.toXContent(builder, params);
builder.endObject();
}
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (tokens != null) {
out.writeVInt(tokens.size());
for (AnalyzeToken token : tokens) {
token.writeTo(out);
}
} else {
out.writeVInt(0);
}
out.writeOptionalWriteable(detail);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Response that = (Response) o;
return Objects.equals(detail, that.detail) &&
Objects.equals(tokens, that.tokens);
}
@Override
public int hashCode() {
return Objects.hash(detail, tokens);
}
@Override
public String toString() {
return Strings.toString(this, true, true);
}
static final class Fields {
static final String TOKENS = "tokens";
static final String DETAIL = "detail";
}
}
public static class AnalyzeToken implements Writeable, ToXContentObject {
private final String term;
private final int startOffset;
private final int endOffset;
private final int position;
private final int positionLength;
private final Map<String, Object> attributes;
private final String type;
static final String TOKEN = "token";
static final String START_OFFSET = "start_offset";
static final String END_OFFSET = "end_offset";
static final String TYPE = "type";
static final String POSITION = "position";
static final String POSITION_LENGTH = "positionLength";
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AnalyzeToken that = (AnalyzeToken) o;
return startOffset == that.startOffset &&
endOffset == that.endOffset &&
position == that.position &&
positionLength == that.positionLength &&
Objects.equals(term, that.term) &&
Objects.equals(attributes, that.attributes) &&
Objects.equals(type, that.type);
}
@Override
public int hashCode() {
return Objects.hash(term, startOffset, endOffset, position, positionLength, attributes, type);
}
public AnalyzeToken(String term, int position, int startOffset, int endOffset, int positionLength,
String type, Map<String, Object> attributes) {
this.term = term;
this.position = position;
this.startOffset = startOffset;
this.endOffset = endOffset;
this.positionLength = positionLength;
this.type = type;
this.attributes = attributes;
}
AnalyzeToken(StreamInput in) throws IOException {
term = in.readString();
startOffset = in.readInt();
endOffset = in.readInt();
position = in.readVInt();
Integer len = in.readOptionalVInt();
if (len != null) {
positionLength = len;
} else {
positionLength = 1;
}
type = in.readOptionalString();
attributes = in.readMap();
}
public String getTerm() {
return this.term;
}
public int getStartOffset() {
return this.startOffset;
}
public int getEndOffset() {
return this.endOffset;
}
public int getPosition() {
return this.position;
}
public int getPositionLength() {
return this.positionLength;
}
public String getType() {
return this.type;
}
public Map<String, Object> getAttributes() {
return this.attributes;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(TOKEN, term);
builder.field(START_OFFSET, startOffset);
builder.field(END_OFFSET, endOffset);
builder.field(TYPE, type);
builder.field(POSITION, position);
if (positionLength > 1) {
builder.field(POSITION_LENGTH, positionLength);
}
if (attributes != null && !attributes.isEmpty()) {
Map<String, Object> sortedAttributes = new TreeMap<>(attributes);
for (Map.Entry<String, Object> entry : sortedAttributes.entrySet()) {
builder.field(entry.getKey(), entry.getValue());
}
}
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(term);
out.writeInt(startOffset);
out.writeInt(endOffset);
out.writeVInt(position);
out.writeOptionalVInt(positionLength > 1 ? positionLength : null);
out.writeOptionalString(type);
out.writeMapWithConsistentOrder(attributes);
}
}
public static class DetailAnalyzeResponse implements Writeable, ToXContentFragment {
private final boolean customAnalyzer;
private final AnalyzeTokenList analyzer;
private final CharFilteredText[] charfilters;
private final AnalyzeTokenList tokenizer;
private final AnalyzeTokenList[] tokenfilters;
public DetailAnalyzeResponse(AnalyzeTokenList analyzer) {
this(false, analyzer, null, null, null);
}
public DetailAnalyzeResponse(CharFilteredText[] charfilters, AnalyzeTokenList tokenizer, AnalyzeTokenList[] tokenfilters) {
this(true, null, charfilters, tokenizer, tokenfilters);
}
DetailAnalyzeResponse(boolean customAnalyzer,
AnalyzeTokenList analyzer,
CharFilteredText[] charfilters,
AnalyzeTokenList tokenizer,
AnalyzeTokenList[] tokenfilters) {
this.customAnalyzer = customAnalyzer;
this.analyzer = analyzer;
this.charfilters = charfilters;
this.tokenizer = tokenizer;
this.tokenfilters = tokenfilters;
}
DetailAnalyzeResponse(StreamInput in) throws IOException {
this.customAnalyzer = in.readBoolean();
if (customAnalyzer) {
tokenizer = new AnalyzeTokenList(in);
int size = in.readVInt();
if (size > 0) {
charfilters = new CharFilteredText[size];
for (int i = 0; i < size; i++) {
charfilters[i] = new CharFilteredText(in);
}
} else {
charfilters = null;
}
size = in.readVInt();
if (size > 0) {
tokenfilters = new AnalyzeTokenList[size];
for (int i = 0; i < size; i++) {
tokenfilters[i] = new AnalyzeTokenList(in);
}
} else {
tokenfilters = null;
}
analyzer = null;
} else {
analyzer = new AnalyzeTokenList(in);
tokenfilters = null;
tokenizer = null;
charfilters = null;
}
}
public AnalyzeTokenList analyzer() {
return this.analyzer;
}
public CharFilteredText[] charfilters() {
return this.charfilters;
}
public AnalyzeTokenList tokenizer() {
return tokenizer;
}
public AnalyzeTokenList[] tokenfilters() {
return tokenfilters;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DetailAnalyzeResponse that = (DetailAnalyzeResponse) o;
return customAnalyzer == that.customAnalyzer &&
Objects.equals(analyzer, that.analyzer) &&
Arrays.equals(charfilters, that.charfilters) &&
Objects.equals(tokenizer, that.tokenizer) &&
Arrays.equals(tokenfilters, that.tokenfilters);
}
@Override
public int hashCode() {
int result = Objects.hash(customAnalyzer, analyzer, tokenizer);
result = 31 * result + Arrays.hashCode(charfilters);
result = 31 * result + Arrays.hashCode(tokenfilters);
return result;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field("custom_analyzer", customAnalyzer);
if (analyzer != null) {
builder.startObject("analyzer");
analyzer.toXContentWithoutObject(builder, params);
builder.endObject();
}
if (charfilters != null) {
builder.startArray("charfilters");
for (CharFilteredText charfilter : charfilters) {
charfilter.toXContent(builder, params);
}
builder.endArray();
}
if (tokenizer != null) {
builder.startObject("tokenizer");
tokenizer.toXContentWithoutObject(builder, params);
builder.endObject();
}
if (tokenfilters != null) {
builder.startArray("tokenfilters");
for (AnalyzeTokenList tokenfilter : tokenfilters) {
tokenfilter.toXContent(builder, params);
}
builder.endArray();
}
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(customAnalyzer);
if (customAnalyzer) {
tokenizer.writeTo(out);
if (charfilters != null) {
out.writeVInt(charfilters.length);
for (CharFilteredText charfilter : charfilters) {
charfilter.writeTo(out);
}
} else {
out.writeVInt(0);
}
if (tokenfilters != null) {
out.writeVInt(tokenfilters.length);
for (AnalyzeTokenList tokenfilter : tokenfilters) {
tokenfilter.writeTo(out);
}
} else {
out.writeVInt(0);
}
} else {
analyzer.writeTo(out);
}
}
}
public static class AnalyzeTokenList implements Writeable, ToXContentObject {
private final String name;
private final AnalyzeToken[] tokens;
static final String NAME = "name";
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AnalyzeTokenList that = (AnalyzeTokenList) o;
return Objects.equals(name, that.name) &&
Arrays.equals(tokens, that.tokens);
}
@Override
public int hashCode() {
int result = Objects.hash(name);
result = 31 * result + Arrays.hashCode(tokens);
return result;
}
public AnalyzeTokenList(String name, AnalyzeToken[] tokens) {
this.name = name;
this.tokens = tokens;
}
AnalyzeTokenList(StreamInput in) throws IOException {
name = in.readString();
int size = in.readVInt();
if (size > 0) {
tokens = new AnalyzeToken[size];
for (int i = 0; i < size; i++) {
tokens[i] = new AnalyzeToken(in);
}
} else {
tokens = null;
}
}
public String getName() {
return name;
}
public AnalyzeToken[] getTokens() {
return tokens;
}
void toXContentWithoutObject(XContentBuilder builder, Params params) throws IOException {
builder.field(NAME, this.name);
builder.startArray(Response.Fields.TOKENS);
if (tokens != null) {
for (AnalyzeToken token : tokens) {
token.toXContent(builder, params);
}
}
builder.endArray();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
toXContentWithoutObject(builder, params);
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
if (tokens != null) {
out.writeVInt(tokens.length);
for (AnalyzeToken token : tokens) {
token.writeTo(out);
}
} else {
out.writeVInt(0);
}
}
}
public static class CharFilteredText implements Writeable, ToXContentObject {
private final String name;
private final String[] texts;
static final String NAME = "name";
static final String FILTERED_TEXT = "filtered_text";
public CharFilteredText(String name, String[] texts) {
this.name = name;
if (texts != null) {
this.texts = texts;
} else {
this.texts = Strings.EMPTY_ARRAY;
}
}
CharFilteredText(StreamInput in) throws IOException {
name = in.readString();
texts = in.readStringArray();
}
public String getName() {
return name;
}
public String[] getTexts() {
return texts;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(NAME, name);
builder.array(FILTERED_TEXT, texts);
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeStringArray(texts);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CharFilteredText that = (CharFilteredText) o;
return Objects.equals(name, that.name) &&
Arrays.equals(texts, that.texts);
}
@Override
public int hashCode() {
int result = Objects.hash(name);
result = 31 * result + Arrays.hashCode(texts);
return result;
}
}
}
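For reference, a minimal sketch of how the consolidated AnalyzeAction inner classes are exercised on the transport layer. The index name, text, and analysis chain are illustrative placeholders, not values taken from this commit:

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;

public class AnalyzeRequestSketch {
    public static void main(String[] args) {
        // "my-index" and the chain below are hypothetical values. NameOrDefinition
        // lets the tokenizer and filters be referenced by name or defined inline
        // as anonymous settings; the named variants are used here.
        AnalyzeAction.Request request = new AnalyzeAction.Request("my-index")
                .text("Quick Brown Foxes")
                .tokenizer("standard")
                .addTokenFilter("lowercase");
        // validate() returns null when the request is well-formed.
        ActionRequestValidationException validation = request.validate();
        assert validation == null;
    }
}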

@ -1,307 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.analyze;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.single.shard.SingleShardRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.action.ValidateActions.addValidationError;
/**
 * A request to analyze text against a specific index, optionally naming
 * the analyzer to perform the analysis with.
 */
public class AnalyzeRequest extends SingleShardRequest<AnalyzeRequest> implements ToXContentObject {
private String[] text;
private String analyzer;
private NameOrDefinition tokenizer;
private final List<NameOrDefinition> tokenFilters = new ArrayList<>();
private final List<NameOrDefinition> charFilters = new ArrayList<>();
private String field;
private boolean explain = false;
private String[] attributes = Strings.EMPTY_ARRAY;
private String normalizer;
public static class NameOrDefinition implements Writeable, ToXContentFragment {
// exactly one of these two members is not null
public final String name;
public final Settings definition;
NameOrDefinition(String name) {
this.name = Objects.requireNonNull(name);
this.definition = null;
}
NameOrDefinition(Map<String, ?> definition) {
this.name = null;
Objects.requireNonNull(definition);
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(definition);
this.definition = Settings.builder().loadFromSource(Strings.toString(builder), builder.contentType()).build();
} catch (IOException e) {
throw new IllegalArgumentException("Failed to parse [" + definition + "]", e);
}
}
NameOrDefinition(StreamInput in) throws IOException {
name = in.readOptionalString();
if (in.readBoolean()) {
definition = Settings.readSettingsFromStream(in);
} else {
definition = null;
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(name);
boolean isNotNullDefinition = this.definition != null;
out.writeBoolean(isNotNullDefinition);
if (isNotNullDefinition) {
Settings.writeSettingsToStream(definition, out);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (definition == null) {
return builder.value(name);
}
return definition.toXContent(builder, params);
}
}
public AnalyzeRequest() {
}
/**
 * Constructs a new analyze request for the provided index.
 *
 * @param index the name of the index to run the analysis against
 */
public AnalyzeRequest(String index) {
this.index(index);
}
public String[] text() {
return this.text;
}
public AnalyzeRequest text(String... text) {
this.text = text;
return this;
}
public AnalyzeRequest analyzer(String analyzer) {
this.analyzer = analyzer;
return this;
}
public String analyzer() {
return this.analyzer;
}
public AnalyzeRequest tokenizer(String tokenizer) {
this.tokenizer = new NameOrDefinition(tokenizer);
return this;
}
public AnalyzeRequest tokenizer(Map<String, ?> tokenizer) {
this.tokenizer = new NameOrDefinition(tokenizer);
return this;
}
public NameOrDefinition tokenizer() {
return this.tokenizer;
}
public AnalyzeRequest addTokenFilter(String tokenFilter) {
this.tokenFilters.add(new NameOrDefinition(tokenFilter));
return this;
}
public AnalyzeRequest addTokenFilter(Map<String, ?> tokenFilter) {
this.tokenFilters.add(new NameOrDefinition(tokenFilter));
return this;
}
public List<NameOrDefinition> tokenFilters() {
return this.tokenFilters;
}
public AnalyzeRequest addCharFilter(Map<String, ?> charFilter) {
this.charFilters.add(new NameOrDefinition(charFilter));
return this;
}
public AnalyzeRequest addCharFilter(String charFilter) {
this.charFilters.add(new NameOrDefinition(charFilter));
return this;
}
public List<NameOrDefinition> charFilters() {
return this.charFilters;
}
public AnalyzeRequest field(String field) {
this.field = field;
return this;
}
public String field() {
return this.field;
}
public AnalyzeRequest explain(boolean explain) {
this.explain = explain;
return this;
}
public boolean explain() {
return this.explain;
}
public AnalyzeRequest attributes(String... attributes) {
if (attributes == null) {
throw new IllegalArgumentException("attributes must not be null");
}
this.attributes = attributes;
return this;
}
public String[] attributes() {
return this.attributes;
}
public String normalizer() {
return this.normalizer;
}
public AnalyzeRequest normalizer(String normalizer) {
this.normalizer = normalizer;
return this;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (text == null || text.length == 0) {
validationException = addValidationError("text is missing", validationException);
}
if ((index == null || index.length() == 0) && normalizer != null) {
validationException = addValidationError("index is required if normalizer is specified", validationException);
}
if (normalizer != null && (tokenizer != null || analyzer != null)) {
validationException = addValidationError("tokenizer/analyze should be null if normalizer is specified", validationException);
}
return validationException;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
text = in.readStringArray();
analyzer = in.readOptionalString();
tokenizer = in.readOptionalWriteable(NameOrDefinition::new);
tokenFilters.addAll(in.readList(NameOrDefinition::new));
charFilters.addAll(in.readList(NameOrDefinition::new));
field = in.readOptionalString();
explain = in.readBoolean();
attributes = in.readStringArray();
if (in.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
normalizer = in.readOptionalString();
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArray(text);
out.writeOptionalString(analyzer);
out.writeOptionalWriteable(tokenizer);
out.writeList(tokenFilters);
out.writeList(charFilters);
out.writeOptionalString(field);
out.writeBoolean(explain);
out.writeStringArray(attributes);
if (out.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
out.writeOptionalString(normalizer);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("text", text);
if (Strings.isNullOrEmpty(analyzer) == false) {
builder.field("analyzer", analyzer);
}
if (tokenizer != null) {
tokenizer.toXContent(builder, params);
}
if (tokenFilters.size() > 0) {
builder.field("filter", tokenFilters);
}
if (charFilters.size() > 0) {
builder.field("char_filter", charFilters);
}
if (Strings.isNullOrEmpty(field) == false) {
builder.field("field", field);
}
if (explain) {
builder.field("explain", true);
}
if (attributes.length > 0) {
builder.field("attributes", attributes);
}
if (Strings.isNullOrEmpty(normalizer) == false) {
builder.field("normalizer", normalizer);
}
return builder.endObject();
}
}
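On the client side this class is superseded by org.elasticsearch.client.indices.AnalyzeRequest. A minimal high-level REST client sketch, under the assumption that the cloned client class keeps the same fluent setters as the server original above; the host, index, and analyzer are placeholders:

import org.apache.http.HttpHost;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.AnalyzeResponse;

public class ClientAnalyzeSketch {
    public static void main(String[] args) throws Exception {
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
            // Assumption: the clone keeps the server-style fluent setters.
            AnalyzeRequest request = new AnalyzeRequest("my-index")
                    .text("Quick Brown Foxes")
                    .analyzer("standard");
            // Round-trips through the request converters adjusted by this commit.
            AnalyzeResponse response = client.indices().analyze(request, RequestOptions.DEFAULT);
            response.getTokens().forEach(token -> System.out.println(token.getTerm()));
        }
    }
}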

@ -23,14 +23,15 @@ import org.elasticsearch.client.ElasticsearchClient;
import java.util.Map;
public class AnalyzeRequestBuilder extends SingleShardOperationRequestBuilder<AnalyzeRequest, AnalyzeResponse, AnalyzeRequestBuilder> {
public class AnalyzeRequestBuilder
extends SingleShardOperationRequestBuilder<AnalyzeAction.Request, AnalyzeAction.Response, AnalyzeRequestBuilder> {
public AnalyzeRequestBuilder(ElasticsearchClient client, AnalyzeAction action) {
super(client, action, new AnalyzeRequest());
super(client, action, new AnalyzeAction.Request());
}
public AnalyzeRequestBuilder(ElasticsearchClient client, AnalyzeAction action, String index, String... text) {
super(client, action, new AnalyzeRequest(index).text(text));
super(client, action, new AnalyzeAction.Request(index).text(text));
}
/**

@ -1,320 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.analyze;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeResponse.AnalyzeToken>, ToXContentObject {
public static class AnalyzeToken implements Writeable, ToXContentObject {
private final String term;
private final int startOffset;
private final int endOffset;
private final int position;
private final int positionLength;
private final Map<String, Object> attributes;
private final String type;
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AnalyzeToken that = (AnalyzeToken) o;
return startOffset == that.startOffset &&
endOffset == that.endOffset &&
position == that.position &&
positionLength == that.positionLength &&
Objects.equals(term, that.term) &&
Objects.equals(attributes, that.attributes) &&
Objects.equals(type, that.type);
}
@Override
public int hashCode() {
return Objects.hash(term, startOffset, endOffset, position, positionLength, attributes, type);
}
AnalyzeToken(String term, int position, int startOffset, int endOffset, int positionLength,
String type, Map<String, Object> attributes) {
this.term = term;
this.position = position;
this.startOffset = startOffset;
this.endOffset = endOffset;
this.positionLength = positionLength;
this.type = type;
this.attributes = attributes;
}
AnalyzeToken(StreamInput in) throws IOException {
term = in.readString();
startOffset = in.readInt();
endOffset = in.readInt();
position = in.readVInt();
Integer len = in.readOptionalVInt();
if (len != null) {
positionLength = len;
} else {
positionLength = 1;
}
type = in.readOptionalString();
attributes = in.readMap();
}
public String getTerm() {
return this.term;
}
public int getStartOffset() {
return this.startOffset;
}
public int getEndOffset() {
return this.endOffset;
}
public int getPosition() {
return this.position;
}
public int getPositionLength() {
return this.positionLength;
}
public String getType() {
return this.type;
}
public Map<String, Object> getAttributes() {
return this.attributes;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(Fields.TOKEN, term);
builder.field(Fields.START_OFFSET, startOffset);
builder.field(Fields.END_OFFSET, endOffset);
builder.field(Fields.TYPE, type);
builder.field(Fields.POSITION, position);
if (positionLength > 1) {
builder.field(Fields.POSITION_LENGTH, positionLength);
}
if (attributes != null && !attributes.isEmpty()) {
Map<String, Object> sortedAttributes = new TreeMap<>(attributes);
for (Map.Entry<String, Object> entry : sortedAttributes.entrySet()) {
builder.field(entry.getKey(), entry.getValue());
}
}
builder.endObject();
return builder;
}
public static AnalyzeToken fromXContent(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
String field = null;
String term = "";
int position = -1;
int startOffset = -1;
int endOffset = -1;
int positionLength = 1;
String type = "";
Map<String, Object> attributes = new HashMap<>();
for (XContentParser.Token t = parser.nextToken(); t != XContentParser.Token.END_OBJECT; t = parser.nextToken()) {
if (t == XContentParser.Token.FIELD_NAME) {
field = parser.currentName();
continue;
}
if (Fields.TOKEN.equals(field)) {
term = parser.text();
} else if (Fields.POSITION.equals(field)) {
position = parser.intValue();
} else if (Fields.START_OFFSET.equals(field)) {
startOffset = parser.intValue();
} else if (Fields.END_OFFSET.equals(field)) {
endOffset = parser.intValue();
} else if (Fields.POSITION_LENGTH.equals(field)) {
positionLength = parser.intValue();
} else if (Fields.TYPE.equals(field)) {
type = parser.text();
} else {
if (t == XContentParser.Token.VALUE_STRING) {
attributes.put(field, parser.text());
} else if (t == XContentParser.Token.VALUE_NUMBER) {
attributes.put(field, parser.numberValue());
} else if (t == XContentParser.Token.VALUE_BOOLEAN) {
attributes.put(field, parser.booleanValue());
} else if (t == XContentParser.Token.START_OBJECT) {
attributes.put(field, parser.map());
} else if (t == XContentParser.Token.START_ARRAY) {
attributes.put(field, parser.list());
}
}
}
return new AnalyzeToken(term, position, startOffset, endOffset, positionLength, type, attributes);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(term);
out.writeInt(startOffset);
out.writeInt(endOffset);
out.writeVInt(position);
out.writeOptionalVInt(positionLength > 1 ? positionLength : null);
out.writeOptionalString(type);
out.writeMapWithConsistentOrder(attributes);
}
}
private final DetailAnalyzeResponse detail;
private final List<AnalyzeToken> tokens;
public AnalyzeResponse(List<AnalyzeToken> tokens, DetailAnalyzeResponse detail) {
this.tokens = tokens;
this.detail = detail;
}
public AnalyzeResponse(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
if (size > 0) {
tokens = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
tokens.add(new AnalyzeToken(in));
}
} else {
tokens = null;
}
detail = in.readOptionalWriteable(DetailAnalyzeResponse::new);
}
@Override
public void readFrom(StreamInput in) throws IOException {
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}
public List<AnalyzeToken> getTokens() {
return this.tokens;
}
public DetailAnalyzeResponse detail() {
return this.detail;
}
@Override
public Iterator<AnalyzeToken> iterator() {
return tokens.iterator();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (tokens != null) {
builder.startArray(Fields.TOKENS);
for (AnalyzeToken token : tokens) {
token.toXContent(builder, params);
}
builder.endArray();
}
if (detail != null) {
builder.startObject(Fields.DETAIL);
detail.toXContent(builder, params);
builder.endObject();
}
builder.endObject();
return builder;
}
private static final ConstructingObjectParser<AnalyzeResponse, Void> PARSER = new ConstructingObjectParser<>("analyze_response",
true, args -> new AnalyzeResponse((List<AnalyzeToken>) args[0], (DetailAnalyzeResponse) args[1]));
static {
PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> AnalyzeToken.fromXContent(p), new ParseField(Fields.TOKENS));
PARSER.declareObject(optionalConstructorArg(), DetailAnalyzeResponse.PARSER, new ParseField(Fields.DETAIL));
}
public static AnalyzeResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (tokens != null) {
out.writeVInt(tokens.size());
for (AnalyzeToken token : tokens) {
token.writeTo(out);
}
} else {
out.writeVInt(0);
}
out.writeOptionalWriteable(detail);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AnalyzeResponse that = (AnalyzeResponse) o;
return Objects.equals(detail, that.detail) &&
Objects.equals(tokens, that.tokens);
}
@Override
public int hashCode() {
return Objects.hash(detail, tokens);
}
@Override
public String toString() {
return Strings.toString(this, true, true);
}
static final class Fields {
static final String TOKENS = "tokens";
static final String TOKEN = "token";
static final String START_OFFSET = "start_offset";
static final String END_OFFSET = "end_offset";
static final String TYPE = "type";
static final String POSITION = "position";
static final String POSITION_LENGTH = "positionLength";
static final String DETAIL = "detail";
}
}
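Before its removal, this class parsed REST bodies through the ConstructingObjectParser above, and the same pattern carries over to the client-side clone. A sketch of rehydrating a response from JSON, with the registry and sample body as stand-ins:

import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

public class ParseAnalyzeResponseSketch {
    public static void main(String[] args) throws Exception {
        // Sample body is a stand-in; field names mirror the Fields constants above.
        String json = "{\"tokens\":[{\"token\":\"quick\",\"start_offset\":0,"
                + "\"end_offset\":5,\"type\":\"<ALPHANUM>\",\"position\":0}]}";
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            AnalyzeResponse response = AnalyzeResponse.fromXContent(parser);
            System.out.println(response.getTokens().get(0).getTerm()); // quick
        }
    }
}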

@ -1,400 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.analyze;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DetailAnalyzeResponse implements Writeable, ToXContentFragment {
private final boolean customAnalyzer;
private final AnalyzeTokenList analyzer;
private final CharFilteredText[] charfilters;
private final AnalyzeTokenList tokenizer;
private final AnalyzeTokenList[] tokenfilters;
public DetailAnalyzeResponse(AnalyzeTokenList analyzer) {
this(false, analyzer, null, null, null);
}
public DetailAnalyzeResponse(CharFilteredText[] charfilters, AnalyzeTokenList tokenizer, AnalyzeTokenList[] tokenfilters) {
this(true, null, charfilters, tokenizer, tokenfilters);
}
public DetailAnalyzeResponse(boolean customAnalyzer,
AnalyzeTokenList analyzer,
CharFilteredText[] charfilters,
AnalyzeTokenList tokenizer,
AnalyzeTokenList[] tokenfilters) {
this.customAnalyzer = customAnalyzer;
this.analyzer = analyzer;
this.charfilters = charfilters;
this.tokenizer = tokenizer;
this.tokenfilters = tokenfilters;
}
public DetailAnalyzeResponse(StreamInput in) throws IOException {
this.customAnalyzer = in.readBoolean();
if (customAnalyzer) {
tokenizer = new AnalyzeTokenList(in);
int size = in.readVInt();
if (size > 0) {
charfilters = new CharFilteredText[size];
for (int i = 0; i < size; i++) {
charfilters[i] = new CharFilteredText(in);
}
} else {
charfilters = null;
}
size = in.readVInt();
if (size > 0) {
tokenfilters = new AnalyzeTokenList[size];
for (int i = 0; i < size; i++) {
tokenfilters[i] = new AnalyzeTokenList(in);
}
} else {
tokenfilters = null;
}
analyzer = null;
} else {
analyzer = new AnalyzeTokenList(in);
tokenfilters = null;
tokenizer = null;
charfilters = null;
}
}
public AnalyzeTokenList analyzer() {
return this.analyzer;
}
public CharFilteredText[] charfilters() {
return this.charfilters;
}
public AnalyzeTokenList tokenizer() {
return tokenizer;
}
public AnalyzeTokenList[] tokenfilters() {
return tokenfilters;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DetailAnalyzeResponse that = (DetailAnalyzeResponse) o;
return customAnalyzer == that.customAnalyzer &&
Objects.equals(analyzer, that.analyzer) &&
Arrays.equals(charfilters, that.charfilters) &&
Objects.equals(tokenizer, that.tokenizer) &&
Arrays.equals(tokenfilters, that.tokenfilters);
}
@Override
public int hashCode() {
int result = Objects.hash(customAnalyzer, analyzer, tokenizer);
result = 31 * result + Arrays.hashCode(charfilters);
result = 31 * result + Arrays.hashCode(tokenfilters);
return result;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.CUSTOM_ANALYZER, customAnalyzer);
if (analyzer != null) {
builder.startObject(Fields.ANALYZER);
analyzer.toXContentWithoutObject(builder, params);
builder.endObject();
}
if (charfilters != null) {
builder.startArray(Fields.CHARFILTERS);
for (CharFilteredText charfilter : charfilters) {
charfilter.toXContent(builder, params);
}
builder.endArray();
}
if (tokenizer != null) {
builder.startObject(Fields.TOKENIZER);
tokenizer.toXContentWithoutObject(builder, params);
builder.endObject();
}
if (tokenfilters != null) {
builder.startArray(Fields.TOKENFILTERS);
for (AnalyzeTokenList tokenfilter : tokenfilters) {
tokenfilter.toXContent(builder, params);
}
builder.endArray();
}
return builder;
}
@SuppressWarnings("unchecked")
private static <T> T[] fromList(Class<T> clazz, List<T> list) {
if (list == null) {
return null;
}
return list.toArray((T[])Array.newInstance(clazz, 0));
}
static final ConstructingObjectParser<DetailAnalyzeResponse, Void> PARSER = new ConstructingObjectParser<>("detail",
true, args -> new DetailAnalyzeResponse((boolean) args[0], (AnalyzeTokenList) args[1],
fromList(CharFilteredText.class, (List<CharFilteredText>)args[2]),
(AnalyzeTokenList) args[3],
fromList(AnalyzeTokenList.class, (List<AnalyzeTokenList>)args[4])));
static {
PARSER.declareBoolean(constructorArg(), new ParseField(Fields.CUSTOM_ANALYZER));
PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.ANALYZER));
PARSER.declareObjectArray(optionalConstructorArg(), CharFilteredText.PARSER, new ParseField(Fields.CHARFILTERS));
PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENIZER));
PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENFILTERS));
}
public static DetailAnalyzeResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
static final class Fields {
static final String NAME = "name";
static final String FILTERED_TEXT = "filtered_text";
static final String CUSTOM_ANALYZER = "custom_analyzer";
static final String ANALYZER = "analyzer";
static final String CHARFILTERS = "charfilters";
static final String TOKENIZER = "tokenizer";
static final String TOKENFILTERS = "tokenfilters";
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(customAnalyzer);
if (customAnalyzer) {
tokenizer.writeTo(out);
if (charfilters != null) {
out.writeVInt(charfilters.length);
for (CharFilteredText charfilter : charfilters) {
charfilter.writeTo(out);
}
} else {
out.writeVInt(0);
}
if (tokenfilters != null) {
out.writeVInt(tokenfilters.length);
for (AnalyzeTokenList tokenfilter : tokenfilters) {
tokenfilter.writeTo(out);
}
} else {
out.writeVInt(0);
}
} else {
analyzer.writeTo(out);
}
}
public static class AnalyzeTokenList implements Writeable, ToXContentObject {
private final String name;
private final AnalyzeResponse.AnalyzeToken[] tokens;
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AnalyzeTokenList that = (AnalyzeTokenList) o;
return Objects.equals(name, that.name) &&
Arrays.equals(tokens, that.tokens);
}
@Override
public int hashCode() {
int result = Objects.hash(name);
result = 31 * result + Arrays.hashCode(tokens);
return result;
}
public AnalyzeTokenList(String name, AnalyzeResponse.AnalyzeToken[] tokens) {
this.name = name;
this.tokens = tokens;
}
public AnalyzeTokenList(StreamInput in) throws IOException {
name = in.readString();
int size = in.readVInt();
if (size > 0) {
tokens = new AnalyzeResponse.AnalyzeToken[size];
for (int i = 0; i < size; i++) {
tokens[i] = new AnalyzeResponse.AnalyzeToken(in);
}
} else {
tokens = null;
}
}
public String getName() {
return name;
}
public AnalyzeResponse.AnalyzeToken[] getTokens() {
return tokens;
}
XContentBuilder toXContentWithoutObject(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.NAME, this.name);
builder.startArray(AnalyzeResponse.Fields.TOKENS);
if (tokens != null) {
for (AnalyzeResponse.AnalyzeToken token : tokens) {
token.toXContent(builder, params);
}
}
builder.endArray();
return builder;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
toXContentWithoutObject(builder, params);
builder.endObject();
return builder;
}
private static final ConstructingObjectParser<AnalyzeTokenList, Void> PARSER = new ConstructingObjectParser<>("token_list",
true, args -> new AnalyzeTokenList((String) args[0],
fromList(AnalyzeResponse.AnalyzeToken.class, (List<AnalyzeResponse.AnalyzeToken>)args[1])));
static {
PARSER.declareString(constructorArg(), new ParseField(Fields.NAME));
PARSER.declareObjectArray(constructorArg(), (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p),
new ParseField(AnalyzeResponse.Fields.TOKENS));
}
public static AnalyzeTokenList fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
if (tokens != null) {
out.writeVInt(tokens.length);
for (AnalyzeResponse.AnalyzeToken token : tokens) {
token.writeTo(out);
}
} else {
out.writeVInt(0);
}
}
}
public static class CharFilteredText implements Writeable, ToXContentObject {
private final String name;
private final String[] texts;
public CharFilteredText(String name, String[] texts) {
this.name = name;
if (texts != null) {
this.texts = texts;
} else {
this.texts = Strings.EMPTY_ARRAY;
}
}
public CharFilteredText(StreamInput in) throws IOException {
name = in.readString();
texts = in.readStringArray();
}
public String getName() {
return name;
}
public String[] getTexts() {
return texts;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(Fields.NAME, name);
builder.array(Fields.FILTERED_TEXT, texts);
builder.endObject();
return builder;
}
private static final ConstructingObjectParser<CharFilteredText, Void> PARSER = new ConstructingObjectParser<>("char_filtered_text",
true, args -> new CharFilteredText((String) args[0], ((List<String>) args[1]).toArray(new String[0])));
static {
PARSER.declareString(constructorArg(), new ParseField(Fields.NAME));
PARSER.declareStringArray(constructorArg(), new ParseField(Fields.FILTERED_TEXT));
}
public static CharFilteredText fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeStringArray(texts);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CharFilteredText that = (CharFilteredText) o;
return Objects.equals(name, that.name) &&
Arrays.equals(texts, that.texts);
}
@Override
public int hashCode() {
int result = Objects.hash(name);
result = 31 * result + Arrays.hashCode(texts);
return result;
}
}
}
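This class likewise folds into AnalyzeAction. Since the commit is a step toward replacing Streamable with Writeable, a wire round-trip sketch may help; it deliberately uses a null token array, because writeTo encodes null as a zero count and the StreamInput constructor reads a zero count back as null:

import org.elasticsearch.action.admin.indices.analyze.DetailAnalyzeResponse;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;

public class DetailWireSketch {
    public static void main(String[] args) throws Exception {
        // A non-custom-analyzer detail: only the "analyzer" token list is set.
        DetailAnalyzeResponse original = new DetailAnalyzeResponse(
                new DetailAnalyzeResponse.AnalyzeTokenList("standard", null));
        BytesStreamOutput out = new BytesStreamOutput();
        original.writeTo(out);
        try (StreamInput in = out.bytes().streamInput()) {
            DetailAnalyzeResponse copy = new DetailAnalyzeResponse(in);
            // The null/zero-count convention makes the round trip symmetric.
            assert original.equals(copy);
        }
    }
}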

@ -79,7 +79,7 @@ import java.util.function.Function;
/**
* Transport action used to execute analyze requests
*/
public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRequest, AnalyzeResponse> {
public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeAction.Request, AnalyzeAction.Response> {
private final Settings settings;
private final IndicesService indicesService;
@ -90,19 +90,19 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
TransportService transportService, IndicesService indicesService, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver, Environment environment) {
super(AnalyzeAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
AnalyzeRequest::new, ThreadPool.Names.ANALYZE);
AnalyzeAction.Request::new, ThreadPool.Names.ANALYZE);
this.settings = settings;
this.indicesService = indicesService;
this.environment = environment;
}
@Override
protected Writeable.Reader<AnalyzeResponse> getResponseReader() {
return AnalyzeResponse::new;
protected Writeable.Reader<AnalyzeAction.Response> getResponseReader() {
return AnalyzeAction.Response::new;
}
@Override
protected boolean resolveIndex(AnalyzeRequest request) {
protected boolean resolveIndex(AnalyzeAction.Request request) {
return request.index() != null;
}
@ -124,7 +124,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
}
@Override
protected AnalyzeResponse shardOperation(AnalyzeRequest request, ShardId shardId) {
protected AnalyzeAction.Response shardOperation(AnalyzeAction.Request request, ShardId shardId) {
try {
final IndexService indexService;
if (shardId != null) {
@ -170,8 +170,9 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
}
public static AnalyzeResponse analyze(AnalyzeRequest request, String field, Analyzer analyzer, IndexAnalyzers indexAnalyzers,
AnalysisRegistry analysisRegistry, Environment environment, int maxTokenCount) throws IOException {
public static AnalyzeAction.Response analyze(AnalyzeAction.Request request, String field, Analyzer analyzer,
IndexAnalyzers indexAnalyzers, AnalysisRegistry analysisRegistry,
Environment environment, int maxTokenCount) throws IOException {
boolean closeAnalyzer = false;
if (analyzer == null && request.analyzer() != null) {
if (indexAnalyzers == null) {
@ -237,8 +238,8 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
throw new IllegalArgumentException("failed to find analyzer");
}
List<AnalyzeResponse.AnalyzeToken> tokens = null;
DetailAnalyzeResponse detail = null;
List<AnalyzeAction.AnalyzeToken> tokens = null;
AnalyzeAction.DetailAnalyzeResponse detail = null;
if (request.explain()) {
detail = detailAnalyze(request, analyzer, field, maxTokenCount);
@ -250,13 +251,13 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
analyzer.close();
}
return new AnalyzeResponse(tokens, detail);
return new AnalyzeAction.Response(tokens, detail);
}
private static List<AnalyzeResponse.AnalyzeToken> simpleAnalyze(AnalyzeRequest request,
Analyzer analyzer, String field, int maxTokenCount) {
private static List<AnalyzeAction.AnalyzeToken> simpleAnalyze(AnalyzeAction.Request request,
Analyzer analyzer, String field, int maxTokenCount) {
TokenCounter tc = new TokenCounter(maxTokenCount);
List<AnalyzeResponse.AnalyzeToken> tokens = new ArrayList<>();
List<AnalyzeAction.AnalyzeToken> tokens = new ArrayList<>();
int lastPosition = -1;
int lastOffset = 0;
for (String text : request.text()) {
@ -273,7 +274,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
if (increment > 0) {
lastPosition = lastPosition + increment;
}
tokens.add(new AnalyzeResponse.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(),
tokens.add(new AnalyzeAction.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(),
lastOffset + offset.endOffset(), posLen.getPositionLength(), type.type(), null));
tc.increment();
}
@ -290,8 +291,9 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
return tokens;
}
private static DetailAnalyzeResponse detailAnalyze(AnalyzeRequest request, Analyzer analyzer, String field, int maxTokenCount) {
DetailAnalyzeResponse detailResponse;
private static AnalyzeAction.DetailAnalyzeResponse detailAnalyze(AnalyzeAction.Request request, Analyzer analyzer,
String field, int maxTokenCount) {
AnalyzeAction.DetailAnalyzeResponse detailResponse;
final Set<String> includeAttributes = new HashSet<>();
if (request.attributes() != null) {
for (String attribute : request.attributes()) {
@ -351,25 +353,25 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
}
}
DetailAnalyzeResponse.CharFilteredText[] charFilteredLists =
new DetailAnalyzeResponse.CharFilteredText[charFiltersTexts.length];
AnalyzeAction.CharFilteredText[] charFilteredLists =
new AnalyzeAction.CharFilteredText[charFiltersTexts.length];
if (charFilterFactories != null) {
for (int charFilterIndex = 0; charFilterIndex < charFiltersTexts.length; charFilterIndex++) {
charFilteredLists[charFilterIndex] = new DetailAnalyzeResponse.CharFilteredText(
charFilteredLists[charFilterIndex] = new AnalyzeAction.CharFilteredText(
charFilterFactories[charFilterIndex].name(), charFiltersTexts[charFilterIndex]);
}
}
DetailAnalyzeResponse.AnalyzeTokenList[] tokenFilterLists =
new DetailAnalyzeResponse.AnalyzeTokenList[tokenFiltersTokenListCreator.length];
AnalyzeAction.AnalyzeTokenList[] tokenFilterLists =
new AnalyzeAction.AnalyzeTokenList[tokenFiltersTokenListCreator.length];
if (tokenFilterFactories != null) {
for (int tokenFilterIndex = 0; tokenFilterIndex < tokenFiltersTokenListCreator.length; tokenFilterIndex++) {
tokenFilterLists[tokenFilterIndex] = new DetailAnalyzeResponse.AnalyzeTokenList(
tokenFilterLists[tokenFilterIndex] = new AnalyzeAction.AnalyzeTokenList(
tokenFilterFactories[tokenFilterIndex].name(), tokenFiltersTokenListCreator[tokenFilterIndex].getArrayTokens());
}
}
detailResponse = new DetailAnalyzeResponse(charFilteredLists, new DetailAnalyzeResponse.AnalyzeTokenList(
detailResponse = new AnalyzeAction.DetailAnalyzeResponse(charFilteredLists, new AnalyzeAction.AnalyzeTokenList(
customAnalyzer.getTokenizerName(), tokenizerTokenListCreator.getArrayTokens()), tokenFilterLists);
} else {
String name;
@ -384,7 +386,8 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
tokenListCreator.analyze(analyzer.tokenStream(field, text), analyzer, field,
includeAttributes);
}
detailResponse = new DetailAnalyzeResponse(new DetailAnalyzeResponse.AnalyzeTokenList(name, tokenListCreator.getArrayTokens()));
detailResponse
= new AnalyzeAction.DetailAnalyzeResponse(new AnalyzeAction.AnalyzeTokenList(name, tokenListCreator.getArrayTokens()));
}
return detailResponse;
}
@ -443,7 +446,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
private static class TokenListCreator {
int lastPosition = -1;
int lastOffset = 0;
List<AnalyzeResponse.AnalyzeToken> tokens;
List<AnalyzeAction.AnalyzeToken> tokens;
private TokenCounter tc;
TokenListCreator(int maxTokenCount) {
@ -465,7 +468,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
if (increment > 0) {
lastPosition = lastPosition + increment;
}
tokens.add(new AnalyzeResponse.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(),
tokens.add(new AnalyzeAction.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(),
lastOffset + offset.endOffset(), posLen.getPositionLength(), type.type(),
extractExtendedAttributes(stream, includeAttributes)));
tc.increment();
@ -484,8 +487,8 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
}
}
private AnalyzeResponse.AnalyzeToken[] getArrayTokens() {
return tokens.toArray(new AnalyzeResponse.AnalyzeToken[tokens.size()]);
private AnalyzeAction.AnalyzeToken[] getArrayTokens() {
return tokens.toArray(new AnalyzeAction.AnalyzeToken[tokens.size()]);
}
}
@ -526,13 +529,13 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
return extendedAttributes;
}
private static List<CharFilterFactory> parseCharFilterFactories(AnalyzeRequest request, IndexSettings indexSettings,
private static List<CharFilterFactory> parseCharFilterFactories(AnalyzeAction.Request request, IndexSettings indexSettings,
AnalysisRegistry analysisRegistry, Environment environment,
boolean normalizer) throws IOException {
List<CharFilterFactory> charFilterFactoryList = new ArrayList<>();
if (request.charFilters() != null && request.charFilters().size() > 0) {
List<AnalyzeRequest.NameOrDefinition> charFilters = request.charFilters();
for (AnalyzeRequest.NameOrDefinition charFilter : charFilters) {
List<AnalyzeAction.Request.NameOrDefinition> charFilters = request.charFilters();
for (AnalyzeAction.Request.NameOrDefinition charFilter : charFilters) {
CharFilterFactory charFilterFactory;
// parse anonymous settings
if (charFilter.definition != null) {
@ -619,7 +622,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
}
}
private static List<TokenFilterFactory> parseTokenFilterFactories(AnalyzeRequest request, IndexSettings indexSettings,
private static List<TokenFilterFactory> parseTokenFilterFactories(AnalyzeAction.Request request, IndexSettings indexSettings,
AnalysisRegistry analysisRegistry, Environment environment,
Tuple<String, TokenizerFactory> tokenizerFactory,
List<CharFilterFactory> charFilterFactoryList,
@ -627,8 +630,8 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
List<TokenFilterFactory> tokenFilterFactoryList = new ArrayList<>();
DeferredTokenFilterRegistry deferredRegistry = new DeferredTokenFilterRegistry(analysisRegistry, indexSettings);
if (request.tokenFilters() != null && request.tokenFilters().size() > 0) {
List<AnalyzeRequest.NameOrDefinition> tokenFilters = request.tokenFilters();
for (AnalyzeRequest.NameOrDefinition tokenFilter : tokenFilters) {
List<AnalyzeAction.Request.NameOrDefinition> tokenFilters = request.tokenFilters();
for (AnalyzeAction.Request.NameOrDefinition tokenFilter : tokenFilters) {
TokenFilterFactory tokenFilterFactory;
// parse anonymous settings
if (tokenFilter.definition != null) {
@ -683,11 +686,12 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
return tokenFilterFactoryList;
}
private static Tuple<String, TokenizerFactory> parseTokenizerFactory(AnalyzeRequest request, IndexAnalyzers indexAnalzyers,
AnalysisRegistry analysisRegistry, Environment environment) throws IOException {
private static Tuple<String, TokenizerFactory> parseTokenizerFactory(AnalyzeAction.Request request, IndexAnalyzers indexAnalzyers,
AnalysisRegistry analysisRegistry,
Environment environment) throws IOException {
String name;
TokenizerFactory tokenizerFactory;
final AnalyzeRequest.NameOrDefinition tokenizer = request.tokenizer();
final AnalyzeAction.Request.NameOrDefinition tokenizer = request.tokenizer();
// parse anonymous settings
if (tokenizer.definition != null) {
Settings settings = getAnonymousSettings(tokenizer.definition);

@ -28,9 +28,8 @@ import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
@ -672,12 +671,12 @@ public interface IndicesAdminClient extends ElasticsearchClient {
/**
* Analyze text under the provided index.
*/
ActionFuture<AnalyzeResponse> analyze(AnalyzeRequest request);
ActionFuture<AnalyzeAction.Response> analyze(AnalyzeAction.Request request);
/**
* Analyze text under the provided index.
*/
void analyze(AnalyzeRequest request, ActionListener<AnalyzeResponse> listener);
void analyze(AnalyzeAction.Request request, ActionListener<AnalyzeAction.Response> listener);
/**
* Analyze text under the provided index.
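
Both analyze overloads keep their shape; only the request and response types move onto AnalyzeAction. A minimal sketch of the two call styles against the refactored interface, with placeholder index name and text (the blocking variant mirrors the actionGet() usage in the integration tests later in this diff):

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
    import org.elasticsearch.client.Client;

    public class AnalyzeCallSketch {
        // Blocking: resolve the ActionFuture directly.
        public static AnalyzeAction.Response analyzeBlocking(Client client) {
            AnalyzeAction.Request request = new AnalyzeAction.Request("my_index");
            request.text("this is a test");
            return client.admin().indices().analyze(request).actionGet();
        }

        // Non-blocking: the response is delivered to the listener.
        public static void analyzeAsync(Client client, ActionListener<AnalyzeAction.Response> listener) {
            AnalyzeAction.Request request = new AnalyzeAction.Request("my_index");
            request.text("this is a test");
            client.admin().indices().analyze(request, listener);
        }
    }
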
@ -142,9 +142,7 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder;
@ -1596,12 +1594,12 @@ public abstract class AbstractClient implements Client {
}
@Override
public ActionFuture<AnalyzeResponse> analyze(final AnalyzeRequest request) {
public ActionFuture<AnalyzeAction.Response> analyze(final AnalyzeAction.Request request) {
return execute(AnalyzeAction.INSTANCE, request);
}
@Override
public void analyze(final AnalyzeRequest request, final ActionListener<AnalyzeResponse> listener) {
public void analyze(final AnalyzeAction.Request request, final ActionListener<AnalyzeAction.Response> listener) {
execute(AnalyzeAction.INSTANCE, request, listener);
}
@ -18,7 +18,7 @@
*/
package org.elasticsearch.rest.action.admin.indices;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.settings.Settings;
@ -29,8 +29,6 @@ import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.rest.RestRequest.Method.GET;
import static org.elasticsearch.rest.RestRequest.Method.POST;
@ -64,106 +62,10 @@ public class RestAnalyzeAction extends BaseRestHandler {
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
AnalyzeRequest analyzeRequest = new AnalyzeRequest(request.param("index"));
try (XContentParser parser = request.contentOrSourceParamParser()) {
buildFromContent(parser, analyzeRequest);
} catch (IOException e) {
throw new IllegalArgumentException("Failed to parse request body", e);
}
return channel -> client.admin().indices().analyze(analyzeRequest, new RestToXContentListener<>(channel));
}
static void buildFromContent(XContentParser parser, AnalyzeRequest analyzeRequest)
throws IOException {
if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
throw new IllegalArgumentException("Malformed content, must start with an object");
} else {
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (Fields.TEXT.match(currentFieldName, parser.getDeprecationHandler()) &&
token == XContentParser.Token.VALUE_STRING) {
analyzeRequest.text(parser.text());
} else if (Fields.TEXT.match(currentFieldName, parser.getDeprecationHandler()) &&
token == XContentParser.Token.START_ARRAY) {
List<String> texts = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token.isValue() == false) {
throw new IllegalArgumentException(currentFieldName + " array element should only contain text");
}
texts.add(parser.text());
}
analyzeRequest.text(texts.toArray(new String[texts.size()]));
} else if (Fields.ANALYZER.match(currentFieldName, parser.getDeprecationHandler())
&& token == XContentParser.Token.VALUE_STRING) {
analyzeRequest.analyzer(parser.text());
} else if (Fields.FIELD.match(currentFieldName, parser.getDeprecationHandler()) &&
token == XContentParser.Token.VALUE_STRING) {
analyzeRequest.field(parser.text());
} else if (Fields.TOKENIZER.match(currentFieldName, parser.getDeprecationHandler())) {
if (token == XContentParser.Token.VALUE_STRING) {
analyzeRequest.tokenizer(parser.text());
} else if (token == XContentParser.Token.START_OBJECT) {
analyzeRequest.tokenizer(parser.map());
} else {
throw new IllegalArgumentException(currentFieldName + " should be tokenizer's name or setting");
}
} else if (Fields.TOKEN_FILTERS.match(currentFieldName, parser.getDeprecationHandler())
&& token == XContentParser.Token.START_ARRAY) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) {
analyzeRequest.addTokenFilter(parser.text());
} else if (token == XContentParser.Token.START_OBJECT) {
analyzeRequest.addTokenFilter(parser.map());
} else {
throw new IllegalArgumentException(currentFieldName
+ " array element should contain filter's name or setting");
}
}
} else if (Fields.CHAR_FILTERS.match(currentFieldName, parser.getDeprecationHandler())
&& token == XContentParser.Token.START_ARRAY) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) {
analyzeRequest.addCharFilter(parser.text());
} else if (token == XContentParser.Token.START_OBJECT) {
analyzeRequest.addCharFilter(parser.map());
} else {
throw new IllegalArgumentException(currentFieldName
+ " array element should contain char filter's name or setting");
}
}
} else if (Fields.EXPLAIN.match(currentFieldName, parser.getDeprecationHandler())) {
if (parser.isBooleanValue()) {
analyzeRequest.explain(parser.booleanValue());
} else {
throw new IllegalArgumentException(currentFieldName + " must be either 'true' or 'false'");
}
} else if (Fields.ATTRIBUTES.match(currentFieldName, parser.getDeprecationHandler()) &&
token == XContentParser.Token.START_ARRAY) {
List<String> attributes = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token.isValue() == false) {
throw new IllegalArgumentException(currentFieldName + " array element should only contain attribute name");
}
attributes.add(parser.text());
}
analyzeRequest.attributes(attributes.toArray(new String[attributes.size()]));
} else if (Fields.NORMALIZER.match(currentFieldName, parser.getDeprecationHandler())) {
if (token == XContentParser.Token.VALUE_STRING) {
analyzeRequest.normalizer(parser.text());
} else {
throw new IllegalArgumentException(currentFieldName + " should be normalizer's name");
}
} else {
throw new IllegalArgumentException("Unknown parameter ["
+ currentFieldName + "] in request body or parameter is of the wrong type[" + token + "] ");
}
}
AnalyzeAction.Request analyzeRequest = AnalyzeAction.Request.fromXContent(parser, request.param("index"));
return channel -> client.admin().indices().analyze(analyzeRequest, new RestToXContentListener<>(channel));
}
}
}
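
The hand-rolled buildFromContent parsing collapses into a single AnalyzeAction.Request.fromXContent call. The new test expectations later in this diff ("unknown field [...]", "failed to parse field [explain]") are the stock error messages of Elasticsearch's declarative ObjectParser, which suggests fromXContent is ObjectParser-backed. A minimal, hypothetical sketch of that style on a stand-in class, wiring up only the text, analyzer and explain fields (not the actual AnalyzeAction.Request implementation):

    import java.io.IOException;
    import java.util.List;

    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.xcontent.ObjectParser;
    import org.elasticsearch.common.xcontent.XContentParser;

    // Stand-in for AnalyzeAction.Request, for illustration only.
    class AnalyzeRequestStub {
        final String index;
        String[] text;
        String analyzer;
        boolean explain;

        AnalyzeRequestStub(String index) {
            this.index = index;
        }

        void text(List<String> texts) {
            this.text = texts.toArray(new String[0]);
        }

        // ObjectParser rejects undeclared fields by default, producing the
        // "unknown field [...]" messages asserted in the tests below.
        private static final ObjectParser<AnalyzeRequestStub, Void> PARSER = new ObjectParser<>("analyze_request");
        static {
            PARSER.declareStringArray(AnalyzeRequestStub::text, new ParseField("text"));
            PARSER.declareString((req, name) -> req.analyzer = name, new ParseField("analyzer"));
            PARSER.declareBoolean((req, value) -> req.explain = value, new ParseField("explain"));
        }

        static AnalyzeRequestStub fromXContent(XContentParser parser, String index) throws IOException {
            return PARSER.parse(parser, new AnalyzeRequestStub(index), null);
        }
    }
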
@ -21,7 +21,6 @@ package org.elasticsearch.action;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.close.CloseIndexAction;
@ -207,7 +206,7 @@ public class IndicesRequestIT extends ESIntegTestCase {
String analyzeShardAction = AnalyzeAction.NAME + "[s]";
interceptTransportActions(analyzeShardAction);
AnalyzeRequest analyzeRequest = new AnalyzeRequest(randomIndexOrAlias());
AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request(randomIndexOrAlias());
analyzeRequest.text("text");
internalCluster().coordOnlyNodeClient().admin().indices().analyze(analyzeRequest).actionGet();
@ -22,8 +22,7 @@ import org.apache.lucene.analysis.MockTokenFilter;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.action.admin.indices.analyze.TransportAnalyzeAction;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.UUIDs;
@ -136,15 +135,15 @@ public class TransportAnalyzeActionTests extends ESTestCase {
*/
public void testNoIndexAnalyzers() throws IOException {
// Refer to an analyzer by its type so we get its default configuration
AnalyzeRequest request = new AnalyzeRequest();
AnalyzeAction.Request request = new AnalyzeAction.Request();
request.text("the quick brown fox");
request.analyzer("standard");
AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment, maxTokenCount);
List<AnalyzeResponse.AnalyzeToken> tokens = analyze.getTokens();
AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment, maxTokenCount);
List<AnalyzeAction.AnalyzeToken> tokens = analyze.getTokens();
assertEquals(4, tokens.size());
// Refer to a token filter by its type so we get its default configuration
request = new AnalyzeRequest();
request = new AnalyzeAction.Request();
request.text("the qu1ck brown fox");
request.tokenizer("standard");
request.addTokenFilter("mock");
@ -157,7 +156,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
assertEquals("fox", tokens.get(2).getTerm());
// We can refer to a pre-configured token filter by its name to get it
request = new AnalyzeRequest();
request = new AnalyzeAction.Request();
request.text("the qu1ck brown fox");
request.tokenizer("standard");
request.addCharFilter("append_foo");
@ -171,7 +170,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
assertEquals("foxfoo", tokens.get(3).getTerm());
// We can refer to a token filter by its type to get its default configuration
request = new AnalyzeRequest();
request = new AnalyzeAction.Request();
request.text("the qu1ck brown fox");
request.tokenizer("standard");
request.addCharFilter("append");
@ -187,11 +186,11 @@ public class TransportAnalyzeActionTests extends ESTestCase {
}
public void testFillsAttributes() throws IOException {
AnalyzeRequest request = new AnalyzeRequest();
AnalyzeAction.Request request = new AnalyzeAction.Request();
request.analyzer("standard");
request.text("the 1 brown fox");
AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment, maxTokenCount);
List<AnalyzeResponse.AnalyzeToken> tokens = analyze.getTokens();
AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment, maxTokenCount);
List<AnalyzeAction.AnalyzeToken> tokens = analyze.getTokens();
assertEquals(4, tokens.size());
assertEquals("the", tokens.get(0).getTerm());
assertEquals(0, tokens.get(0).getStartOffset());
@ -219,12 +218,12 @@ public class TransportAnalyzeActionTests extends ESTestCase {
}
public void testWithIndexAnalyzers() throws IOException {
AnalyzeRequest request = new AnalyzeRequest();
AnalyzeAction.Request request = new AnalyzeAction.Request();
request.text("the quick brown fox");
request.analyzer("custom_analyzer");
AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment,
AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment,
maxTokenCount);
List<AnalyzeResponse.AnalyzeToken> tokens = analyze.getTokens();
List<AnalyzeAction.AnalyzeToken> tokens = analyze.getTokens();
assertEquals(3, tokens.size());
assertEquals("quick", tokens.get(0).getTerm());
assertEquals("brown", tokens.get(1).getTerm());
@ -263,7 +262,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
public void testGetIndexAnalyserWithoutIndexAnalyzers() throws IOException {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> TransportAnalyzeAction.analyze(
new AnalyzeRequest()
new AnalyzeAction.Request()
.analyzer("custom_analyzer")
.text("the qu1ck brown fox-dog"),
"text", null, null, registry, environment, maxTokenCount));
@ -274,7 +273,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
boolean notGlobal = randomBoolean();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> TransportAnalyzeAction.analyze(
new AnalyzeRequest()
new AnalyzeAction.Request()
.analyzer("foobar")
.text("the qu1ck brown fox"),
"text", null, notGlobal ? indexAnalyzers : null, registry, environment, maxTokenCount));
@ -286,7 +285,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
e = expectThrows(IllegalArgumentException.class,
() -> TransportAnalyzeAction.analyze(
new AnalyzeRequest()
new AnalyzeAction.Request()
.tokenizer("foobar")
.text("the qu1ck brown fox"),
"text", null, notGlobal ? indexAnalyzers : null, registry, environment, maxTokenCount));
@ -298,7 +297,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
e = expectThrows(IllegalArgumentException.class,
() -> TransportAnalyzeAction.analyze(
new AnalyzeRequest()
new AnalyzeAction.Request()
.tokenizer("standard")
.addTokenFilter("foobar")
.text("the qu1ck brown fox"),
@ -311,7 +310,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
e = expectThrows(IllegalArgumentException.class,
() -> TransportAnalyzeAction.analyze(
new AnalyzeRequest()
new AnalyzeAction.Request()
.tokenizer("standard")
.addTokenFilter("lowercase")
.addCharFilter("foobar")
@ -325,7 +324,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
e = expectThrows(IllegalArgumentException.class,
() -> TransportAnalyzeAction.analyze(
new AnalyzeRequest()
new AnalyzeAction.Request()
.normalizer("foobar")
.text("the qu1ck brown fox"),
"text", null, indexAnalyzers, registry, environment, maxTokenCount));
@ -333,13 +332,13 @@ public class TransportAnalyzeActionTests extends ESTestCase {
}
public void testNonPreBuildTokenFilter() throws IOException {
AnalyzeRequest request = new AnalyzeRequest();
AnalyzeAction.Request request = new AnalyzeAction.Request();
request.tokenizer("standard");
request.addTokenFilter("stop"); // stop token filter is not prebuilt in AnalysisModule#setupPreConfiguredTokenFilters()
request.text("the quick brown fox");
AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment,
AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment,
maxTokenCount);
List<AnalyzeResponse.AnalyzeToken> tokens = analyze.getTokens();
List<AnalyzeAction.AnalyzeToken> tokens = analyze.getTokens();
assertEquals(3, tokens.size());
assertEquals("quick", tokens.get(0).getTerm());
assertEquals("brown", tokens.get(1).getTerm());
@ -347,12 +346,12 @@ public class TransportAnalyzeActionTests extends ESTestCase {
}
public void testNormalizerWithIndex() throws IOException {
AnalyzeRequest request = new AnalyzeRequest("index");
AnalyzeAction.Request request = new AnalyzeAction.Request("index");
request.normalizer("my_normalizer");
request.text("ABc");
AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment,
AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment,
maxTokenCount);
List<AnalyzeResponse.AnalyzeToken> tokens = analyze.getTokens();
List<AnalyzeAction.AnalyzeToken> tokens = analyze.getTokens();
assertEquals(1, tokens.size());
assertEquals("abc", tokens.get(0).getTerm());
@ -372,7 +371,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
String text = sbText.toString();
// request with explain=false to test simpleAnalyze path in TransportAnalyzeAction
AnalyzeRequest request = new AnalyzeRequest();
AnalyzeAction.Request request = new AnalyzeAction.Request();
request.text(text);
request.analyzer("standard");
IllegalStateException e = expectThrows(IllegalStateException.class,
@ -382,7 +381,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
+ maxTokenCount + "]." + " This limit can be set by changing the [index.analyze.max_token_count] index level setting.");
// request with explain=true to test detailAnalyze path in TransportAnalyzeAction
AnalyzeRequest request2 = new AnalyzeRequest();
AnalyzeAction.Request request2 = new AnalyzeAction.Request();
request2.text(text);
request2.analyzer("standard");
request2.explain(true);
@ -406,7 +405,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
}
String text = sbText.toString();
AnalyzeRequest request = new AnalyzeRequest();
AnalyzeAction.Request request = new AnalyzeAction.Request();
request.text(text);
request.analyzer("standard");
IllegalStateException e = expectThrows(IllegalStateException.class,
@ -30,7 +30,7 @@ import java.io.IOException;
public class AnalyzeRequestTests extends ESTestCase {
public void testValidation() throws Exception {
AnalyzeRequest request = new AnalyzeRequest();
AnalyzeAction.Request request = new AnalyzeAction.Request();
ActionRequestValidationException e = request.validate();
assertNotNull("text validation should fail", e);
@ -60,7 +60,7 @@ public class AnalyzeRequestTests extends ESTestCase {
e = request.validate();
assertTrue(e.getMessage().contains("tokenizer/analyze should be null if normalizer is specified"));
AnalyzeRequest requestAnalyzer = new AnalyzeRequest("index");
AnalyzeAction.Request requestAnalyzer = new AnalyzeAction.Request("index");
requestAnalyzer.normalizer("some normalizer");
requestAnalyzer.text("something");
requestAnalyzer.analyzer("analyzer");
@ -69,7 +69,7 @@ public class AnalyzeRequestTests extends ESTestCase {
}
public void testSerialization() throws IOException {
AnalyzeRequest request = new AnalyzeRequest("foo");
AnalyzeAction.Request request = new AnalyzeAction.Request("foo");
request.text("a", "b");
request.tokenizer("tokenizer");
request.addTokenFilter("tokenfilter");
@ -79,7 +79,7 @@ public class AnalyzeRequestTests extends ESTestCase {
try (BytesStreamOutput output = new BytesStreamOutput()) {
request.writeTo(output);
try (StreamInput in = output.bytes().streamInput()) {
AnalyzeRequest serialized = new AnalyzeRequest();
AnalyzeAction.Request serialized = new AnalyzeAction.Request();
serialized.readFrom(in);
assertArrayEquals(request.text(), serialized.text());
assertEquals(request.tokenizer().name, serialized.tokenizer().name);
@ -20,124 +20,35 @@
package org.elasticsearch.action.admin.indices.analyze;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import static org.hamcrest.Matchers.equalTo;
public class AnalyzeResponseTests extends AbstractSerializingTestCase<AnalyzeResponse> {
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
return s -> s.contains("tokens.");
}
@Override
protected AnalyzeResponse doParseInstance(XContentParser parser) throws IOException {
return AnalyzeResponse.fromXContent(parser);
}
@Override
protected Writeable.Reader<AnalyzeResponse> instanceReader() {
return AnalyzeResponse::new;
}
@Override
protected AnalyzeResponse createTestInstance() {
int tokenCount = randomIntBetween(1, 30);
AnalyzeResponse.AnalyzeToken[] tokens = new AnalyzeResponse.AnalyzeToken[tokenCount];
for (int i = 0; i < tokenCount; i++) {
tokens[i] = randomToken();
}
if (randomBoolean()) {
DetailAnalyzeResponse.CharFilteredText[] charfilters = null;
DetailAnalyzeResponse.AnalyzeTokenList[] tokenfilters = null;
if (randomBoolean()) {
charfilters = new DetailAnalyzeResponse.CharFilteredText[]{
new DetailAnalyzeResponse.CharFilteredText("my_charfilter", new String[]{"one two"})
};
}
if (randomBoolean()) {
tokenfilters = new DetailAnalyzeResponse.AnalyzeTokenList[]{
new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_1", tokens),
new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_2", tokens)
};
}
DetailAnalyzeResponse dar = new DetailAnalyzeResponse(
charfilters,
new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenizer", tokens),
tokenfilters);
return new AnalyzeResponse(null, dar);
}
return new AnalyzeResponse(Arrays.asList(tokens), null);
}
private AnalyzeResponse.AnalyzeToken randomToken() {
String token = randomAlphaOfLengthBetween(1, 20);
int position = randomIntBetween(0, 1000);
int startOffset = randomIntBetween(0, 1000);
int endOffset = randomIntBetween(0, 1000);
int posLength = randomIntBetween(1, 5);
String type = randomAlphaOfLengthBetween(1, 20);
Map<String, Object> extras = new HashMap<>();
if (randomBoolean()) {
int entryCount = randomInt(6);
for (int i = 0; i < entryCount; i++) {
switch (randomInt(6)) {
case 0:
case 1:
case 2:
case 3:
String key = randomAlphaOfLength(5);
String value = randomAlphaOfLength(10);
extras.put(key, value);
break;
case 4:
String objkey = randomAlphaOfLength(5);
Map<String, String> obj = new HashMap<>();
obj.put(randomAlphaOfLength(5), randomAlphaOfLength(10));
extras.put(objkey, obj);
break;
case 5:
String listkey = randomAlphaOfLength(5);
List<String> list = new ArrayList<>();
list.add(randomAlphaOfLength(4));
list.add(randomAlphaOfLength(6));
extras.put(listkey, list);
break;
}
}
}
return new AnalyzeResponse.AnalyzeToken(token, position, startOffset, endOffset, posLength, type, extras);
}
public class AnalyzeResponseTests extends ESTestCase {
@SuppressWarnings("unchecked")
public void testNullResponseToXContent() throws IOException {
DetailAnalyzeResponse.CharFilteredText[] charfilters = null;
AnalyzeAction.CharFilteredText[] charfilters = null;
String name = "test_tokens_null";
AnalyzeResponse.AnalyzeToken[] tokens = null;
DetailAnalyzeResponse.AnalyzeTokenList tokenizer = null;
AnalyzeAction.AnalyzeToken[] tokens = null;
AnalyzeAction.AnalyzeTokenList tokenizer = null;
DetailAnalyzeResponse.AnalyzeTokenList tokenfiltersItem = new DetailAnalyzeResponse.AnalyzeTokenList(name, tokens);
DetailAnalyzeResponse.AnalyzeTokenList[] tokenfilters = {tokenfiltersItem};
AnalyzeAction.AnalyzeTokenList tokenfiltersItem = new AnalyzeAction.AnalyzeTokenList(name, tokens);
AnalyzeAction.AnalyzeTokenList[] tokenfilters = {tokenfiltersItem};
DetailAnalyzeResponse detail = new DetailAnalyzeResponse(charfilters, tokenizer, tokenfilters);
AnalyzeAction.DetailAnalyzeResponse detail = new AnalyzeAction.DetailAnalyzeResponse(charfilters, tokenizer, tokenfilters);
AnalyzeResponse response = new AnalyzeResponse(null, detail);
AnalyzeAction.Response response = new AnalyzeAction.Response(null, detail);
try (XContentBuilder builder = JsonXContent.contentBuilder()) {
response.toXContent(builder, ToXContent.EMPTY_PARAMS);
Map<String, Object> converted = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2();
@ -19,8 +19,8 @@
package org.elasticsearch.indices.analyze;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
@ -53,9 +53,9 @@ public class AnalyzeActionIT extends ESIntegTestCase {
ensureGreen();
for (int i = 0; i < 10; i++) {
AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "this is a test").get();
AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "this is a test").get();
assertThat(analyzeResponse.getTokens().size(), equalTo(4));
AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0);
AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(0);
assertThat(token.getTerm(), equalTo("this"));
assertThat(token.getStartOffset(), equalTo(0));
assertThat(token.getEndOffset(), equalTo(4));
@ -94,7 +94,7 @@ public class AnalyzeActionIT extends ESIntegTestCase {
}
public void testAnalyzeWithNoIndex() throws Exception {
AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setAnalyzer("simple").get();
AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setAnalyzer("simple").get();
assertThat(analyzeResponse.getTokens().size(), equalTo(4));
analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setTokenizer("keyword").addTokenFilter("lowercase")
@ -105,7 +105,7 @@ public class AnalyzeActionIT extends ESIntegTestCase {
analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setTokenizer("standard").addTokenFilter("lowercase")
.get();
assertThat(analyzeResponse.getTokens().size(), equalTo(4));
AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0);
AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(0);
assertThat(token.getTerm(), equalTo("this"));
token = analyzeResponse.getTokens().get(1);
assertThat(token.getTerm(), equalTo("is"));
@ -134,9 +134,9 @@ public class AnalyzeActionIT extends ESIntegTestCase {
final AnalyzeRequestBuilder requestBuilder = client().admin().indices().prepareAnalyze("THIS IS A TEST");
requestBuilder.setIndex(indexOrAlias());
requestBuilder.setField("document.simple");
AnalyzeResponse analyzeResponse = requestBuilder.get();
AnalyzeAction.Response analyzeResponse = requestBuilder.get();
assertThat(analyzeResponse.getTokens().size(), equalTo(4));
AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(3);
AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(3);
assertThat(token.getTerm(), equalTo("test"));
assertThat(token.getStartOffset(), equalTo(10));
assertThat(token.getEndOffset(), equalTo(14));
@ -146,7 +146,7 @@ public class AnalyzeActionIT extends ESIntegTestCase {
// issue #5974
public void testThatStandardAndDefaultAnalyzersAreSame() throws Exception {
AnalyzeResponse response = client().admin().indices().prepareAnalyze("this is a test").setAnalyzer("standard").get();
AnalyzeAction.Response response = client().admin().indices().prepareAnalyze("this is a test").setAnalyzer("standard").get();
assertTokens(response, "this", "is", "a", "test");
response = client().admin().indices().prepareAnalyze("this is a test").setAnalyzer("default").get();
@ -156,7 +156,7 @@ public class AnalyzeActionIT extends ESIntegTestCase {
assertTokens(response, "this", "is", "a", "test");
}
private void assertTokens(AnalyzeResponse response, String ... tokens) {
private void assertTokens(AnalyzeAction.Response response, String ... tokens) {
assertThat(response.getTokens(), hasSize(tokens.length));
for (int i = 0; i < tokens.length; i++) {
assertThat(response.getTokens().get(i).getTerm(), is(tokens[i]));
@ -180,9 +180,9 @@ public class AnalyzeActionIT extends ESIntegTestCase {
requestBuilder.setText(texts);
requestBuilder.setIndex(indexOrAlias());
requestBuilder.setField("simple");
AnalyzeResponse analyzeResponse = requestBuilder.get();
AnalyzeAction.Response analyzeResponse = requestBuilder.get();
assertThat(analyzeResponse.getTokens().size(), equalTo(7));
AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(3);
AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(3);
assertThat(token.getTerm(), equalTo("test"));
assertThat(token.getPosition(), equalTo(3));
assertThat(token.getStartOffset(), equalTo(10));
@ -199,7 +199,7 @@ public class AnalyzeActionIT extends ESIntegTestCase {
public void testDetailAnalyzeWithNoIndex() throws Exception {
//analyzer only
AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST")
AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST")
.setExplain(true).setAnalyzer("simple").get();
assertThat(analyzeResponse.detail().tokenizer(), IsNull.nullValue());
@ -211,7 +211,7 @@ public class AnalyzeActionIT extends ESIntegTestCase {
public void testDetailAnalyzeCustomAnalyzerWithNoIndex() throws Exception {
//analyzer only
AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST")
AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST")
.setExplain(true).setAnalyzer("simple").get();
assertThat(analyzeResponse.detail().tokenizer(), IsNull.nullValue());
@ -257,12 +257,12 @@ public class AnalyzeActionIT extends ESIntegTestCase {
.setType("document").setSource("simple", "type=text,analyzer=simple,position_increment_gap=100").get();
String[] texts = new String[]{"THIS IS A TEST", "THE SECOND TEXT"};
AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze().setIndex(indexOrAlias()).setText(texts)
AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze().setIndex(indexOrAlias()).setText(texts)
.setExplain(true).setField("simple").setText(texts).execute().get();
assertThat(analyzeResponse.detail().analyzer().getName(), equalTo("simple"));
assertThat(analyzeResponse.detail().analyzer().getTokens().length, equalTo(7));
AnalyzeResponse.AnalyzeToken token = analyzeResponse.detail().analyzer().getTokens()[3];
AnalyzeAction.AnalyzeToken token = analyzeResponse.detail().analyzer().getTokens()[3];
assertThat(token.getTerm(), equalTo("test"));
assertThat(token.getPosition(), equalTo(3));
@ -292,7 +292,7 @@ public class AnalyzeActionIT extends ESIntegTestCase {
Map<String, Object> stopFilterSettings = new HashMap<>();
stopFilterSettings.put("type", "stop");
stopFilterSettings.put("stopwords", new String[]{"foo", "buzz"});
AnalyzeResponse analyzeResponse = client().admin().indices()
AnalyzeAction.Response analyzeResponse = client().admin().indices()
.prepareAnalyze()
.setText("Foo buzz test")
.setTokenizer("standard")
@ -359,9 +359,9 @@ public class AnalyzeActionIT extends ESIntegTestCase {
assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("test", "keyword", "type=keyword"));
ensureGreen("test");
AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "ABC").setField("keyword").get();
AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "ABC").setField("keyword").get();
assertThat(analyzeResponse.getTokens().size(), equalTo(1));
AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0);
AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(0);
assertThat(token.getTerm(), equalTo("ABC"));
assertThat(token.getStartOffset(), equalTo(0));
assertThat(token.getEndOffset(), equalTo(3));
@ -377,9 +377,9 @@ public class AnalyzeActionIT extends ESIntegTestCase {
.addMapping("test", "keyword", "type=keyword,normalizer=my_normalizer"));
ensureGreen("test");
AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "ABC").setField("keyword").get();
AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "ABC").setField("keyword").get();
assertThat(analyzeResponse.getTokens().size(), equalTo(1));
AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0);
AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(0);
assertThat(token.getTerm(), equalTo("abc"));
assertThat(token.getStartOffset(), equalTo(0));
assertThat(token.getEndOffset(), equalTo(3));
@ -18,7 +18,7 @@
*/
package org.elasticsearch.rest.action.admin.indices;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -29,9 +29,11 @@ import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.FakeRestRequest;
import java.io.IOException;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
import static org.mockito.Mockito.mock;
public class RestAnalyzeActionTests extends ESTestCase {
@ -44,15 +46,13 @@ public class RestAnalyzeActionTests extends ESTestCase {
.array("filter", "lowercase")
.endObject())) {
AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
RestAnalyzeAction.buildFromContent(content, analyzeRequest);
AnalyzeAction.Request analyzeRequest = AnalyzeAction.Request.fromXContent(content, "for test");
assertThat(analyzeRequest.text().length, equalTo(1));
assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"}));
assertThat(analyzeRequest.tokenizer().name, equalTo("keyword"));
assertThat(analyzeRequest.tokenFilters().size(), equalTo(1));
for (AnalyzeRequest.NameOrDefinition filter : analyzeRequest.tokenFilters()) {
for (AnalyzeAction.Request.NameOrDefinition filter : analyzeRequest.tokenFilters()) {
assertThat(filter.name, equalTo("lowercase"));
}
}
@ -79,9 +79,7 @@ public class RestAnalyzeActionTests extends ESTestCase {
.field("normalizer", "normalizer")
.endObject())) {
AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
RestAnalyzeAction.buildFromContent(content, analyzeRequest);
AnalyzeAction.Request analyzeRequest = AnalyzeAction.Request.fromXContent(content, "for test");
assertThat(analyzeRequest.text().length, equalTo(1));
assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"}));
@ -95,48 +93,45 @@ public class RestAnalyzeActionTests extends ESTestCase {
}
}
public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception {
public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() {
RestAnalyzeAction action = new RestAnalyzeAction(Settings.EMPTY, mock(RestController.class));
RestRequest request = new FakeRestRequest.Builder(xContentRegistry())
.withContent(new BytesArray("{invalid_json}"), XContentType.JSON).build();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> action.handleRequest(request, null, null));
assertThat(e.getMessage(), equalTo("Failed to parse request body"));
IOException e = expectThrows(IOException.class, () -> action.handleRequest(request, null, null));
assertThat(e.getMessage(), containsString("expecting double-quote"));
}
public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception {
AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
try (XContentParser invalidContent = createParser(XContentFactory.jsonBuilder()
.startObject()
.field("text", "THIS IS A TEST")
.field("unknown", "keyword")
.endObject())) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest));
assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]"));
() -> AnalyzeAction.Request.fromXContent(invalidContent, "for test"));
assertThat(e.getMessage(), containsString("unknown field [unknown]"));
}
}
public void testParseXContentForAnalyzeRequestWithInvalidStringExplainParamThrowsException() throws Exception {
AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder()
.startObject()
.field("explain", "fals")
.endObject())) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest));
assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'"));
() -> AnalyzeAction.Request.fromXContent(invalidExplain, "for test"));
assertThat(e.getMessage(), containsString("failed to parse field [explain]"));
}
}
public void testParseXContentForAnalyzeRequestWithInvalidNormalizerThrowsException() throws Exception {
AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder()
.startObject()
.field("normalizer", true)
.endObject())) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest));
assertThat(e.getMessage(), startsWith("normalizer should be normalizer's name"));
() -> AnalyzeAction.Request.fromXContent(invalidExplain, "for test"));
assertThat(e.getMessage(), containsString("normalizer doesn't support values of type: VALUE_BOOLEAN"));
}
}
@ -147,9 +142,9 @@ public class RestAnalyzeActionTests extends ESTestCase {
.field("tokenizer", "keyword")
.array("filters", "lowercase")
.endObject())) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser,
new AnalyzeRequest("for test")));
assertThat(e.getMessage(), startsWith("Unknown parameter [filters]"));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> AnalyzeAction.Request.fromXContent(parser, "for test"));
assertThat(e.getMessage(), containsString("unknown field [filters]"));
}
try (XContentParser parser = createParser(XContentFactory.jsonBuilder()
@ -158,9 +153,9 @@ public class RestAnalyzeActionTests extends ESTestCase {
.field("tokenizer", "keyword")
.array("token_filters", "lowercase")
.endObject())) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser,
new AnalyzeRequest("for test")));
assertThat(e.getMessage(), startsWith("Unknown parameter [token_filters]"));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> AnalyzeAction.Request.fromXContent(parser, "for test"));
assertThat(e.getMessage(), containsString("unknown field [token_filters]"));
}
try (XContentParser parser = createParser(XContentFactory.jsonBuilder()
@ -169,9 +164,9 @@ public class RestAnalyzeActionTests extends ESTestCase {
.field("tokenizer", "keyword")
.array("char_filters", "lowercase")
.endObject())) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser,
new AnalyzeRequest("for test")));
assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]"));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> AnalyzeAction.Request.fromXContent(parser, "for test"));
assertThat(e.getMessage(), containsString("unknown field [char_filters]"));
}
try (XContentParser parser = createParser(XContentFactory.jsonBuilder()
@ -180,9 +175,9 @@ public class RestAnalyzeActionTests extends ESTestCase {
.field("tokenizer", "keyword")
.array("token_filter", "lowercase")
.endObject())) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser,
new AnalyzeRequest("for test")));
assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]"));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> AnalyzeAction.Request.fromXContent(parser, "for test"));
assertThat(e.getMessage(), containsString("unknown field [token_filter]"));
}
}
}
@ -6,7 +6,6 @@
package org.elasticsearch.xpack.security.action;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.search.ClearScrollAction;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.transport.TransportRequest;
@ -36,8 +35,8 @@ public class SecurityActionMapper {
break;
case AnalyzeAction.NAME:
case AnalyzeAction.NAME + "[s]":
assert request instanceof AnalyzeRequest;
String[] indices = ((AnalyzeRequest) request).indices();
assert request instanceof AnalyzeAction.Request;
String[] indices = ((AnalyzeAction.Request) request).indices();
if (indices == null || (indices.length == 1 && indices[0] == null)) {
return CLUSTER_PERMISSION_ANALYZE;
}
@ -6,7 +6,6 @@
package org.elasticsearch.xpack.security.action;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.search.ClearScrollAction;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.test.ESTestCase;
@ -73,11 +72,11 @@ public class SecurityActionMapperTests extends ESTestCase {
public void testIndicesAnalyze() {
SecurityActionMapper securityActionMapper = new SecurityActionMapper();
AnalyzeRequest analyzeRequest;
AnalyzeAction.Request analyzeRequest;
if (randomBoolean()) {
analyzeRequest = new AnalyzeRequest(randomAlphaOfLength(randomIntBetween(1, 30))).text("text");
analyzeRequest = new AnalyzeAction.Request(randomAlphaOfLength(randomIntBetween(1, 30))).text("text");
} else {
analyzeRequest = new AnalyzeRequest(null).text("text");
analyzeRequest = new AnalyzeAction.Request(null).text("text");
analyzeRequest.index(randomAlphaOfLength(randomIntBetween(1, 30)));
}
assertThat(securityActionMapper.action(AnalyzeAction.NAME, analyzeRequest), equalTo(AnalyzeAction.NAME));
@ -85,7 +84,7 @@ public class SecurityActionMapperTests extends ESTestCase {
public void testClusterAnalyze() {
SecurityActionMapper securityActionMapper = new SecurityActionMapper();
AnalyzeRequest analyzeRequest = new AnalyzeRequest(null).text("text");
AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request(null).text("text");
assertThat(securityActionMapper.action(AnalyzeAction.NAME, analyzeRequest),
equalTo(SecurityActionMapper.CLUSTER_PERMISSION_ANALYZE));
}