Cut AnalyzeResponse over to Writeable (#41915)

This commit makes AnalyzeResponse and its various helper classes implement
Writeable. The classes are also now immutable.

Relates to #34389
Alan Woodward 2019-05-09 13:08:33 +01:00
parent 8e33a5292a
commit 309e4a11b5
6 changed files with 156 additions and 173 deletions
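Background for the diffs below: with Streamable, a response was created empty via newResponse() and then populated by readFrom(StreamInput), which forces mutable fields. With Writeable, deserialization happens in a constructor that takes a StreamInput, so fields can be final and the action hands the transport layer a Writeable.Reader (usually a constructor reference). The following is a minimal sketch of that pattern using a hypothetical ExampleToken class written for illustration only; it is not code from this commit.

import java.io.IOException;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

// Hypothetical class, for illustration only: the Writeable pattern this commit adopts.
public class ExampleToken implements Writeable {

    private final String term;    // fields can be final because they are only set in constructors
    private final int position;

    public ExampleToken(String term, int position) {
        this.term = term;
        this.position = position;
    }

    // Deserializing constructor: replaces the old mutable readFrom(StreamInput) method
    public ExampleToken(StreamInput in) throws IOException {
        this.term = in.readString();
        this.position = in.readVInt();
    }

    // Serialization is unchanged in spirit: write fields in the same order they are read
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(term);
        out.writeVInt(position);
    }
}

An action then exposes a Writeable.Reader<ExampleToken> via ExampleToken::new, which is exactly what AnalyzeAction.getResponseReader() does for AnalyzeResponse below.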

AnalyzeAction.java

@@ -20,6 +20,7 @@
 package org.elasticsearch.action.admin.indices.analyze;

 import org.elasticsearch.action.Action;
+import org.elasticsearch.common.io.stream.Writeable;

 public class AnalyzeAction extends Action<AnalyzeResponse> {
@@ -30,8 +31,13 @@ public class AnalyzeAction extends Action<AnalyzeResponse> {
         super(NAME);
     }

+    @Override
+    public Writeable.Reader<AnalyzeResponse> getResponseReader() {
+        return AnalyzeResponse::new;
+    }
+
     @Override
     public AnalyzeResponse newResponse() {
-        return new AnalyzeResponse();
+        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
     }
 }

AnalyzeResponse.java

@@ -23,7 +23,7 @@ import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -43,17 +43,14 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
 public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeResponse.AnalyzeToken>, ToXContentObject {

-    public static class AnalyzeToken implements Streamable, ToXContentObject {
-        private String term;
-        private int startOffset;
-        private int endOffset;
-        private int position;
-        private int positionLength = 1;
-        private Map<String, Object> attributes;
-        private String type;
-
-        AnalyzeToken() {
-        }
+    public static class AnalyzeToken implements Writeable, ToXContentObject {
+        private final String term;
+        private final int startOffset;
+        private final int endOffset;
+        private final int position;
+        private final int positionLength;
+        private final Map<String, Object> attributes;
+        private final String type;

         @Override
         public boolean equals(Object o) {
@@ -85,6 +82,21 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeResponse.AnalyzeToken>, ToXContentObject {
             this.attributes = attributes;
         }

+        public AnalyzeToken(StreamInput in) throws IOException {
+            term = in.readString();
+            startOffset = in.readInt();
+            endOffset = in.readInt();
+            position = in.readVInt();
+            Integer len = in.readOptionalVInt();
+            if (len != null) {
+                positionLength = len;
+            } else {
+                positionLength = 1;
+            }
+            type = in.readOptionalString();
+            attributes = in.readMap();
+        }
+
         public String getTerm() {
             return this.term;
         }
@@ -134,12 +146,6 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeResponse.AnalyzeToken>, ToXContentObject {
             return builder;
         }

-        public static AnalyzeToken readAnalyzeToken(StreamInput in) throws IOException {
-            AnalyzeToken analyzeToken = new AnalyzeToken();
-            analyzeToken.readFrom(in);
-            return analyzeToken;
-        }
-
         public static AnalyzeToken fromXContent(XContentParser parser) throws IOException {
             ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
             String field = null;
@@ -184,22 +190,6 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeResponse.AnalyzeToken>, ToXContentObject {
             return new AnalyzeToken(term, position, startOffset, endOffset, positionLength, type, attributes);
         }

-        @Override
-        public void readFrom(StreamInput in) throws IOException {
-            term = in.readString();
-            startOffset = in.readInt();
-            endOffset = in.readInt();
-            position = in.readVInt();
-            Integer len = in.readOptionalVInt();
-            if (len != null) {
-                positionLength = len;
-            } else {
-                positionLength = 1;
-            }
-            type = in.readOptionalString();
-            attributes = in.readMap();
-        }
-
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             out.writeString(term);
@@ -212,18 +202,35 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeResponse.AnalyzeToken>, ToXContentObject {
         }
     }

-    private DetailAnalyzeResponse detail;
-    private List<AnalyzeToken> tokens;
-
-    AnalyzeResponse() {
-    }
+    private final DetailAnalyzeResponse detail;
+    private final List<AnalyzeToken> tokens;

     public AnalyzeResponse(List<AnalyzeToken> tokens, DetailAnalyzeResponse detail) {
         this.tokens = tokens;
         this.detail = detail;
     }

+    public AnalyzeResponse(StreamInput in) throws IOException {
+        super.readFrom(in);
+        int size = in.readVInt();
+        if (size > 0) {
+            tokens = new ArrayList<>(size);
+            for (int i = 0; i < size; i++) {
+                tokens.add(new AnalyzeToken(in));
+            }
+        }
+        else {
+            tokens = null;
+        }
+        detail = in.readOptionalWriteable(DetailAnalyzeResponse::new);
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
+    }
+
     public List<AnalyzeToken> getTokens() {
         return this.tokens;
     }
@@ -268,20 +275,6 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeResponse.AnalyzeToken>, ToXContentObject {
         return PARSER.parse(parser, null);
     }

-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        super.readFrom(in);
-        int size = in.readVInt();
-        tokens = new ArrayList<>(size);
-        for (int i = 0; i < size; i++) {
-            tokens.add(AnalyzeToken.readAnalyzeToken(in));
-        }
-        if (tokens.size() == 0) {
-            tokens = null;
-        }
-        detail = in.readOptionalStreamable(DetailAnalyzeResponse::new);
-    }
-
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
@@ -293,7 +286,7 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeResponse.AnalyzeToken>, ToXContentObject {
         } else {
             out.writeVInt(0);
         }
-        out.writeOptionalStreamable(detail);
+        out.writeOptionalWriteable(detail);
     }

     @Override

DetailAnalyzeResponse.java

@@ -24,7 +24,7 @@ import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.ToXContentObject;
@@ -40,16 +40,13 @@ import java.util.Objects;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

-public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
-
-    private boolean customAnalyzer = false;
-    private AnalyzeTokenList analyzer;
-    private CharFilteredText[] charfilters;
-    private AnalyzeTokenList tokenizer;
-    private AnalyzeTokenList[] tokenfilters;
-
-    DetailAnalyzeResponse() {
-    }
+public class DetailAnalyzeResponse implements Writeable, ToXContentFragment {
+
+    private final boolean customAnalyzer;
+    private final AnalyzeTokenList analyzer;
+    private final CharFilteredText[] charfilters;
+    private final AnalyzeTokenList tokenizer;
+    private final AnalyzeTokenList[] tokenfilters;

     public DetailAnalyzeResponse(AnalyzeTokenList analyzer) {
         this(false, analyzer, null, null, null);
@@ -71,46 +68,55 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
         this.tokenfilters = tokenfilters;
     }

+    public DetailAnalyzeResponse(StreamInput in) throws IOException {
+        this.customAnalyzer = in.readBoolean();
+        if (customAnalyzer) {
+            tokenizer = new AnalyzeTokenList(in);
+            int size = in.readVInt();
+            if (size > 0) {
+                charfilters = new CharFilteredText[size];
+                for (int i = 0; i < size; i++) {
+                    charfilters[i] = new CharFilteredText(in);
+                }
+            }
+            else {
+                charfilters = null;
+            }
+            size = in.readVInt();
+            if (size > 0) {
+                tokenfilters = new AnalyzeTokenList[size];
+                for (int i = 0; i < size; i++) {
+                    tokenfilters[i] = new AnalyzeTokenList(in);
+                }
+            }
+            else {
+                tokenfilters = null;
+            }
+            analyzer = null;
+        } else {
+            analyzer = new AnalyzeTokenList(in);
+            tokenfilters = null;
+            tokenizer = null;
+            charfilters = null;
+        }
+    }
+
     public AnalyzeTokenList analyzer() {
         return this.analyzer;
     }

-    public DetailAnalyzeResponse analyzer(AnalyzeTokenList analyzer) {
-        this.customAnalyzer = false;
-        this.analyzer = analyzer;
-        return this;
-    }
-
     public CharFilteredText[] charfilters() {
         return this.charfilters;
     }

-    public DetailAnalyzeResponse charfilters(CharFilteredText[] charfilters) {
-        this.customAnalyzer = true;
-        this.charfilters = charfilters;
-        return this;
-    }
-
     public AnalyzeTokenList tokenizer() {
         return tokenizer;
     }

-    public DetailAnalyzeResponse tokenizer(AnalyzeTokenList tokenizer) {
-        this.customAnalyzer = true;
-        this.tokenizer = tokenizer;
-        return this;
-    }
-
     public AnalyzeTokenList[] tokenfilters() {
         return tokenfilters;
     }

-    public DetailAnalyzeResponse tokenfilters(AnalyzeTokenList[] tokenfilters) {
-        this.customAnalyzer = true;
-        this.tokenfilters = tokenfilters;
-        return this;
-    }
-
     @Override
     public boolean equals(Object o) {
         if (this == o) return true;
@@ -201,30 +207,6 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
         static final String TOKENFILTERS = "tokenfilters";
     }

-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        this.customAnalyzer = in.readBoolean();
-        if (customAnalyzer) {
-            tokenizer = AnalyzeTokenList.readAnalyzeTokenList(in);
-            int size = in.readVInt();
-            if (size > 0) {
-                charfilters = new CharFilteredText[size];
-                for (int i = 0; i < size; i++) {
-                    charfilters[i] = CharFilteredText.readCharFilteredText(in);
-                }
-            }
-            size = in.readVInt();
-            if (size > 0) {
-                tokenfilters = new AnalyzeTokenList[size];
-                for (int i = 0; i < size; i++) {
-                    tokenfilters[i] = AnalyzeTokenList.readAnalyzeTokenList(in);
-                }
-            }
-        } else {
-            analyzer = AnalyzeTokenList.readAnalyzeTokenList(in);
-        }
-    }
-
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeBoolean(customAnalyzer);
@@ -251,9 +233,9 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
         }
     }

-    public static class AnalyzeTokenList implements Streamable, ToXContentObject {
-        private String name;
-        private AnalyzeResponse.AnalyzeToken[] tokens;
+    public static class AnalyzeTokenList implements Writeable, ToXContentObject {
+        private final String name;
+        private final AnalyzeResponse.AnalyzeToken[] tokens;

         @Override
         public boolean equals(Object o) {
@@ -271,14 +253,25 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
             return result;
         }

-        AnalyzeTokenList() {
-        }
-
         public AnalyzeTokenList(String name, AnalyzeResponse.AnalyzeToken[] tokens) {
             this.name = name;
             this.tokens = tokens;
         }

+        public AnalyzeTokenList(StreamInput in) throws IOException {
+            name = in.readString();
+            int size = in.readVInt();
+            if (size > 0) {
+                tokens = new AnalyzeResponse.AnalyzeToken[size];
+                for (int i = 0; i < size; i++) {
+                    tokens[i] = new AnalyzeResponse.AnalyzeToken(in);
+                }
+            }
+            else {
+                tokens = null;
+            }
+        }
+
         public String getName() {
             return name;
         }
@@ -287,12 +280,6 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
             return tokens;
         }

-        public static AnalyzeTokenList readAnalyzeTokenList(StreamInput in) throws IOException {
-            AnalyzeTokenList list = new AnalyzeTokenList();
-            list.readFrom(in);
-            return list;
-        }
-
         XContentBuilder toXContentWithoutObject(XContentBuilder builder, Params params) throws IOException {
             builder.field(Fields.NAME, this.name);
             builder.startArray(AnalyzeResponse.Fields.TOKENS);
@@ -327,18 +314,6 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
             return PARSER.parse(parser, null);
         }

-        @Override
-        public void readFrom(StreamInput in) throws IOException {
-            name = in.readString();
-            int size = in.readVInt();
-            if (size > 0) {
-                tokens = new AnalyzeResponse.AnalyzeToken[size];
-                for (int i = 0; i < size; i++) {
-                    tokens[i] = AnalyzeResponse.AnalyzeToken.readAnalyzeToken(in);
-                }
-            }
-        }
-
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             out.writeString(name);
@@ -353,12 +328,9 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
         }
     }

-    public static class CharFilteredText implements Streamable, ToXContentObject {
-        private String name;
-        private String[] texts;
-
-        CharFilteredText() {
-        }
+    public static class CharFilteredText implements Writeable, ToXContentObject {
+        private final String name;
+        private final String[] texts;

         public CharFilteredText(String name, String[] texts) {
             this.name = name;
@@ -369,6 +341,11 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
             }
         }

+        public CharFilteredText(StreamInput in) throws IOException {
+            name = in.readString();
+            texts = in.readStringArray();
+        }
+
         public String getName() {
             return name;
         }
@@ -398,18 +375,6 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
             return PARSER.parse(parser, null);
         }

-        public static CharFilteredText readCharFilteredText(StreamInput in) throws IOException {
-            CharFilteredText text = new CharFilteredText();
-            text.readFrom(in);
-            return text;
-        }
-
-        @Override
-        public void readFrom(StreamInput in) throws IOException {
-            name = in.readString();
-            texts = in.readStringArray();
-        }
-
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             out.writeString(name);

TransportAnalyzeAction.java

@@ -40,6 +40,7 @@ import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.env.Environment;
@@ -97,7 +98,12 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
     @Override
     protected AnalyzeResponse newResponse() {
-        return new AnalyzeResponse();
+        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
+    }
+
+    @Override
+    protected Writeable.Reader<AnalyzeResponse> getResponseReader() {
+        return AnalyzeResponse::new;
     }

     @Override

TransportSingleShardAction.java

@@ -38,6 +38,7 @@ import org.elasticsearch.cluster.routing.ShardsIterator;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.logging.LoggerMessageFormat;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
 import org.elasticsearch.index.shard.ShardId;
@@ -118,8 +119,18 @@ public abstract class TransportSingleShardAction<Request extends SingleShardRequ
             }
         });
     }

+    @Deprecated
     protected abstract Response newResponse();

+    protected Writeable.Reader<Response> getResponseReader() {
+        return in -> {
+            Response response = newResponse();
+            response.readFrom(in);
+            return response;
+        };
+    }
+
     protected abstract boolean resolveIndex(Request request);

     protected ClusterBlockException checkGlobalBlock(ClusterState state) {
@@ -182,13 +193,12 @@ public abstract class TransportSingleShardAction<Request extends SingleShardRequ
         public void start() {
             if (shardIt == null) {
                 // just execute it on the local node
+                final Writeable.Reader<Response> reader = getResponseReader();
                 transportService.sendRequest(clusterService.localNode(), transportShardAction, internalRequest.request(),
                     new TransportResponseHandler<Response>() {
                         @Override
                         public Response read(StreamInput in) throws IOException {
-                            Response response = newResponse();
-                            response.readFrom(in);
-                            return response;
+                            return reader.read(in);
                         }

                         @Override
@@ -251,14 +261,13 @@ public abstract class TransportSingleShardAction<Request extends SingleShardRequ
                     node
                 );
             }
+            final Writeable.Reader<Response> reader = getResponseReader();
             transportService.sendRequest(node, transportShardAction, internalRequest.request(),
                 new TransportResponseHandler<Response>() {
                     @Override
                     public Response read(StreamInput in) throws IOException {
-                        Response response = newResponse();
-                        response.readFrom(in);
-                        return response;
+                        return reader.read(in);
                     }

                     @Override

AnalyzeResponseTests.java

@@ -20,12 +20,13 @@
 package org.elasticsearch.action.admin.indices.analyze;

 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
-import org.elasticsearch.test.AbstractStreamableXContentTestCase;
+import org.elasticsearch.test.AbstractSerializingTestCase;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -37,7 +38,7 @@ import java.util.function.Predicate;
 import static org.hamcrest.Matchers.equalTo;

-public class AnalyzeResponseTests extends AbstractStreamableXContentTestCase<AnalyzeResponse> {
+public class AnalyzeResponseTests extends AbstractSerializingTestCase<AnalyzeResponse> {

     @Override
     protected Predicate<String> getRandomFieldsExcludeFilter() {
@@ -50,8 +51,8 @@ public class AnalyzeResponseTests extends AbstractStreamableXContentTestCase<AnalyzeResponse> {
     }

     @Override
-    protected AnalyzeResponse createBlankInstance() {
-        return new AnalyzeResponse();
+    protected Writeable.Reader<AnalyzeResponse> instanceReader() {
+        return AnalyzeResponse::new;
     }

     @Override
@@ -61,21 +62,24 @@ public class AnalyzeResponseTests extends AbstractStreamableXContentTestCase<AnalyzeResponse> {
         for (int i = 0; i < tokenCount; i++) {
             tokens[i] = randomToken();
         }
-        DetailAnalyzeResponse dar = null;
         if (randomBoolean()) {
-            dar = new DetailAnalyzeResponse();
+            DetailAnalyzeResponse.CharFilteredText[] charfilters = null;
+            DetailAnalyzeResponse.AnalyzeTokenList[] tokenfilters = null;
             if (randomBoolean()) {
-                dar.charfilters(new DetailAnalyzeResponse.CharFilteredText[]{
+                charfilters = new DetailAnalyzeResponse.CharFilteredText[]{
                     new DetailAnalyzeResponse.CharFilteredText("my_charfilter", new String[]{"one two"})
-                });
+                };
             }
-            dar.tokenizer(new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenizer", tokens));
             if (randomBoolean()) {
-                dar.tokenfilters(new DetailAnalyzeResponse.AnalyzeTokenList[]{
+                tokenfilters = new DetailAnalyzeResponse.AnalyzeTokenList[]{
                     new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_1", tokens),
                     new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_2", tokens)
-                });
+                };
             }
+            DetailAnalyzeResponse dar = new DetailAnalyzeResponse(
+                charfilters,
+                new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenizer", tokens),
+                tokenfilters);
             return new AnalyzeResponse(null, dar);
         }
         return new AnalyzeResponse(Arrays.asList(tokens), null);
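
As a usage note on the test change above: AbstractSerializingTestCase drives its serialization round trips through the reader returned by instanceReader(), here AnalyzeResponse::new. A rough, hypothetical sketch of what such a round trip looks like (not code from this commit or from the test framework):

import java.io.IOException;

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Writeable;

// Hypothetical helper: serialize a Writeable and rebuild it through a Writeable.Reader,
// e.g. copy(response, AnalyzeResponse::new).
public final class RoundTrip {
    public static <T extends Writeable> T copy(T original, Writeable.Reader<T> reader) throws IOException {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            original.writeTo(out);                       // write to an in-memory stream
            try (StreamInput in = out.bytes().streamInput()) {
                return reader.read(in);                  // rebuild the instance via the reader
            }
        }
    }
}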