Cut AnalyzeResponse over to Writeable (#41915)

This commit makes AnalyzeResponse and its various helper classes implement
Writeable. The classes are also now immutable.

Relates to #34389
This commit is contained in:
Alan Woodward 2019-05-09 13:08:33 +01:00
parent 8e33a5292a
commit 309e4a11b5
6 changed files with 156 additions and 173 deletions

View File

@ -20,6 +20,7 @@
package org.elasticsearch.action.admin.indices.analyze;
import org.elasticsearch.action.Action;
import org.elasticsearch.common.io.stream.Writeable;
public class AnalyzeAction extends Action<AnalyzeResponse> {
@ -30,8 +31,13 @@ public class AnalyzeAction extends Action<AnalyzeResponse> {
super(NAME);
}
+@Override
+public Writeable.Reader<AnalyzeResponse> getResponseReader() {
+return AnalyzeResponse::new;
+}
@Override
public AnalyzeResponse newResponse() {
-return new AnalyzeResponse();
+throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}
}

View File

@ -23,7 +23,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -43,17 +43,14 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpect
public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeResponse.AnalyzeToken>, ToXContentObject {
-public static class AnalyzeToken implements Streamable, ToXContentObject {
-private String term;
-private int startOffset;
-private int endOffset;
-private int position;
-private int positionLength = 1;
-private Map<String, Object> attributes;
-private String type;
-AnalyzeToken() {
-}
+public static class AnalyzeToken implements Writeable, ToXContentObject {
+private final String term;
+private final int startOffset;
+private final int endOffset;
+private final int position;
+private final int positionLength;
+private final Map<String, Object> attributes;
+private final String type;
@Override
public boolean equals(Object o) {
@ -85,6 +82,21 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
this.attributes = attributes;
}
public AnalyzeToken(StreamInput in) throws IOException {
term = in.readString();
startOffset = in.readInt();
endOffset = in.readInt();
position = in.readVInt();
Integer len = in.readOptionalVInt();
if (len != null) {
positionLength = len;
} else {
positionLength = 1;
}
type = in.readOptionalString();
attributes = in.readMap();
}
public String getTerm() {
return this.term;
}
@ -134,12 +146,6 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
return builder;
}
public static AnalyzeToken readAnalyzeToken(StreamInput in) throws IOException {
AnalyzeToken analyzeToken = new AnalyzeToken();
analyzeToken.readFrom(in);
return analyzeToken;
}
public static AnalyzeToken fromXContent(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
String field = null;
@ -184,22 +190,6 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
return new AnalyzeToken(term, position, startOffset, endOffset, positionLength, type, attributes);
}
@Override
public void readFrom(StreamInput in) throws IOException {
term = in.readString();
startOffset = in.readInt();
endOffset = in.readInt();
position = in.readVInt();
Integer len = in.readOptionalVInt();
if (len != null) {
positionLength = len;
} else {
positionLength = 1;
}
type = in.readOptionalString();
attributes = in.readMap();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(term);
@ -212,18 +202,35 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
}
}
-private DetailAnalyzeResponse detail;
+private final DetailAnalyzeResponse detail;
-private List<AnalyzeToken> tokens;
-AnalyzeResponse() {
-}
+private final List<AnalyzeToken> tokens;
public AnalyzeResponse(List<AnalyzeToken> tokens, DetailAnalyzeResponse detail) {
this.tokens = tokens;
this.detail = detail;
}
public AnalyzeResponse(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
if (size > 0) {
tokens = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
tokens.add(new AnalyzeToken(in));
}
}
else {
tokens = null;
}
detail = in.readOptionalWriteable(DetailAnalyzeResponse::new);
}
@Override
public void readFrom(StreamInput in) throws IOException {
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}
public List<AnalyzeToken> getTokens() {
return this.tokens;
}
@ -268,20 +275,6 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
return PARSER.parse(parser, null);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
tokens = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
tokens.add(AnalyzeToken.readAnalyzeToken(in));
}
if (tokens.size() == 0) {
tokens = null;
}
detail = in.readOptionalStreamable(DetailAnalyzeResponse::new);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
@ -293,7 +286,7 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
} else {
out.writeVInt(0);
}
-out.writeOptionalStreamable(detail);
+out.writeOptionalWriteable(detail);
}
@Override

View File

@ -24,7 +24,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -40,16 +40,13 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
-public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
-private boolean customAnalyzer = false;
-private AnalyzeTokenList analyzer;
-private CharFilteredText[] charfilters;
-private AnalyzeTokenList tokenizer;
-private AnalyzeTokenList[] tokenfilters;
-DetailAnalyzeResponse() {
-}
+public class DetailAnalyzeResponse implements Writeable, ToXContentFragment {
+private final boolean customAnalyzer;
+private final AnalyzeTokenList analyzer;
+private final CharFilteredText[] charfilters;
+private final AnalyzeTokenList tokenizer;
+private final AnalyzeTokenList[] tokenfilters;
public DetailAnalyzeResponse(AnalyzeTokenList analyzer) {
this(false, analyzer, null, null, null);
@ -71,46 +68,55 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
this.tokenfilters = tokenfilters;
}
-public AnalyzeTokenList analyzer() {
-return this.analyzer;
public DetailAnalyzeResponse(StreamInput in) throws IOException {
this.customAnalyzer = in.readBoolean();
if (customAnalyzer) {
tokenizer = new AnalyzeTokenList(in);
int size = in.readVInt();
if (size > 0) {
charfilters = new CharFilteredText[size];
for (int i = 0; i < size; i++) {
charfilters[i] = new CharFilteredText(in);
}
}
else {
charfilters = null;
}
size = in.readVInt();
if (size > 0) {
tokenfilters = new AnalyzeTokenList[size];
for (int i = 0; i < size; i++) {
tokenfilters[i] = new AnalyzeTokenList(in);
}
}
else {
tokenfilters = null;
}
analyzer = null;
} else {
analyzer = new AnalyzeTokenList(in);
tokenfilters = null;
tokenizer = null;
charfilters = null;
}
}
-public DetailAnalyzeResponse analyzer(AnalyzeTokenList analyzer) {
-this.customAnalyzer = false;
-this.analyzer = analyzer;
-return this;
public AnalyzeTokenList analyzer() {
return this.analyzer;
}
public CharFilteredText[] charfilters() {
return this.charfilters;
}
-public DetailAnalyzeResponse charfilters(CharFilteredText[] charfilters) {
-this.customAnalyzer = true;
-this.charfilters = charfilters;
-return this;
-}
public AnalyzeTokenList tokenizer() {
return tokenizer;
}
-public DetailAnalyzeResponse tokenizer(AnalyzeTokenList tokenizer) {
-this.customAnalyzer = true;
-this.tokenizer = tokenizer;
-return this;
-}
public AnalyzeTokenList[] tokenfilters() {
return tokenfilters;
}
-public DetailAnalyzeResponse tokenfilters(AnalyzeTokenList[] tokenfilters) {
-this.customAnalyzer = true;
-this.tokenfilters = tokenfilters;
-return this;
-}
@Override
public boolean equals(Object o) {
if (this == o) return true;
@ -201,30 +207,6 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
static final String TOKENFILTERS = "tokenfilters";
}
@Override
public void readFrom(StreamInput in) throws IOException {
this.customAnalyzer = in.readBoolean();
if (customAnalyzer) {
tokenizer = AnalyzeTokenList.readAnalyzeTokenList(in);
int size = in.readVInt();
if (size > 0) {
charfilters = new CharFilteredText[size];
for (int i = 0; i < size; i++) {
charfilters[i] = CharFilteredText.readCharFilteredText(in);
}
}
size = in.readVInt();
if (size > 0) {
tokenfilters = new AnalyzeTokenList[size];
for (int i = 0; i < size; i++) {
tokenfilters[i] = AnalyzeTokenList.readAnalyzeTokenList(in);
}
}
} else {
analyzer = AnalyzeTokenList.readAnalyzeTokenList(in);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(customAnalyzer);
@ -251,9 +233,9 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
}
}
-public static class AnalyzeTokenList implements Streamable, ToXContentObject {
-private String name;
-private AnalyzeResponse.AnalyzeToken[] tokens;
+public static class AnalyzeTokenList implements Writeable, ToXContentObject {
+private final String name;
+private final AnalyzeResponse.AnalyzeToken[] tokens;
@Override
public boolean equals(Object o) {
@ -271,14 +253,25 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
return result;
}
-AnalyzeTokenList() {
-}
public AnalyzeTokenList(String name, AnalyzeResponse.AnalyzeToken[] tokens) {
this.name = name;
this.tokens = tokens;
}
public AnalyzeTokenList(StreamInput in) throws IOException {
name = in.readString();
int size = in.readVInt();
if (size > 0) {
tokens = new AnalyzeResponse.AnalyzeToken[size];
for (int i = 0; i < size; i++) {
tokens[i] = new AnalyzeResponse.AnalyzeToken(in);
}
}
else {
tokens = null;
}
}
public String getName() {
return name;
}
@ -287,12 +280,6 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
return tokens;
}
public static AnalyzeTokenList readAnalyzeTokenList(StreamInput in) throws IOException {
AnalyzeTokenList list = new AnalyzeTokenList();
list.readFrom(in);
return list;
}
XContentBuilder toXContentWithoutObject(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.NAME, this.name);
builder.startArray(AnalyzeResponse.Fields.TOKENS);
@ -327,18 +314,6 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
return PARSER.parse(parser, null);
}
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
int size = in.readVInt();
if (size > 0) {
tokens = new AnalyzeResponse.AnalyzeToken[size];
for (int i = 0; i < size; i++) {
tokens[i] = AnalyzeResponse.AnalyzeToken.readAnalyzeToken(in);
}
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
@ -353,12 +328,9 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
}
}
-public static class CharFilteredText implements Streamable, ToXContentObject {
-private String name;
-private String[] texts;
-CharFilteredText() {
-}
+public static class CharFilteredText implements Writeable, ToXContentObject {
+private final String name;
+private final String[] texts;
public CharFilteredText(String name, String[] texts) {
this.name = name;
@ -369,6 +341,11 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
}
}
public CharFilteredText(StreamInput in) throws IOException {
name = in.readString();
texts = in.readStringArray();
}
public String getName() {
return name;
}
@ -398,18 +375,6 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment {
return PARSER.parse(parser, null);
}
public static CharFilteredText readCharFilteredText(StreamInput in) throws IOException {
CharFilteredText text = new CharFilteredText();
text.readFrom(in);
return text;
}
@Override
public void readFrom(StreamInput in) throws IOException {
name = in.readString();
texts = in.readStringArray();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);

View File

@ -40,6 +40,7 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.env.Environment;
@ -97,7 +98,12 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
@Override
protected AnalyzeResponse newResponse() {
-return new AnalyzeResponse();
+throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}
@Override
protected Writeable.Reader<AnalyzeResponse> getResponseReader() {
return AnalyzeResponse::new;
}
@Override

View File

@ -38,6 +38,7 @@ import org.elasticsearch.cluster.routing.ShardsIterator;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.index.shard.ShardId;
@ -118,8 +119,18 @@ public abstract class TransportSingleShardAction<Request extends SingleShardRequ
}
});
}
@Deprecated
protected abstract Response newResponse();
protected Writeable.Reader<Response> getResponseReader() {
return in -> {
Response response = newResponse();
response.readFrom(in);
return response;
};
}
protected abstract boolean resolveIndex(Request request);
protected ClusterBlockException checkGlobalBlock(ClusterState state) {
@ -182,13 +193,12 @@ public abstract class TransportSingleShardAction<Request extends SingleShardRequ
public void start() {
if (shardIt == null) {
// just execute it on the local node
final Writeable.Reader<Response> reader = getResponseReader();
transportService.sendRequest(clusterService.localNode(), transportShardAction, internalRequest.request(),
new TransportResponseHandler<Response>() {
@Override
public Response read(StreamInput in) throws IOException {
Response response = newResponse();
response.readFrom(in);
return response;
return reader.read(in);
}
@Override
@ -251,14 +261,13 @@ public abstract class TransportSingleShardAction<Request extends SingleShardRequ
node
);
}
final Writeable.Reader<Response> reader = getResponseReader();
transportService.sendRequest(node, transportShardAction, internalRequest.request(),
new TransportResponseHandler<Response>() {
@Override
public Response read(StreamInput in) throws IOException {
Response response = newResponse();
response.readFrom(in);
return response;
return reader.read(in);
}
@Override

View File

@ -20,12 +20,13 @@
package org.elasticsearch.action.admin.indices.analyze;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
-import org.elasticsearch.test.AbstractStreamableXContentTestCase;
+import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
import java.util.ArrayList;
@ -37,7 +38,7 @@ import java.util.function.Predicate;
import static org.hamcrest.Matchers.equalTo;
-public class AnalyzeResponseTests extends AbstractStreamableXContentTestCase<AnalyzeResponse> {
+public class AnalyzeResponseTests extends AbstractSerializingTestCase<AnalyzeResponse> {
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
@ -50,8 +51,8 @@ public class AnalyzeResponseTests extends AbstractStreamableXContentTestCase<Ana
}
@Override
-protected AnalyzeResponse createBlankInstance() {
-return new AnalyzeResponse();
+protected Writeable.Reader<AnalyzeResponse> instanceReader() {
+return AnalyzeResponse::new;
}
@Override
@ -61,21 +62,24 @@ public class AnalyzeResponseTests extends AbstractStreamableXContentTestCase<Ana
for (int i = 0; i < tokenCount; i++) {
tokens[i] = randomToken();
}
-DetailAnalyzeResponse dar = null;
if (randomBoolean()) {
-dar = new DetailAnalyzeResponse();
+DetailAnalyzeResponse.CharFilteredText[] charfilters = null;
+DetailAnalyzeResponse.AnalyzeTokenList[] tokenfilters = null;
if (randomBoolean()) {
-dar.charfilters(new DetailAnalyzeResponse.CharFilteredText[]{
+charfilters = new DetailAnalyzeResponse.CharFilteredText[]{
new DetailAnalyzeResponse.CharFilteredText("my_charfilter", new String[]{"one two"})
-});
+};
}
-dar.tokenizer(new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenizer", tokens));
if (randomBoolean()) {
-dar.tokenfilters(new DetailAnalyzeResponse.AnalyzeTokenList[]{
+tokenfilters = new DetailAnalyzeResponse.AnalyzeTokenList[]{
new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_1", tokens),
new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_2", tokens)
-});
+};
}
DetailAnalyzeResponse dar = new DetailAnalyzeResponse(
charfilters,
new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenizer", tokens),
tokenfilters);
return new AnalyzeResponse(null, dar);
}
return new AnalyzeResponse(Arrays.asList(tokens), null);