Same as #59626 for a few more spots.

parent 3a0e7f4294
commit 753fd4f6bc
@@ -694,16 +694,13 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
      * adds it to the given exception.
      */
     public static <T extends Throwable> T readStackTrace(T throwable, StreamInput in) throws IOException {
-        final int stackTraceElements = in.readVInt();
-        StackTraceElement[] stackTrace = new StackTraceElement[stackTraceElements];
-        for (int i = 0; i < stackTraceElements; i++) {
-            final String declaringClasss = in.readString();
-            final String fileName = in.readOptionalString();
-            final String methodName = in.readString();
-            final int lineNumber = in.readVInt();
-            stackTrace[i] = new StackTraceElement(declaringClasss, methodName, fileName, lineNumber);
-        }
-        throwable.setStackTrace(stackTrace);
+        throwable.setStackTrace(in.readArray(i -> {
+            final String declaringClasss = i.readString();
+            final String fileName = i.readOptionalString();
+            final String methodName = i.readString();
+            final int lineNumber = i.readVInt();
+            return new StackTraceElement(declaringClasss, methodName, fileName, lineNumber);
+        }, StackTraceElement[]::new));

         int numSuppressed = in.readVInt();
         for (int i = 0; i < numSuppressed; i++) {
@@ -717,19 +714,13 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
      */
     public static <T extends Throwable> T writeStackTraces(T throwable, StreamOutput out,
                                                            Writer<Throwable> exceptionWriter) throws IOException {
-        StackTraceElement[] stackTrace = throwable.getStackTrace();
-        out.writeVInt(stackTrace.length);
-        for (StackTraceElement element : stackTrace) {
-            out.writeString(element.getClassName());
-            out.writeOptionalString(element.getFileName());
-            out.writeString(element.getMethodName());
-            out.writeVInt(element.getLineNumber());
-        }
-        Throwable[] suppressed = throwable.getSuppressed();
-        out.writeVInt(suppressed.length);
-        for (Throwable t : suppressed) {
-            exceptionWriter.write(out, t);
-        }
+        out.writeArray((o, v) -> {
+            o.writeString(v.getClassName());
+            o.writeOptionalString(v.getFileName());
+            o.writeString(v.getMethodName());
+            o.writeVInt(v.getLineNumber());
+        }, throwable.getStackTrace());
+        out.writeArray(exceptionWriter, throwable.getSuppressed());
         return throwable;
     }
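The two hunks above replace hand-rolled length-prefixed loops with the readArray/writeArray helpers on StreamInput/StreamOutput, passing the per-element logic as a lambda. A minimal standalone sketch of that helper pattern follows; the Reader/Writer interfaces, the class name, and the fixed-width int prefix are illustrative assumptions for this sketch (Elasticsearch length-prefixes with a variable-length vInt), not the actual StreamInput/StreamOutput implementation.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.function.IntFunction;

// Minimal sketch of the helper pattern this commit adopts: a length-prefixed
// array codec that replaces hand-rolled read/write loops. Names are illustrative.
public class ArrayCodecSketch {

    @FunctionalInterface
    interface Reader<T> { T read(DataInputStream in) throws IOException; }

    @FunctionalInterface
    interface Writer<T> { void write(DataOutputStream out, T value) throws IOException; }

    // Write the element count, then each element via the supplied writer.
    static <T> void writeArray(DataOutputStream out, Writer<T> writer, T[] array) throws IOException {
        out.writeInt(array.length); // Elasticsearch uses a variable-length vInt here
        for (T value : array) {
            writer.write(out, value);
        }
    }

    // Read the count, allocate via the array factory, then fill from the reader.
    static <T> T[] readArray(DataInputStream in, Reader<T> reader, IntFunction<T[]> factory) throws IOException {
        int length = in.readInt();
        T[] array = factory.apply(length);
        for (int i = 0; i < length; i++) {
            array[i] = reader.read(in);
        }
        return array;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        writeArray(new DataOutputStream(bytes), DataOutputStream::writeUTF, new String[] {"a", "b"});
        String[] back = readArray(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())),
                DataInputStream::readUTF, String[]::new);
        System.out.println(back.length + " elements round-tripped"); // 2 elements round-tripped
    }
}

The wire layout is unchanged by such a refactor: a count followed by the elements, exactly what the removed loops produced.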
@@ -75,11 +75,7 @@ public class GetIndexRequest extends ClusterInfoRequest<GetIndexRequest> {

     public GetIndexRequest(StreamInput in) throws IOException {
         super(in);
-        int size = in.readVInt();
-        features = new Feature[size];
-        for (int i = 0; i < size; i++) {
-            features[i] = Feature.fromId(in.readByte());
-        }
+        features = in.readArray(i -> Feature.fromId(i.readByte()), Feature[]::new);
         humanReadable = in.readBoolean();
         if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
             includeDefaults = in.readBoolean();
@@ -145,10 +141,7 @@ public class GetIndexRequest extends ClusterInfoRequest<GetIndexRequest> {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        out.writeVInt(features.length);
-        for (Feature feature : features) {
-            out.writeByte(feature.id);
-        }
+        out.writeArray((o, f) -> o.writeByte(f.id), features);
         out.writeBoolean(humanReadable);
         if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
             out.writeBoolean(includeDefaults);
@@ -28,8 +28,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.indices.recovery.RecoveryState;

 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

@@ -38,20 +36,11 @@ import java.util.Map;
  */
 public class RecoveryResponse extends BroadcastResponse {

-    private Map<String, List<RecoveryState>> shardRecoveryStates = new HashMap<>();
+    private final Map<String, List<RecoveryState>> shardRecoveryStates;

     public RecoveryResponse(StreamInput in) throws IOException {
         super(in);
-        int size = in.readVInt();
-        for (int i = 0; i < size; i++) {
-            String s = in.readString();
-            int listSize = in.readVInt();
-            List<RecoveryState> list = new ArrayList<>(listSize);
-            for (int j = 0; j < listSize; j++) {
-                list.add(RecoveryState.readRecoveryState(in));
-            }
-            shardRecoveryStates.put(s, list);
-        }
+        shardRecoveryStates = in.readMapOfLists(StreamInput::readString, RecoveryState::new);
     }

     /**
@@ -105,14 +94,7 @@ public class RecoveryResponse extends BroadcastResponse {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        out.writeVInt(shardRecoveryStates.size());
-        for (Map.Entry<String, List<RecoveryState>> entry : shardRecoveryStates.entrySet()) {
-            out.writeString(entry.getKey());
-            out.writeVInt(entry.getValue().size());
-            for (RecoveryState recoveryState : entry.getValue()) {
-                recoveryState.writeTo(out);
-            }
-        }
+        out.writeMapOfLists(shardRecoveryStates, StreamOutput::writeString, (o, v) -> v.writeTo(o));
     }

     @Override
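RecoveryResponse above collapses a doubly nested loop into a single readMapOfLists/writeMapOfLists call. A sketch of the shape such a helper encodes (key count, then per key the key itself, a list size, and the list elements), specialized here to strings for brevity; names and the DataOutputStream/fixed-width prefix are assumptions for this sketch, not the Elasticsearch API.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Sketch of a "map of lists" codec in the spirit of readMapOfLists/writeMapOfLists.
public class MapOfListsCodecSketch {

    static void writeMapOfLists(DataOutputStream out, Map<String, List<String>> map) throws IOException {
        out.writeInt(map.size());
        for (Map.Entry<String, List<String>> entry : map.entrySet()) {
            out.writeUTF(entry.getKey());
            out.writeInt(entry.getValue().size());
            for (String value : entry.getValue()) {
                out.writeUTF(value);
            }
        }
    }

    static Map<String, List<String>> readMapOfLists(DataInputStream in) throws IOException {
        int size = in.readInt();
        Map<String, List<String>> map = new HashMap<>(size);
        for (int i = 0; i < size; i++) {
            String key = in.readUTF();
            int listSize = in.readInt();
            List<String> list = new ArrayList<>(listSize);
            for (int j = 0; j < listSize; j++) {
                list.add(in.readUTF());
            }
            map.put(key, list);
        }
        return map;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        writeMapOfLists(new DataOutputStream(bytes), Map.of("index-1", List.of("shard-0", "shard-1")));
        Map<String, List<String>> back =
                readMapOfLists(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(back); // {index-1=[shard-0, shard-1]}
    }
}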
@@ -93,7 +93,7 @@ public class RolloverRequest extends AcknowledgedRequest<RolloverRequest> implem
     private String rolloverTarget;
     private String newIndexName;
     private boolean dryRun;
-    private Map<String, Condition<?>> conditions = new HashMap<>(2);
+    private final Map<String, Condition<?>> conditions = new HashMap<>(2);
     //the index name "_na_" is never read back, what matters are settings, mappings and aliases
     private CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_");
@@ -44,16 +44,13 @@ import java.util.Set;

 public class IndicesSegmentResponse extends BroadcastResponse {

-    private ShardSegments[] shards;
+    private final ShardSegments[] shards;

     private Map<String, IndexSegments> indicesSegments;

     IndicesSegmentResponse(StreamInput in) throws IOException {
         super(in);
-        shards = new ShardSegments[in.readVInt()];
-        for (int i = 0; i < shards.length; i++) {
-            shards[i] = new ShardSegments(in);
-        }
+        shards = in.readArray(ShardSegments::new, ShardSegments[]::new);
     }

     IndicesSegmentResponse(ShardSegments[] shards, int totalShards, int successfulShards, int failedShards,
@@ -89,10 +86,7 @@ public class IndicesSegmentResponse extends BroadcastResponse {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        out.writeVInt(shards.length);
-        for (ShardSegments shard : shards) {
-            shard.writeTo(out);
-        }
+        out.writeArray(shards);
     }

     @Override
@@ -35,7 +35,6 @@ import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;

 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;

@@ -51,10 +50,10 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon
      * Shard store information from a node
      */
     public static class StoreStatus implements Writeable, ToXContentFragment, Comparable<StoreStatus> {
-        private DiscoveryNode node;
-        private String allocationId;
+        private final DiscoveryNode node;
+        private final String allocationId;
         private Exception storeException;
-        private AllocationStatus allocationStatus;
+        private final AllocationStatus allocationStatus;

         /**
          * The status of the shard store with respect to the cluster
@@ -264,8 +263,8 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon
         }
     }

-    private ImmutableOpenMap<String, ImmutableOpenIntMap<List<StoreStatus>>> storeStatuses;
-    private List<Failure> failures;
+    private final ImmutableOpenMap<String, ImmutableOpenIntMap<List<StoreStatus>>> storeStatuses;
+    private final List<Failure> failures;

     public IndicesShardStoresResponse(ImmutableOpenMap<String, ImmutableOpenIntMap<List<StoreStatus>>> storeStatuses,
                                       List<Failure> failures) {
@@ -279,30 +278,15 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon

     public IndicesShardStoresResponse(StreamInput in) throws IOException {
         super(in);
-        int numResponse = in.readVInt();
-        ImmutableOpenMap.Builder<String, ImmutableOpenIntMap<List<StoreStatus>>> storeStatusesBuilder = ImmutableOpenMap.builder();
-        for (int i = 0; i < numResponse; i++) {
-            String index = in.readString();
-            int indexEntries = in.readVInt();
+        storeStatuses = in.readImmutableMap(StreamInput::readString, i -> {
+            int indexEntries = i.readVInt();
             ImmutableOpenIntMap.Builder<List<StoreStatus>> shardEntries = ImmutableOpenIntMap.builder();
             for (int shardCount = 0; shardCount < indexEntries; shardCount++) {
-                int shardID = in.readInt();
-                int nodeEntries = in.readVInt();
-                List<StoreStatus> storeStatuses = new ArrayList<>(nodeEntries);
-                for (int nodeCount = 0; nodeCount < nodeEntries; nodeCount++) {
-                    storeStatuses.add(new StoreStatus(in));
+                shardEntries.put(i.readInt(), i.readList(StoreStatus::new));
             }
-                shardEntries.put(shardID, storeStatuses);
-            }
-            storeStatusesBuilder.put(index, shardEntries.build());
-        }
-        int numFailure = in.readVInt();
-        List<Failure> failureBuilder = new ArrayList<>();
-        for (int i = 0; i < numFailure; i++) {
-            failureBuilder.add(Failure.readFailure(in));
-        }
-        storeStatuses = storeStatusesBuilder.build();
-        failures = Collections.unmodifiableList(failureBuilder);
+            return shardEntries.build();
+        });
+        failures = Collections.unmodifiableList(in.readList(Failure::readFailure));
     }

     /**
@@ -323,22 +307,14 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon

     @Override
     public void writeTo(StreamOutput out) throws IOException {
-        out.writeVInt(storeStatuses.size());
-        for (ObjectObjectCursor<String, ImmutableOpenIntMap<List<StoreStatus>>> indexShards : storeStatuses) {
-            out.writeString(indexShards.key);
-            out.writeVInt(indexShards.value.size());
-            for (IntObjectCursor<List<StoreStatus>> shardStatusesEntry : indexShards.value) {
-                out.writeInt(shardStatusesEntry.key);
-                out.writeVInt(shardStatusesEntry.value.size());
-                for (StoreStatus storeStatus : shardStatusesEntry.value) {
-                    storeStatus.writeTo(out);
-                }
-            }
-        }
-        out.writeVInt(failures.size());
-        for (Failure failure : failures) {
-            failure.writeTo(out);
-        }
+        out.writeMap(storeStatuses, StreamOutput::writeString, (o, v) -> {
+            o.writeVInt(v.size());
+            for (IntObjectCursor<List<StoreStatus>> shardStatusesEntry : v) {
+                o.writeInt(shardStatusesEntry.key);
+                o.writeCollection(shardStatusesEntry.value);
+            }
+        });
+        out.writeList(failures);
     }

     @Override
@@ -32,7 +32,6 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;

 import java.io.IOException;
-import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;

@@ -103,11 +102,7 @@ public class GetComponentTemplateAction extends ActionType<GetComponentTemplateA

         public Response(StreamInput in) throws IOException {
             super(in);
-            int size = in.readVInt();
-            componentTemplates = new HashMap<>();
-            for (int i = 0 ; i < size ; i++) {
-                componentTemplates.put(in.readString(), new ComponentTemplate(in));
-            }
+            componentTemplates = in.readMap(StreamInput::readString, ComponentTemplate::new);
         }

         public Response(Map<String, ComponentTemplate> componentTemplates) {
@@ -120,11 +115,7 @@ public class GetComponentTemplateAction extends ActionType<GetComponentTemplateA

         @Override
         public void writeTo(StreamOutput out) throws IOException {
-            out.writeVInt(componentTemplates.size());
-            for (Map.Entry<String, ComponentTemplate> componentTemplate : componentTemplates.entrySet()) {
-                out.writeString(componentTemplate.getKey());
-                componentTemplate.getValue().writeTo(out);
-            }
+            out.writeMap(componentTemplates, StreamOutput::writeString, (o, v) -> v.writeTo(o));
         }

         @Override
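Several hunks above and below use the three-argument map helpers: readMap with a key reader and a value reader, writeMap with a key writer and a value writer. A self-contained sketch of that generic codec, with illustrative interfaces standing in for the Writeable.Reader/Writeable.Writer functional types and a fixed-width int where Elasticsearch uses a vInt:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

// Sketch of the generic map codec behind calls such as
// in.readMap(StreamInput::readString, ComponentTemplate::new) and
// out.writeMap(map, StreamOutput::writeString, (o, v) -> v.writeTo(o)).
public class MapCodecSketch {

    interface Reader<T> { T read(DataInputStream in) throws IOException; }
    interface Writer<T> { void write(DataOutputStream out, T value) throws IOException; }

    static <K, V> void writeMap(DataOutputStream out, Map<K, V> map,
                                Writer<K> keyWriter, Writer<V> valueWriter) throws IOException {
        out.writeInt(map.size());
        for (Map.Entry<K, V> entry : map.entrySet()) {
            keyWriter.write(out, entry.getKey());
            valueWriter.write(out, entry.getValue());
        }
    }

    static <K, V> Map<K, V> readMap(DataInputStream in,
                                    Reader<K> keyReader, Reader<V> valueReader) throws IOException {
        int size = in.readInt();
        Map<K, V> map = new HashMap<>(size);
        for (int i = 0; i < size; i++) {
            map.put(keyReader.read(in), valueReader.read(in));
        }
        return map;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        writeMap(new DataOutputStream(bytes), Map.of("template-1", 7),
                DataOutputStream::writeUTF, DataOutputStream::writeInt);
        Map<String, Integer> back = readMap(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())),
                DataInputStream::readUTF, DataInputStream::readInt);
        System.out.println(back); // {template-1=7}
    }
}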
@@ -32,7 +32,6 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;

 import java.io.IOException;
-import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;

@@ -117,11 +116,7 @@ public class GetComposableIndexTemplateAction extends ActionType<GetComposableIn

         public Response(StreamInput in) throws IOException {
             super(in);
-            int size = in.readVInt();
-            indexTemplates = new HashMap<>();
-            for (int i = 0 ; i < size ; i++) {
-                indexTemplates.put(in.readString(), new ComposableIndexTemplate(in));
-            }
+            indexTemplates = in.readMap(StreamInput::readString, ComposableIndexTemplate::new);
         }

         public Response(Map<String, ComposableIndexTemplate> indexTemplates) {
@@ -134,11 +129,7 @@ public class GetComposableIndexTemplateAction extends ActionType<GetComposableIn

         @Override
         public void writeTo(StreamOutput out) throws IOException {
-            out.writeVInt(indexTemplates.size());
-            for (Map.Entry<String, ComposableIndexTemplate> indexTemplate : indexTemplates.entrySet()) {
-                out.writeString(indexTemplate.getKey());
-                indexTemplate.getValue().writeTo(out);
-            }
+            out.writeMap(indexTemplates, StreamOutput::writeString, (o, v) -> v.writeTo(o));
         }

         @Override
@@ -28,7 +28,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;

 import java.io.IOException;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

@@ -39,18 +38,11 @@ import java.util.Map;
  */
 public class UpgradeResponse extends BroadcastResponse {

-    private Map<String, Tuple<Version, String>> versions;
+    private final Map<String, Tuple<Version, String>> versions;

     UpgradeResponse(StreamInput in) throws IOException {
         super(in);
-        int size = in.readVInt();
-        versions = new HashMap<>();
-        for (int i=0; i<size; i++) {
-            String index = in.readString();
-            Version upgradeVersion = Version.readVersion(in);
-            String oldestLuceneSegment = in.readString();
-            versions.put(index, new Tuple<>(upgradeVersion, oldestLuceneSegment));
-        }
+        versions = in.readMap(StreamInput::readString, i -> Tuple.tuple(Version.readVersion(i), i.readString()));
     }

     UpgradeResponse(Map<String, Tuple<Version, String>> versions, int totalShards, int successfulShards, int failedShards,
@@ -62,12 +54,10 @@ public class UpgradeResponse extends BroadcastResponse {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        out.writeVInt(versions.size());
-        for(Map.Entry<String, Tuple<Version, String>> entry : versions.entrySet()) {
-            out.writeString(entry.getKey());
-            Version.writeVersion(entry.getValue().v1(), out);
-            out.writeString(entry.getValue().v2());
-        }
+        out.writeMap(versions, StreamOutput::writeString, (o, v) -> {
+            Version.writeVersion(v.v1(), o);
+            o.writeString(v.v2());
+        });
     }

     @Override
@@ -27,7 +27,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;

 import java.io.IOException;
-import java.util.HashMap;
 import java.util.Map;

 import static org.elasticsearch.action.ValidateActions.addValidationError;
@@ -41,14 +40,7 @@ public class UpgradeSettingsRequest extends AcknowledgedRequest<UpgradeSettingsR

     public UpgradeSettingsRequest(StreamInput in) throws IOException {
         super(in);
-        int size = in.readVInt();
-        versions = new HashMap<>();
-        for (int i=0; i<size; i++) {
-            String index = in.readString();
-            Version upgradeVersion = Version.readVersion(in);
-            String oldestLuceneSegment = in.readString();
-            versions.put(index, new Tuple<>(upgradeVersion, oldestLuceneSegment));
-        }
+        versions = in.readMap(StreamInput::readString, i -> new Tuple<>(Version.readVersion(i), i.readString()));
     }

     public UpgradeSettingsRequest() {
@@ -89,11 +81,9 @@ public class UpgradeSettingsRequest extends AcknowledgedRequest<UpgradeSettingsR
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        out.writeVInt(versions.size());
-        for(Map.Entry<String, Tuple<Version, String>> entry : versions.entrySet()) {
-            out.writeString(entry.getKey());
-            Version.writeVersion(entry.getValue().v1(), out);
-            out.writeString(entry.getValue().v2());
-        }
+        out.writeMap(versions, StreamOutput::writeString, (o, v) -> {
+            Version.writeVersion(v.v1(), out);
+            out.writeString(v.v2());
+        });
     }
 }
@@ -29,7 +29,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;

 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -72,30 +71,21 @@ public class ValidateQueryResponse extends BroadcastResponse {
         );
     }

-    private boolean valid;
+    private final boolean valid;

-    private List<QueryExplanation> queryExplanations;
+    private final List<QueryExplanation> queryExplanations;

     ValidateQueryResponse(StreamInput in) throws IOException {
         super(in);
         valid = in.readBoolean();
-        int size = in.readVInt();
-        if (size > 0) {
-            queryExplanations = new ArrayList<>(size);
-            for (int i = 0; i < size; i++) {
-                queryExplanations.add(new QueryExplanation(in));
-            }
-        }
+        queryExplanations = in.readList(QueryExplanation::new);
     }

     ValidateQueryResponse(boolean valid, List<QueryExplanation> queryExplanations, int totalShards, int successfulShards, int failedShards,
                           List<DefaultShardOperationFailedException> shardFailures) {
         super(totalShards, successfulShards, failedShards, shardFailures);
         this.valid = valid;
-        this.queryExplanations = queryExplanations;
-        if (queryExplanations == null) {
-            this.queryExplanations = Collections.emptyList();
-        }
+        this.queryExplanations = queryExplanations == null ? Collections.emptyList() : queryExplanations;
     }

     /**
@@ -109,9 +99,6 @@ public class ValidateQueryResponse extends BroadcastResponse {
      * The list of query explanations.
      */
     public List<? extends QueryExplanation> getQueryExplanation() {
-        if (queryExplanations == null) {
-            return Collections.emptyList();
-        }
         return queryExplanations;
     }

@@ -119,11 +106,7 @@ public class ValidateQueryResponse extends BroadcastResponse {
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
         out.writeBoolean(valid);
-        out.writeVInt(queryExplanations.size());
-        for (QueryExplanation exp : queryExplanations) {
-            exp.writeTo(out);
-        }
-
+        out.writeCollection(queryExplanations);
     }

     @Override
@@ -50,21 +50,14 @@ public class SearchPhaseExecutionException extends ElasticsearchException {
     public SearchPhaseExecutionException(StreamInput in) throws IOException {
         super(in);
         phaseName = in.readOptionalString();
-        int numFailures = in.readVInt();
-        shardFailures = new ShardSearchFailure[numFailures];
-        for (int i = 0; i < numFailures; i++) {
-            shardFailures[i] = ShardSearchFailure.readShardSearchFailure(in);
-        }
+        shardFailures = in.readArray(ShardSearchFailure::readShardSearchFailure, ShardSearchFailure[]::new);
     }

     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
         out.writeOptionalString(phaseName);
-        out.writeVInt(shardFailures.length);
-        for (ShardSearchFailure failure : shardFailures) {
-            failure.writeTo(out);
-        }
+        out.writeArray(shardFailures);
     }

     private static Throwable deduplicateCause(Throwable cause, ShardSearchFailure[] shardFailures) {
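Refactors like the one above are only safe on the wire if the helper emits exactly the bytes the loop did. A small self-contained check of that equivalence for the count-then-elements layout; DataOutputStream and a fixed-width count stand in for StreamOutput and its vInt, and the writeArray below mirrors the sketches earlier on this page rather than the real API.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;

// Demonstrates that a generic array helper and a hand-rolled loop produce
// byte-for-byte identical output for the same count-then-elements layout.
public class WireCompatSketch {

    interface Writer<T> { void write(DataOutputStream out, T value) throws IOException; }

    static <T> void writeArray(DataOutputStream out, Writer<T> writer, T[] array) throws IOException {
        out.writeInt(array.length);
        for (T value : array) {
            writer.write(out, value);
        }
    }

    public static void main(String[] args) throws IOException {
        String[] failures = {"node-1: fetch failed", "node-2: timed out"};

        // Old style: explicit count plus loop.
        ByteArrayOutputStream oldBytes = new ByteArrayOutputStream();
        DataOutputStream oldOut = new DataOutputStream(oldBytes);
        oldOut.writeInt(failures.length);
        for (String failure : failures) {
            oldOut.writeUTF(failure);
        }

        // New style: the helper encapsulates the same count-then-elements layout.
        ByteArrayOutputStream newBytes = new ByteArrayOutputStream();
        writeArray(new DataOutputStream(newBytes), DataOutputStream::writeUTF, failures);

        System.out.println(Arrays.equals(oldBytes.toByteArray(), newBytes.toByteArray())); // true
    }
}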
@@ -152,10 +152,7 @@ public class MultiTermVectorsRequest extends ActionRequest
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
         out.writeOptionalString(preference);
-        out.writeVInt(requests.size());
-        for (TermVectorsRequest termVectorsRequest : requests) {
-            termVectorsRequest.writeTo(out);
-        }
+        out.writeCollection(requests);
     }

     public void ids(String[] ids) {
@@ -104,10 +104,7 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable

     public MultiTermVectorsResponse(StreamInput in) throws IOException {
         super(in);
-        responses = new MultiTermVectorsItemResponse[in.readVInt()];
-        for (int i = 0; i < responses.length; i++) {
-            responses[i] = new MultiTermVectorsItemResponse(in);
-        }
+        responses = in.readArray(MultiTermVectorsItemResponse::new, MultiTermVectorsItemResponse[]::new);
     }

     public MultiTermVectorsItemResponse[] getResponses() {
@@ -151,9 +148,6 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable

     @Override
     public void writeTo(StreamOutput out) throws IOException {
-        out.writeVInt(responses.length);
-        for (MultiTermVectorsItemResponse response : responses) {
-            response.writeTo(out);
-        }
+        out.writeArray(responses);
     }
 }
@@ -134,7 +134,6 @@ public final class TermVectorsFields extends Fields {
      */
     public TermVectorsFields(BytesReference headerRef, BytesReference termVectors) throws IOException {
         try (StreamInput header = headerRef.streamInput()) {
-            fieldMap = new ObjectLongHashMap<>();
             // here we read the header to fill the field offset map
             String headerString = header.readString();
             assert headerString.equals("TV");
@@ -144,6 +143,7 @@ public final class TermVectorsFields extends Fields {
             hasFieldStatistic = header.readBoolean();
             hasScores = header.readBoolean();
             final int numFields = header.readVInt();
+            fieldMap = new ObjectLongHashMap<>(numFields);
             for (int i = 0; i < numFields; i++) {
                 fieldMap.put((header.readString()), header.readVLong());
             }
@@ -487,7 +487,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
      * Sets the settings for filtering out terms.
      */
     public TermVectorsRequest filterSettings(FilterSettings settings) {
-        this.filterSettings = settings != null ? settings : null;
+        this.filterSettings = settings;
         return this;
     }

@@ -552,10 +552,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
         }
         out.writeVLong(longFlags);
         if (selectedFields != null) {
-            out.writeVInt(selectedFields.size());
-            for (String selectedField : selectedFields) {
-                out.writeString(selectedField);
-            }
+            out.writeStringCollection(selectedFields);
         } else {
             out.writeVInt(0);
         }
@@ -108,32 +108,11 @@ public class ClusterInfo implements ToXContentFragment, Writeable {
             out.writeString(c.key);
             c.value.writeTo(out);
         }
-        out.writeVInt(this.mostAvailableSpaceUsage.size());
-        for (ObjectObjectCursor<String, DiskUsage> c : this.mostAvailableSpaceUsage) {
-            out.writeString(c.key);
-            c.value.writeTo(out);
-        }
-        out.writeVInt(this.shardSizes.size());
-        for (ObjectObjectCursor<String, Long> c : this.shardSizes) {
-            out.writeString(c.key);
-            if (c.value == null) {
-                out.writeLong(-1);
-            } else {
-                out.writeLong(c.value);
-            }
-        }
-        out.writeVInt(this.routingToDataPath.size());
-        for (ObjectObjectCursor<ShardRouting, String> c : this.routingToDataPath) {
-            c.key.writeTo(out);
-            out.writeString(c.value);
-        }
-
+        out.writeMap(this.mostAvailableSpaceUsage, StreamOutput::writeString, (o, v) -> v.writeTo(o));
+        out.writeMap(this.shardSizes, StreamOutput::writeString, (o, v) -> out.writeLong(v == null ? -1 : v));
+        out.writeMap(this.routingToDataPath, (o, k) -> k.writeTo(o), StreamOutput::writeString);
         if (out.getVersion().onOrAfter(StoreStats.RESERVED_BYTES_VERSION)) {
-            out.writeVInt(this.reservedSpace.size());
-            for (ObjectObjectCursor<NodeAndPath, ReservedSpace> c : this.reservedSpace) {
-                c.key.writeTo(out);
-                c.value.writeTo(out);
-            }
+            out.writeMap(this.reservedSpace);
         }
     }

@@ -241,7 +220,7 @@ public class ClusterInfo implements ToXContentFragment, Writeable {
     /**
      * Represents a data path on a node
      */
-    public static class NodeAndPath {
+    public static class NodeAndPath implements Writeable {
         public final String nodeId;
         public final String path;

@@ -268,6 +247,7 @@ public class ClusterInfo implements ToXContentFragment, Writeable {
             return Objects.hash(nodeId, path);
         }

+        @Override
         public void writeTo(StreamOutput out) throws IOException {
             out.writeString(nodeId);
             out.writeString(path);
@@ -277,7 +257,7 @@ public class ClusterInfo implements ToXContentFragment, Writeable {
     /**
      * Represents the total amount of "reserved" space on a particular data path, together with the set of shards considered.
      */
-    public static class ReservedSpace {
+    public static class ReservedSpace implements Writeable {

         public static final ReservedSpace EMPTY = new ReservedSpace(0, new ObjectHashSet<>());

@@ -298,7 +278,8 @@ public class ClusterInfo implements ToXContentFragment, Writeable {
             }
         }

-        void writeTo(StreamOutput out) throws IOException {
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
             out.writeVLong(total);
             out.writeVInt(shardIds.size());
             for (ObjectCursor<ShardId> shardIdCursor : shardIds) {
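Note the shardSizes lambda in the ClusterInfo hunk above: the map's values are nullable, and the custom value writer preserves the old sentinel encoding of -1 for a null size. A sketch of how such a per-field quirk survives the move to a generic map helper; the names and fixed-width primitives are illustrative assumptions, not the Elasticsearch API.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

// The value writer, not the map helper, decides how null is encoded.
public class NullSentinelSketch {

    interface Writer<T> { void write(DataOutputStream out, T value) throws IOException; }

    static <K, V> void writeMap(DataOutputStream out, Map<K, V> map,
                                Writer<K> keyWriter, Writer<V> valueWriter) throws IOException {
        out.writeInt(map.size());
        for (Map.Entry<K, V> entry : map.entrySet()) {
            keyWriter.write(out, entry.getKey());
            valueWriter.write(out, entry.getValue());
        }
    }

    public static void main(String[] args) throws IOException {
        Map<String, Long> shardSizes = new LinkedHashMap<>();
        shardSizes.put("[index][0]", 1024L);
        shardSizes.put("[index][1]", null); // size not yet known

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        // A null size is encoded as the sentinel -1, mirroring the lambda in the hunk above.
        writeMap(new DataOutputStream(bytes), shardSizes,
                DataOutputStream::writeUTF, (o, v) -> o.writeLong(v == null ? -1 : v));
        System.out.println(bytes.size() + " bytes written");
    }
}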
@@ -29,21 +29,20 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.rest.RestStatus;

 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.Locale;
 import java.util.Objects;

 public class ClusterBlock implements Writeable, ToXContentFragment {

-    private int id;
-    private @Nullable String uuid;
-    private String description;
-    private EnumSet<ClusterBlockLevel> levels;
-    private boolean retryable;
-    private boolean disableStatePersistence = false;
-    private boolean allowReleaseResources;
-    private RestStatus status;
+    private final int id;
+    @Nullable private final String uuid;
+    private final String description;
+    private final EnumSet<ClusterBlockLevel> levels;
+    private final boolean retryable;
+    private final boolean disableStatePersistence;
+    private final boolean allowReleaseResources;
+    private final RestStatus status;

     public ClusterBlock(StreamInput in) throws IOException {
         id = in.readVInt();
@@ -53,12 +52,7 @@ public class ClusterBlock implements Writeable, ToXContentFragment {
             uuid = null;
         }
         description = in.readString();
-        final int len = in.readVInt();
-        ArrayList<ClusterBlockLevel> levels = new ArrayList<>(len);
-        for (int i = 0; i < len; i++) {
-            levels.add(in.readEnum(ClusterBlockLevel.class));
-        }
-        this.levels = EnumSet.copyOf(levels);
+        this.levels = in.readEnumSet(ClusterBlockLevel.class);
         retryable = in.readBoolean();
         disableStatePersistence = in.readBoolean();
         status = RestStatus.readFrom(in);
@@ -154,10 +148,7 @@ public class ClusterBlock implements Writeable, ToXContentFragment {
             out.writeOptionalString(uuid);
         }
         out.writeString(description);
-        out.writeVInt(levels.size());
-        for (ClusterBlockLevel level : levels) {
-            out.writeEnum(level);
-        }
+        out.writeEnumSet(levels);
         out.writeBoolean(retryable);
         out.writeBoolean(disableStatePersistence);
         RestStatus.writeTo(out, status);
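ClusterBlock's levels field moves from a size-plus-writeEnum loop to readEnumSet/writeEnumSet. A sketch of an EnumSet codec in that spirit; encoding each member by its ordinal is an assumption here (it matches the shape of the removed loop, which wrote one enum per slot), and it is what makes such enums effectively append-only on the wire.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.EnumSet;

// Sketch: a size prefix followed by one ordinal per member. Reordering or
// removing enum constants would break the wire format under this encoding.
public class EnumSetCodecSketch {

    enum Level { READ, WRITE, METADATA_READ, METADATA_WRITE }

    static <E extends Enum<E>> void writeEnumSet(DataOutputStream out, EnumSet<E> set) throws IOException {
        out.writeInt(set.size());
        for (E value : set) {
            out.writeInt(value.ordinal());
        }
    }

    static <E extends Enum<E>> EnumSet<E> readEnumSet(DataInputStream in, Class<E> clazz) throws IOException {
        EnumSet<E> set = EnumSet.noneOf(clazz);
        int size = in.readInt();
        E[] constants = clazz.getEnumConstants();
        for (int i = 0; i < size; i++) {
            set.add(constants[in.readInt()]);
        }
        return set;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        writeEnumSet(new DataOutputStream(bytes), EnumSet.of(Level.READ, Level.WRITE));
        EnumSet<Level> back = readEnumSet(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())), Level.class);
        System.out.println(back); // [READ, WRITE]
    }
}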
@@ -26,7 +26,6 @@ import org.elasticsearch.rest.RestStatus;

 import java.io.IOException;
 import java.util.Collection;
-import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 import java.util.function.Function;
@@ -49,22 +48,14 @@ public class ClusterBlockException extends ElasticsearchException {

     public ClusterBlockException(StreamInput in) throws IOException {
         super(in);
-        int totalBlocks = in.readVInt();
-        Set<ClusterBlock> blocks = new HashSet<>(totalBlocks);
-        for (int i = 0; i < totalBlocks;i++) {
-            blocks.add(new ClusterBlock(in));
-        }
-        this.blocks = unmodifiableSet(blocks);
+        this.blocks = unmodifiableSet(in.readSet(ClusterBlock::new));
     }

     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
         if (blocks != null) {
-            out.writeVInt(blocks.size());
-            for (ClusterBlock block : blocks) {
-                block.writeTo(out);
-            }
+            out.writeCollection(blocks);
         } else {
             out.writeVInt(0);
         }
@@ -249,11 +249,7 @@ public final class ClusterIndexHealth implements Iterable<ClusterShardHealth>, W
         out.writeVInt(initializingShards);
         out.writeVInt(unassignedShards);
         out.writeByte(status.value());
-
-        out.writeVInt(shards.size());
-        for (ClusterShardHealth shardHealth : this) {
-            shardHealth.writeTo(out);
-        }
+        out.writeCollection(shards.values());
     }

     @Override
@@ -84,12 +84,7 @@ public final class IndexGraveyard implements Metadata.Custom {
     }

     public IndexGraveyard(final StreamInput in) throws IOException {
-        final int queueSize = in.readVInt();
-        List<Tombstone> tombstones = new ArrayList<>(queueSize);
-        for (int i = 0; i < queueSize; i++) {
-            tombstones.add(new Tombstone(in));
-        }
-        this.tombstones = Collections.unmodifiableList(tombstones);
+        this.tombstones = Collections.unmodifiableList(in.readList(Tombstone::new));
     }

     @Override
@@ -156,10 +151,7 @@ public final class IndexGraveyard implements Metadata.Custom {

     @Override
     public void writeTo(final StreamOutput out) throws IOException {
-        out.writeVInt(tombstones.size());
-        for (Tombstone tombstone : tombstones) {
-            tombstone.writeTo(out);
-        }
+        out.writeList(tombstones);
     }

     @Override
@@ -897,16 +897,16 @@ public class Metadata implements Iterable<IndexMetadata>, Diffable<Metadata>, To

     private static class MetadataDiff implements Diff<Metadata> {

-        private long version;
-        private String clusterUUID;
+        private final long version;
+        private final String clusterUUID;
         private boolean clusterUUIDCommitted;
-        private CoordinationMetadata coordinationMetadata;
-        private Settings transientSettings;
-        private Settings persistentSettings;
-        private Diff<DiffableStringMap> hashesOfConsistentSettings;
-        private Diff<ImmutableOpenMap<String, IndexMetadata>> indices;
-        private Diff<ImmutableOpenMap<String, IndexTemplateMetadata>> templates;
-        private Diff<ImmutableOpenMap<String, Custom>> customs;
+        private final CoordinationMetadata coordinationMetadata;
+        private final Settings transientSettings;
+        private final Settings persistentSettings;
+        private final Diff<DiffableStringMap> hashesOfConsistentSettings;
+        private final Diff<ImmutableOpenMap<String, IndexMetadata>> indices;
+        private final Diff<ImmutableOpenMap<String, IndexTemplateMetadata>> templates;
+        private final Diff<ImmutableOpenMap<String, Custom>> customs;

         MetadataDiff(Metadata before, Metadata after) {
             clusterUUID = after.clusterUUID;
@@ -144,15 +144,12 @@ public final class CompressedXContent {

     public static CompressedXContent readCompressedString(StreamInput in) throws IOException {
         int crc32 = in.readInt();
-        byte[] compressed = new byte[in.readVInt()];
-        in.readBytes(compressed, 0, compressed.length);
-        return new CompressedXContent(compressed, crc32);
+        return new CompressedXContent(in.readByteArray(), crc32);
     }

     public void writeTo(StreamOutput out) throws IOException {
         out.writeInt(crc32);
-        out.writeVInt(bytes.length);
-        out.writeBytes(bytes);
+        out.writeByteArray(bytes);
     }

     @Override
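CompressedXContent switches to the byte-array helpers. A sketch of what readByteArray/writeByteArray amount to, a length prefix plus the raw bytes; illustrative, with a fixed-width length where Elasticsearch uses a vInt.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;

// Length-prefixed raw bytes: the simplest instance of the helper pattern.
public class ByteArrayCodecSketch {

    static void writeByteArray(DataOutputStream out, byte[] bytes) throws IOException {
        out.writeInt(bytes.length);
        out.write(bytes);
    }

    static byte[] readByteArray(DataInputStream in) throws IOException {
        byte[] bytes = new byte[in.readInt()];
        in.readFully(bytes); // readFully loops until all bytes arrive
        return bytes;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        writeByteArray(new DataOutputStream(buffer), new byte[] {1, 2, 3});
        byte[] back = readByteArray(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
        System.out.println(Arrays.toString(back)); // [1, 2, 3]
    }
}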
@@ -45,16 +45,12 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.parseFieldsV
  */
 public class DocumentField implements Writeable, ToXContentFragment, Iterable<Object> {

-    private String name;
-    private List<Object> values;
+    private final String name;
+    private final List<Object> values;

     public DocumentField(StreamInput in) throws IOException {
         name = in.readString();
-        int size = in.readVInt();
-        values = new ArrayList<>(size);
-        for (int i = 0; i < size; i++) {
-            values.add(in.readGenericValue());
-        }
+        values = in.readList(StreamInput::readGenericValue);
     }

     public DocumentField(String name, List<Object> values) {
@@ -94,10 +90,7 @@ public class DocumentField implements Writeable, ToXContentFragment, Iterable<Ob
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(name);
-        out.writeVInt(values.size());
-        for (Object obj : values) {
-            out.writeGenericValue(obj);
-        }
+        out.writeCollection(values, StreamOutput::writeGenericValue);
     }

     @Override
@@ -207,10 +207,7 @@ public class LocalAllocateDangledIndices {
         public AllocateDangledRequest(StreamInput in) throws IOException {
             super(in);
             fromNode = new DiscoveryNode(in);
-            indices = new IndexMetadata[in.readVInt()];
-            for (int i = 0; i < indices.length; i++) {
-                indices[i] = IndexMetadata.readFrom(in);
-            }
+            indices = in.readArray(IndexMetadata::readFrom, IndexMetadata[]::new);
         }

         AllocateDangledRequest(DiscoveryNode fromNode, IndexMetadata[] indices) {
@@ -222,10 +219,7 @@ public class LocalAllocateDangledIndices {
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
             fromNode.writeTo(out);
-            out.writeVInt(indices.length);
-            for (IndexMetadata indexMetadata : indices) {
-                indexMetadata.writeTo(out);
-            }
+            out.writeArray(indices);
         }
     }

@@ -151,10 +151,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
         boolean hasFieldDataFields = docValueFields != null;
         out.writeBoolean(hasFieldDataFields);
         if (hasFieldDataFields) {
-            out.writeVInt(docValueFields.size());
-            for (FieldAndFormat ff : docValueFields) {
-                ff.writeTo(out);
-            }
+            out.writeList(docValueFields);
         }
         out.writeOptionalWriteable(storedFieldsContext);
         out.writeVInt(from);
@@ -162,19 +159,13 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
         boolean hasScriptFields = scriptFields != null;
         out.writeBoolean(hasScriptFields);
         if (hasScriptFields) {
-            out.writeVInt(scriptFields.size());
-            for (ScriptField scriptField : scriptFields) {
-                scriptField.writeTo(out);
-            }
+            out.writeCollection(scriptFields);
         }
         out.writeVInt(size);
         boolean hasSorts = sorts != null;
         out.writeBoolean(hasSorts);
         if (hasSorts) {
-            out.writeVInt(sorts.size());
-            for (SortBuilder<?> sort : sorts) {
-                out.writeNamedWriteable(sort);
-            }
+            out.writeNamedWriteableList(sorts);
         }
         out.writeBoolean(trackScores);
         out.writeBoolean(version);
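The sorts list in TopHitsAggregationBuilder is polymorphic, so it goes through writeNamedWriteableList rather than a plain list helper: each element is tagged with its registered name so the reading side can look up the matching deserializer. A minimal sketch of that idea; the registry, interfaces, and names below are illustrative, not the Elasticsearch NamedWriteable machinery.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.List;
import java.util.Map;

// Name-tagged serialization for polymorphic values: tag first, payload second.
public class NamedCodecSketch {

    interface Named {
        String name();
        void writeBody(DataOutputStream out) throws IOException;
    }

    record FieldSort(String field) implements Named {
        public String name() { return "field"; }
        public void writeBody(DataOutputStream out) throws IOException { out.writeUTF(field); }
    }

    interface Reader { Named read(DataInputStream in) throws IOException; }

    // Reader registry: maps the wire name back to a concrete deserializer.
    static final Map<String, Reader> READERS = Map.of("field", in -> new FieldSort(in.readUTF()));

    static void writeNamedList(DataOutputStream out, List<Named> values) throws IOException {
        out.writeInt(values.size());
        for (Named value : values) {
            out.writeUTF(value.name()); // tag first, payload second
            value.writeBody(out);
        }
    }

    static Named readNamed(DataInputStream in) throws IOException {
        return READERS.get(in.readUTF()).read(in);
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        writeNamedList(new DataOutputStream(bytes), List.of(new FieldSort("timestamp")));
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        int size = in.readInt();
        for (int i = 0; i < size; i++) {
            System.out.println(readNamed(in)); // FieldSort[field=timestamp]
        }
    }
}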
@@ -32,7 +32,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Objects;
 import java.util.TreeMap;

@@ -43,7 +42,7 @@ public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAg
     public static final String NAME = "bucket_selector";

     private final Map<String, String> bucketsPathsMap;
-    private Script script;
+    private final Script script;
     private GapPolicy gapPolicy = GapPolicy.SKIP;

     public BucketSelectorPipelineAggregationBuilder(String name, Map<String, String> bucketsPathsMap, Script script) {
@@ -61,22 +60,14 @@ public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAg
      */
     public BucketSelectorPipelineAggregationBuilder(StreamInput in) throws IOException {
         super(in, NAME);
-        int mapSize = in.readVInt();
-        bucketsPathsMap = new HashMap<>(mapSize);
-        for (int i = 0; i < mapSize; i++) {
-            bucketsPathsMap.put(in.readString(), in.readString());
-        }
+        bucketsPathsMap = in.readMap(StreamInput::readString, StreamInput::readString);
         script = new Script(in);
         gapPolicy = GapPolicy.readFrom(in);
     }

     @Override
     protected void doWriteTo(StreamOutput out) throws IOException {
-        out.writeVInt(bucketsPathsMap.size());
-        for (Entry<String, String> e : bucketsPathsMap.entrySet()) {
-            out.writeString(e.getKey());
-            out.writeString(e.getValue());
-        }
+        out.writeMap(bucketsPathsMap, StreamOutput::writeString, StreamOutput::writeString);
         script.writeTo(out);
         gapPolicy.writeTo(out);
     }
@@ -71,8 +71,8 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
         return first.getText().compareTo(second.getText());
     };

-    private List<Suggestion<? extends Entry<? extends Option>>> suggestions;
-    private boolean hasScoreDocs;
+    private final List<Suggestion<? extends Entry<? extends Option>>> suggestions;
+    private final boolean hasScoreDocs;

     private Map<String, Suggestion<? extends Entry<? extends Option>>> suggestMap;

@@ -44,7 +44,7 @@ public class TransportInfo implements ReportingService.Info {
     private static final boolean CNAME_IN_PUBLISH_ADDRESS =
         parseBoolean(System.getProperty("es.transport.cname_in_publish_address"), false);

-    private BoundTransportAddress address;
+    private final BoundTransportAddress address;
     private Map<String, BoundTransportAddress> profileAddresses;
     private final boolean cnameInPublishAddress;

@@ -71,11 +71,7 @@ public class IndexLifecycleMetadata implements XPackMetadataCustom {

     @Override
     public void writeTo(StreamOutput out) throws IOException {
-        out.writeVInt(policyMetadatas.size());
-        for (Map.Entry<String, LifecyclePolicyMetadata> entry : policyMetadatas.entrySet()) {
-            out.writeString(entry.getKey());
-            entry.getValue().writeTo(out);
-        }
+        out.writeMap(policyMetadatas, StreamOutput::writeString, (o, v) -> v.writeTo(o));
         out.writeEnum(operationMode);
     }
