commit ee6e53b581
Merge branch 'master' into enhancement/make-script-writeable
@@ -739,7 +739,6 @@
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]AggregationContext.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]AggregationPath.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]GeoPointParser.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]ValueType.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]ValuesSourceParser.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]format[/\\]ValueFormat.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]format[/\\]ValueParser.java" checks="LineLength" />
@@ -776,7 +775,6 @@
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]FieldLookup.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]LeafDocLookup.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]LeafFieldsLookup.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]profile[/\\]ProfileResult.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]query[/\\]QueryPhase.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]rescore[/\\]QueryRescorer.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]rescore[/\\]RescoreParseElement.java" checks="LineLength" />
@@ -37,7 +37,7 @@ import static org.elasticsearch.ExceptionsHelper.detailedMessage;
  *
  * The class is final due to serialization limitations
  */
-public final class TaskOperationFailure implements Writeable<TaskOperationFailure>, ToXContent {
+public final class TaskOperationFailure implements Writeable, ToXContent {
 
     private final String nodeId;
 
@@ -47,6 +47,16 @@ public final class TaskOperationFailure implements Writeable<TaskOperationFailur
 
     private final RestStatus status;
 
+    public TaskOperationFailure(String nodeId, long taskId, Throwable t) {
+        this.nodeId = nodeId;
+        this.taskId = taskId;
+        this.reason = t;
+        status = ExceptionsHelper.status(t);
+    }
+
+    /**
+     * Read from a stream.
+     */
     public TaskOperationFailure(StreamInput in) throws IOException {
         nodeId = in.readString();
         taskId = in.readLong();
@@ -54,11 +64,12 @@ public final class TaskOperationFailur
         status = RestStatus.readFrom(in);
     }
 
-    public TaskOperationFailure(String nodeId, long taskId, Throwable t) {
-        this.nodeId = nodeId;
-        this.taskId = taskId;
-        this.reason = t;
-        status = ExceptionsHelper.status(t);
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(nodeId);
+        out.writeLong(taskId);
+        out.writeThrowable(reason);
+        RestStatus.writeTo(out, status);
     }
 
     public String getNodeId() {
@@ -81,19 +92,6 @@ public final class TaskOperationFailur
         return reason;
     }
 
-    @Override
-    public TaskOperationFailure readFrom(StreamInput in) throws IOException {
-        return new TaskOperationFailure(in);
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeString(nodeId);
-        out.writeLong(taskId);
-        out.writeThrowable(reason);
-        RestStatus.writeTo(out, status);
-    }
-
     @Override
     public String toString() {
         return "[" + nodeId + "][" + taskId + "] failed, reason [" + getReason() + "]";
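TaskOperationFailure is representative of the read-side change repeated throughout this merge: the readFrom override is gone and deserialization lives in a constructor taking a StreamInput. A minimal round-trip sketch, not part of this commit and assuming the BytesStreamOutput/StreamInput.wrap test idiom of this era:

    // Illustrative only: round-trip a TaskOperationFailure through the stream API.
    TaskOperationFailure failure = new TaskOperationFailure("node1", 42, new RuntimeException("boom"));
    BytesStreamOutput out = new BytesStreamOutput();
    failure.writeTo(out);                                      // Writeable.writeTo serializes
    StreamInput in = StreamInput.wrap(out.bytes());            // assumed helper of this era
    TaskOperationFailure read = new TaskOperationFailure(in);  // stream constructor replaces readFrom(in)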
@@ -40,7 +40,7 @@ import java.util.Map;
  * A {@code ClusterAllocationExplanation} is an explanation of why a shard may or may not be allocated to nodes. It also includes weights
  * for where the shard is likely to be assigned. It is an immutable class
  */
-public final class ClusterAllocationExplanation implements ToXContent, Writeable<ClusterAllocationExplanation> {
+public final class ClusterAllocationExplanation implements ToXContent, Writeable {
 
     private final ShardId shard;
     private final boolean primary;
@@ -50,6 +50,18 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
     private final UnassignedInfo unassignedInfo;
     private final long remainingDelayNanos;
 
+    public ClusterAllocationExplanation(ShardId shard, boolean primary, @Nullable String assignedNodeId,
+                                        UnassignedInfo unassignedInfo, Map<DiscoveryNode, Decision> nodeToDecision,
+                                        Map<DiscoveryNode, Float> nodeWeights, long remainingDelayNanos) {
+        this.shard = shard;
+        this.primary = primary;
+        this.assignedNodeId = assignedNodeId;
+        this.unassignedInfo = unassignedInfo;
+        this.nodeToDecision = nodeToDecision == null ? Collections.emptyMap() : nodeToDecision;
+        this.nodeWeights = nodeWeights == null ? Collections.emptyMap() : nodeWeights;
+        this.remainingDelayNanos = remainingDelayNanos;
+    }
+
     public ClusterAllocationExplanation(StreamInput in) throws IOException {
         this.shard = ShardId.readShardId(in);
         this.primary = in.readBoolean();
@@ -78,18 +90,29 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
         remainingDelayNanos = in.readVLong();
     }
 
-    public ClusterAllocationExplanation(ShardId shard, boolean primary, @Nullable String assignedNodeId,
-                                        UnassignedInfo unassignedInfo, Map<DiscoveryNode, Decision> nodeToDecision,
-                                        Map<DiscoveryNode, Float> nodeWeights, long remainingDelayNanos) {
-        this.shard = shard;
-        this.primary = primary;
-        this.assignedNodeId = assignedNodeId;
-        this.unassignedInfo = unassignedInfo;
-        this.nodeToDecision = nodeToDecision == null ? Collections.emptyMap() : nodeToDecision;
-        this.nodeWeights = nodeWeights == null ? Collections.emptyMap() : nodeWeights;
-        this.remainingDelayNanos = remainingDelayNanos;
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        this.getShard().writeTo(out);
+        out.writeBoolean(this.isPrimary());
+        out.writeOptionalString(this.getAssignedNodeId());
+        out.writeOptionalWriteable(this.getUnassignedInfo());
+
+        Map<DiscoveryNode, Decision> ntd = this.getNodeDecisions();
+        out.writeVInt(ntd.size());
+        for (Map.Entry<DiscoveryNode, Decision> entry : ntd.entrySet()) {
+            entry.getKey().writeTo(out);
+            Decision.writeTo(entry.getValue(), out);
+        }
+        Map<DiscoveryNode, Float> ntw = this.getNodeWeights();
+        out.writeVInt(ntw.size());
+        for (Map.Entry<DiscoveryNode, Float> entry : ntw.entrySet()) {
+            entry.getKey().writeTo(out);
+            out.writeFloat(entry.getValue());
+        }
+        out.writeVLong(remainingDelayNanos);
     }
 
+
     public ShardId getShard() {
         return this.shard;
     }
@@ -183,31 +206,4 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
         builder.endObject(); // end wrapping object
         return builder;
     }
-
-    @Override
-    public ClusterAllocationExplanation readFrom(StreamInput in) throws IOException {
-        return new ClusterAllocationExplanation(in);
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        this.getShard().writeTo(out);
-        out.writeBoolean(this.isPrimary());
-        out.writeOptionalString(this.getAssignedNodeId());
-        out.writeOptionalWriteable(this.getUnassignedInfo());
-
-        Map<DiscoveryNode, Decision> ntd = this.getNodeDecisions();
-        out.writeVInt(ntd.size());
-        for (Map.Entry<DiscoveryNode, Decision> entry : ntd.entrySet()) {
-            entry.getKey().writeTo(out);
-            Decision.writeTo(entry.getValue(), out);
-        }
-        Map<DiscoveryNode, Float> ntw = this.getNodeWeights();
-        out.writeVInt(ntw.size());
-        for (Map.Entry<DiscoveryNode, Float> entry : ntw.entrySet()) {
-            entry.getKey().writeTo(out);
-            out.writeFloat(entry.getValue());
-        }
-        out.writeVLong(remainingDelayNanos);
-    }
 }
@@ -39,7 +39,7 @@ import java.util.concurrent.TimeUnit;
  * and use in APIs. Instead, immutable and streamable TaskInfo objects are used to represent
  * snapshot information about currently running tasks.
  */
-public class TaskInfo implements Writeable<TaskInfo>, ToXContent {
+public class TaskInfo implements Writeable, ToXContent {
 
     private final DiscoveryNode node;
 
@@ -75,6 +75,9 @@ public class TaskInfo implements Writeable<TaskInfo>, ToXContent {
         this.parentTaskId = parentTaskId;
     }
 
+    /**
+     * Read from a stream.
+     */
     public TaskInfo(StreamInput in) throws IOException {
         node = new DiscoveryNode(in);
         taskId = new TaskId(node.getId(), in.readLong());
@@ -88,6 +91,20 @@ public class TaskInfo implements Writeable<TaskInfo>, ToXContent {
         parentTaskId = TaskId.readFromStream(in);
     }
 
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        node.writeTo(out);
+        out.writeLong(taskId.getId());
+        out.writeString(type);
+        out.writeString(action);
+        out.writeOptionalString(description);
+        out.writeOptionalNamedWriteable(status);
+        out.writeLong(startTime);
+        out.writeLong(runningTimeNanos);
+        out.writeBoolean(cancellable);
+        parentTaskId.writeTo(out);
+    }
+
     public TaskId getTaskId() {
         return taskId;
     }
@@ -148,25 +165,6 @@ public class TaskInfo implements Writeable<TaskInfo>, ToXContent {
         return parentTaskId;
     }
 
-    @Override
-    public TaskInfo readFrom(StreamInput in) throws IOException {
-        return new TaskInfo(in);
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        node.writeTo(out);
-        out.writeLong(taskId.getId());
-        out.writeString(type);
-        out.writeString(action);
-        out.writeOptionalString(description);
-        out.writeOptionalNamedWriteable(status);
-        out.writeLong(startTime);
-        out.writeLong(runningTimeNanos);
-        out.writeBoolean(cancellable);
-        parentTaskId.writeTo(out);
-    }
-
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.field("node", node.getId());
@@ -48,7 +48,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
+public class ClusterStatsNodes implements ToXContent, Writeable {
 
     private final Counts counts;
     private final Set<Version> versions;
@@ -200,7 +200,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
         return builder;
     }
 
-    public static class Counts implements Writeable<Counts>, ToXContent {
+    public static class Counts implements Writeable, ToXContent {
         static final String COORDINATING_ONLY = "coordinating_only";
 
         private final int total;
@@ -263,7 +263,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
         }
     }
 
-    public static class OsStats implements ToXContent, Writeable<OsStats> {
+    public static class OsStats implements ToXContent, Writeable {
         final int availableProcessors;
         final int allocatedProcessors;
         final ObjectIntHashMap<String> names;
@@ -343,7 +343,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
         }
     }
 
-    public static class ProcessStats implements ToXContent, Writeable<ProcessStats> {
+    public static class ProcessStats implements ToXContent, Writeable {
 
         final int count;
         final int cpuPercent;
@@ -456,7 +456,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
         }
     }
 
-    public static class JvmStats implements Writeable<JvmStats>, ToXContent {
+    public static class JvmStats implements Writeable, ToXContent {
 
         private final ObjectIntHashMap<JvmVersion> versions;
         private final long threads;
@@ -78,7 +78,7 @@ public class BulkItemResponse implements Streamable, StatusToXContent {
     /**
      * Represents a failure.
      */
-    public static class Failure implements Writeable<Failure>, ToXContent {
+    public static class Failure implements Writeable, ToXContent {
         static final String INDEX_FIELD = "index";
         static final String TYPE_FIELD = "type";
         static final String ID_FIELD = "id";
@@ -29,7 +29,7 @@ import java.io.IOException;
 /**
  * Holds the end result of what a pipeline did to sample document provided via the simulate api.
  */
-public final class SimulateDocumentBaseResult implements SimulateDocumentResult<SimulateDocumentBaseResult> {
+public final class SimulateDocumentBaseResult implements SimulateDocumentResult {
     private final WriteableIngestDocument ingestDocument;
     private final Exception failure;
 
@@ -21,6 +21,6 @@ package org.elasticsearch.action.ingest;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ToXContent;
 
-public interface SimulateDocumentResult<T extends SimulateDocumentResult> extends Writeable<T>, ToXContent {
+public interface SimulateDocumentResult extends Writeable, ToXContent {
 
 }
@@ -30,7 +30,7 @@ import java.util.List;
  * Holds the result of what a pipeline did to a sample document via the simulate api, but instead of {@link SimulateDocumentBaseResult}
  * this result class holds the intermediate result each processor did to the sample document.
  */
-public final class SimulateDocumentVerboseResult implements SimulateDocumentResult<SimulateDocumentVerboseResult> {
+public final class SimulateDocumentVerboseResult implements SimulateDocumentResult {
     private final List<SimulateProcessorResult> processorResults;
 
     public SimulateDocumentVerboseResult(List<SimulateProcessorResult> processorResults) {
@@ -76,7 +76,7 @@ public class SimulatePipelineResponse extends ActionResponse implements ToXConte
         int responsesLength = in.readVInt();
         results = new ArrayList<>();
         for (int i = 0; i < responsesLength; i++) {
-            SimulateDocumentResult<?> simulateDocumentResult;
+            SimulateDocumentResult simulateDocumentResult;
             if (verbose) {
                 simulateDocumentResult = new SimulateDocumentVerboseResult(in);
             } else {
@@ -29,7 +29,7 @@ import org.elasticsearch.ingest.core.IngestDocument;
 
 import java.io.IOException;
 
-public class SimulateProcessorResult implements Writeable<SimulateProcessorResult>, ToXContent {
+public class SimulateProcessorResult implements Writeable, ToXContent {
     private final String processorTag;
     private final WriteableIngestDocument ingestDocument;
     private final Exception failure;
@@ -30,7 +30,7 @@ import java.io.IOException;
 import java.util.Map;
 import java.util.Objects;
 
-final class WriteableIngestDocument implements Writeable<WriteableIngestDocument>, ToXContent {
+final class WriteableIngestDocument implements Writeable, ToXContent {
 
     private final IngestDocument ingestDocument;
 
@@ -46,22 +46,16 @@ final class WriteableIngestDocument implements Writeable<WriteableIngestDocument
         this.ingestDocument = new IngestDocument(sourceAndMetadata, ingestMetadata);
     }
 
-    IngestDocument getIngestDocument() {
-        return ingestDocument;
-    }
-
-
-    @Override
-    public WriteableIngestDocument readFrom(StreamInput in) throws IOException {
-        return new WriteableIngestDocument(in);
-    }
-
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeMap(ingestDocument.getSourceAndMetadata());
         out.writeGenericValue(ingestDocument.getIngestMetadata());
     }
 
+    IngestDocument getIngestDocument() {
+        return ingestDocument;
+    }
+
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject("doc");
@@ -61,10 +61,10 @@ import java.util.function.Supplier;
  * The base class for transport actions that are interacting with currently running tasks.
  */
 public abstract class TransportTasksAction<
-    OperationTask extends Task,
-    TasksRequest extends BaseTasksRequest<TasksRequest>,
-    TasksResponse extends BaseTasksResponse,
-    TaskResponse extends Writeable<TaskResponse>
+        OperationTask extends Task,
+        TasksRequest extends BaseTasksRequest<TasksRequest>,
+        TasksResponse extends BaseTasksResponse,
+        TaskResponse extends Writeable
     > extends HandledTransportAction<TasksRequest, TasksResponse> {
 
     protected final ClusterName clusterName;
@@ -27,7 +27,7 @@ import java.io.IOException;
 /**
  * Cluster state part, changes in which can be serialized
  */
-public interface Diffable<T> extends Writeable<T> {
+public interface Diffable<T> extends Writeable {
 
     /**
      * Returns serializable object representing differences between this and previousState
@@ -44,7 +44,7 @@ import static org.elasticsearch.common.transport.TransportAddressSerializers.add
 /**
  * A discovery node represents a node that is part of the cluster.
  */
-public class DiscoveryNode implements Writeable<DiscoveryNode>, ToXContent {
+public class DiscoveryNode implements Writeable, ToXContent {
 
     public static boolean isLocalNode(Settings settings) {
         if (Node.NODE_LOCAL_SETTING.exists(settings)) {
@@ -88,34 +88,6 @@ public class DiscoveryNode implements Writeable<DiscoveryNode>, ToXContent {
     private final Version version;
     private final Set<Role> roles;
 
-    /**
-     * Creates a new {@link DiscoveryNode} by reading from the stream provided as argument
-     * @param in the stream
-     * @throws IOException if there is an error while reading from the stream
-     */
-    public DiscoveryNode(StreamInput in) throws IOException {
-        this.nodeName = in.readString().intern();
-        this.nodeId = in.readString().intern();
-        this.hostName = in.readString().intern();
-        this.hostAddress = in.readString().intern();
-        this.address = TransportAddressSerializers.addressFromStream(in);
-        int size = in.readVInt();
-        this.attributes = new HashMap<>(size);
-        for (int i = 0; i < size; i++) {
-            this.attributes.put(in.readString(), in.readString());
-        }
-        int rolesSize = in.readVInt();
-        this.roles = EnumSet.noneOf(Role.class);
-        for (int i = 0; i < rolesSize; i++) {
-            int ordinal = in.readVInt();
-            if (ordinal < 0 || ordinal >= Role.values().length) {
-                throw new IOException("Unknown Role ordinal [" + ordinal + "]");
-            }
-            this.roles.add(Role.values()[ordinal]);
-        }
-        this.version = Version.readVersion(in);
-    }
-
     /**
      * Creates a new {@link DiscoveryNode}
      * <p>
@@ -204,6 +176,53 @@ public class DiscoveryNode implements Writeable<DiscoveryNode>, ToXContent {
         this.roles = Collections.unmodifiableSet(rolesSet);
     }
 
+    /**
+     * Creates a new {@link DiscoveryNode} by reading from the stream provided as argument
+     * @param in the stream
+     * @throws IOException if there is an error while reading from the stream
+     */
+    public DiscoveryNode(StreamInput in) throws IOException {
+        this.nodeName = in.readString().intern();
+        this.nodeId = in.readString().intern();
+        this.hostName = in.readString().intern();
+        this.hostAddress = in.readString().intern();
+        this.address = TransportAddressSerializers.addressFromStream(in);
+        int size = in.readVInt();
+        this.attributes = new HashMap<>(size);
+        for (int i = 0; i < size; i++) {
+            this.attributes.put(in.readString(), in.readString());
+        }
+        int rolesSize = in.readVInt();
+        this.roles = EnumSet.noneOf(Role.class);
+        for (int i = 0; i < rolesSize; i++) {
+            int ordinal = in.readVInt();
+            if (ordinal < 0 || ordinal >= Role.values().length) {
+                throw new IOException("Unknown Role ordinal [" + ordinal + "]");
+            }
+            this.roles.add(Role.values()[ordinal]);
+        }
+        this.version = Version.readVersion(in);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(nodeName);
+        out.writeString(nodeId);
+        out.writeString(hostName);
+        out.writeString(hostAddress);
+        addressToStream(out, address);
+        out.writeVInt(attributes.size());
+        for (Map.Entry<String, String> entry : attributes.entrySet()) {
+            out.writeString(entry.getKey());
+            out.writeString(entry.getValue());
+        }
+        out.writeVInt(roles.size());
+        for (Role role : roles) {
+            out.writeVInt(role.ordinal());
+        }
+        Version.writeVersion(version, out);
+    }
+
     /**
      * The address that the node can be communicated with.
      */
@@ -273,30 +292,6 @@ public class DiscoveryNode implements Writeable<DiscoveryNode>, ToXContent {
         return this.hostAddress;
     }
 
-    @Override
-    public DiscoveryNode readFrom(StreamInput in) throws IOException {
-        return new DiscoveryNode(in);
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeString(nodeName);
-        out.writeString(nodeId);
-        out.writeString(hostName);
-        out.writeString(hostAddress);
-        addressToStream(out, address);
-        out.writeVInt(attributes.size());
-        for (Map.Entry<String, String> entry : attributes.entrySet()) {
-            out.writeString(entry.getKey());
-            out.writeString(entry.getValue());
-        }
-        out.writeVInt(roles.size());
-        for (Role role : roles) {
-            out.writeVInt(role.ordinal());
-        }
-        Version.writeVersion(version, out);
-    }
-
     @Override
     public boolean equals(Object obj) {
         if (!(obj instanceof DiscoveryNode)) {
@@ -40,7 +40,7 @@ import java.io.IOException;
 /**
  * Holds additional information as to why the shard is in unassigned state.
  */
-public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
+public class UnassignedInfo implements ToXContent, Writeable {
 
     public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime");
     private static final TimeValue DEFAULT_DELAYED_NODE_LEFT_TIMEOUT = TimeValue.timeValueMinutes(1);
@@ -30,7 +30,7 @@ import java.io.IOException;
 /**
  * This interface defines the basic methods of commands for allocation
 */
-public interface AllocationCommand extends NamedWriteable<AllocationCommand>, ToXContent {
+public interface AllocationCommand extends NamedWriteable, ToXContent {
     interface Parser<T extends AllocationCommand> {
         /**
          * Reads an {@link AllocationCommand} of type <code>T</code> from a {@link XContentParser}.
@@ -38,7 +38,7 @@ import java.util.Locale;
 /**
  * Geo distance calculation.
  */
-public enum GeoDistance implements Writeable<GeoDistance> {
+public enum GeoDistance implements Writeable {
     /**
      * Calculates distance as points on a plane. Faster, but less accurate than {@link #ARC}.
     */
@@ -126,9 +126,7 @@ public enum GeoDistance implements Writeable<GeoDistance> {
         }
     };
 
     /** Returns a GeoDistance object as read from the StreamInput. */
-    @Override
-    public GeoDistance readFrom(StreamInput in) throws IOException {
+    public static GeoDistance readFromStream(StreamInput in) throws IOException {
         int ord = in.readVInt();
         if (ord < 0 || ord >= values().length) {
             throw new IOException("Unknown GeoDistance ordinal [" + ord + "]");
@@ -136,10 +134,6 @@ public enum GeoDistance implements Writeable<GeoDistance> {
         return GeoDistance.values()[ord];
     }
 
-    public static GeoDistance readFromStream(StreamInput in) throws IOException {
-        return DEFAULT.readFrom(in);
-    }
-
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeVInt(this.ordinal());
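GeoDistance shows the enum half of the refactoring: the @Override readFrom instance method collapses into the static readFromStream that previously just delegated to it. ShapeRelation, SpatialStrategy, and SubAggCollectionMode below follow the same ordinal-based template; a generic sketch (the Color enum is hypothetical, not from this commit):

    public enum Color implements Writeable {
        RED, GREEN, BLUE;

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVInt(ordinal());                 // write the ordinal as a vint
        }

        public static Color readFromStream(StreamInput in) throws IOException {
            int ordinal = in.readVInt();
            if (ordinal < 0 || ordinal >= values().length) {
                throw new IOException("Unknown Color ordinal [" + ordinal + "]");
            }
            return values()[ordinal];                 // static read replaces the instance readFrom
        }
    }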
@@ -30,7 +30,7 @@ import java.util.Locale;
  * Enum representing the relationship between a Query / Filter Shape and indexed Shapes
  * that will be used to determine if a Document should be matched or not
  */
-public enum ShapeRelation implements Writeable<ShapeRelation>{
+public enum ShapeRelation implements Writeable {
 
     INTERSECTS("intersects"),
     DISJOINT("disjoint"),
@@ -43,8 +43,7 @@ public enum ShapeRelation implements Writeable<ShapeRelation>{
         this.relationName = relationName;
     }
 
-    @Override
-    public ShapeRelation readFrom(StreamInput in) throws IOException {
+    public static ShapeRelation readFromStream(StreamInput in) throws IOException {
         int ordinal = in.readVInt();
         if (ordinal < 0 || ordinal >= values().length) {
             throw new IOException("Unknown ShapeRelation ordinal [" + ordinal + "]");
@@ -27,7 +27,7 @@ import java.io.IOException;
 /**
  *
 */
-public enum SpatialStrategy implements Writeable<SpatialStrategy> {
+public enum SpatialStrategy implements Writeable {
 
     TERM("term"),
     RECURSIVE("recursive");
@@ -42,8 +42,7 @@ public enum SpatialStrategy implements Writeable<SpatialStrategy> {
         return strategyName;
     }
 
-    @Override
-    public SpatialStrategy readFrom(StreamInput in) throws IOException {
+    public static SpatialStrategy readFromStream(StreamInput in) throws IOException {
         int ordinal = in.readVInt();
         if (ordinal < 0 || ordinal >= values().length) {
             throw new IOException("Unknown SpatialStrategy ordinal [" + ordinal + "]");
@@ -51,7 +51,7 @@ import java.util.Locale;
 /**
  * Basic class for building GeoJSON shapes like Polygons, Linestrings, etc
 */
-public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWriteable<ShapeBuilder> {
+public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWriteable {
 
     protected static final ESLogger LOGGER = ESLoggerFactory.getLogger(ShapeBuilder.class.getName());
 
@@ -24,7 +24,7 @@ package org.elasticsearch.common.io.stream;
 * To be used for arbitrary serializable objects (e.g. queries); when reading them, their name tells
 * which specific object needs to be created.
 */
-public interface NamedWriteable<T> extends Writeable<T> {
+public interface NamedWriteable extends Writeable {
 
     /**
      * Returns the name of the writeable object
@@ -34,7 +34,7 @@ public class NamedWriteableAwareStreamInput extends FilterStreamInput {
     }
 
     @Override
-    public <C extends NamedWriteable<?>> C readNamedWriteable(Class<C> categoryClass) throws IOException {
+    public <C extends NamedWriteable> C readNamedWriteable(Class<C> categoryClass) throws IOException {
         String name = readString();
         Writeable.Reader<? extends C> reader = namedWriteableRegistry.getReader(categoryClass, name);
         C c = reader.read(this);
@@ -36,7 +36,6 @@ public class NamedWriteableRegistry {
      * This method suppresses the rawtypes warning because it intentionally using NamedWriteable instead of {@code NamedWriteable<T>} so it
      * is easier to use and because we might be able to drop the type parameter from NamedWriteable entirely some day.
      */
     @SuppressWarnings("rawtypes")
     public synchronized <T extends NamedWriteable> void register(Class<T> categoryClass, String name,
             Writeable.Reader<? extends T> reader) {
         @SuppressWarnings("unchecked")
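With the type parameter gone, registration keys a Writeable.Reader by category class and name, and readNamedWriteable (below) resolves the reader from the name written ahead of the object. A hypothetical registration sketch (MyAllocationCommand and the no-arg registry constructor are assumptions, not from this commit):

    NamedWriteableRegistry registry = new NamedWriteableRegistry();
    // The reader is by convention a StreamInput constructor reference.
    registry.register(AllocationCommand.class, "my_command", MyAllocationCommand::new);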
@@ -714,14 +714,14 @@ public abstract class StreamInput extends InputStream {
      * Use {@link FilterInputStream} instead which wraps a stream and supports a {@link NamedWriteableRegistry} too.
      */
     @Nullable
-    public <C extends NamedWriteable<?>> C readNamedWriteable(@SuppressWarnings("unused") Class<C> categoryClass) throws IOException {
+    public <C extends NamedWriteable> C readNamedWriteable(@SuppressWarnings("unused") Class<C> categoryClass) throws IOException {
         throw new UnsupportedOperationException("can't read named writeable from StreamInput");
     }
 
     /**
      * Reads an optional {@link NamedWriteable}.
      */
-    public <C extends NamedWriteable<?>> C readOptionalNamedWriteable(Class<C> categoryClass) throws IOException {
+    public <C extends NamedWriteable> C readOptionalNamedWriteable(Class<C> categoryClass) throws IOException {
         if (readBoolean()) {
             return readNamedWriteable(categoryClass);
         }
@@ -544,7 +544,7 @@ public abstract class StreamOutput extends OutputStream {
         }
     }
 
-    public void writeOptionalWriteable(@Nullable Writeable<?> writeable) throws IOException {
+    public void writeOptionalWriteable(@Nullable Writeable writeable) throws IOException {
         if (writeable != null) {
             writeBoolean(true);
             writeable.writeTo(this);
@@ -675,7 +675,7 @@ public abstract class StreamOutput extends OutputStream {
     /**
      * Writes a {@link NamedWriteable} to the current stream, by first writing its name and then the object itself
     */
-    public void writeNamedWriteable(NamedWriteable<?> namedWriteable) throws IOException {
+    public void writeNamedWriteable(NamedWriteable namedWriteable) throws IOException {
         writeString(namedWriteable.getWriteableName());
         namedWriteable.writeTo(this);
     }
@@ -683,7 +683,7 @@ public abstract class StreamOutput extends OutputStream {
     /**
      * Write an optional {@link NamedWriteable} to the stream.
     */
-    public void writeOptionalNamedWriteable(@Nullable NamedWriteable<?> namedWriteable) throws IOException {
+    public void writeOptionalNamedWriteable(@Nullable NamedWriteable namedWriteable) throws IOException {
         if (namedWriteable == null) {
             writeBoolean(false);
         } else {
@@ -722,7 +722,7 @@ public abstract class StreamOutput extends OutputStream {
     /**
      * Writes a list of {@link Writeable} objects
     */
-    public <T extends Writeable<T>> void writeList(List<T> list) throws IOException {
+    public <T extends Writeable> void writeList(List<T> list) throws IOException {
         writeVInt(list.size());
         for (T obj: list) {
             obj.writeTo(this);
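writeList has no generic read counterpart in this changeset; the reading side mirrors it by hand with the vint length prefix and a per-element stream constructor. A sketch using SearchShardTarget, whose stream constructor appears later in this diff:

    out.writeList(shardTargets);                  // writes a vint size, then each element's writeTo
    // ... and on the reading side:
    int size = in.readVInt();
    List<SearchShardTarget> targets = new ArrayList<>(size);
    for (int i = 0; i < size; i++) {
        targets.add(new SearchShardTarget(in));   // per-element StreamInput constructor
    }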
@@ -32,22 +32,12 @@ import java.io.IOException;
 * Prefer implementing this interface over implementing {@link Streamable} where possible. Lots of code depends on {@linkplain Streamable}
 * so this isn't always possible.
 */
-public interface Writeable<T> { // TODO remove <T>
+public interface Writeable {
     /**
      * Write this into the {@linkplain StreamOutput}.
      */
     void writeTo(StreamOutput out) throws IOException;
 
-    /**
-     * Read this object from a stream. Use a {@link Writeable.Reader} instead. This lives on for backwards compatibility but should be
-     * removed before 5.0.0GA. It is not deprecated because Diffable extends this interface and it shouldn't be deprecated there.
-     */
-    default T readFrom(StreamInput in) throws IOException {
-        // NORELEASE remove before 5.0.0GA
-        throw new UnsupportedOperationException(
-                "Prefer calling a constructor or static method that takes a StreamInput to calling readFrom.");
-    }
-
     /**
      * Reference to a method that can read some object from a stream. By convention this is a constructor that takes
      * {@linkplain StreamInput} as an argument for most classes and a static method for things like enums. Returning null from one of these
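The retained javadoc spells out the new convention: reading is expressed as a Writeable.Reader, normally a StreamInput constructor for classes and a static method for enums. In terms of code elsewhere in this diff:

    TaskInfo info = new TaskInfo(in);                                      // classes: stream constructor
    GeoDistance distance = GeoDistance.readFromStream(in);                 // enums: static readFromStream
    strategy = in.readOptionalWriteable(SpatialStrategy::readFromStream);  // Reader passed as method reference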
@@ -27,7 +27,7 @@ import org.elasticsearch.common.io.stream.Writeable;
 import java.io.IOException;
 import java.util.Locale;
 
-public enum CombineFunction implements Writeable<CombineFunction> {
+public enum CombineFunction implements Writeable {
     MULTIPLY {
         @Override
         public float combine(double queryScore, double funcScore, double maxBoost) {
@@ -125,7 +125,7 @@ public class FieldValueFactorFunction extends ScoreFunction {
     * The Type class encapsulates the modification types that can be applied
     * to the score/value product.
     */
-    public enum Modifier implements Writeable<Modifier> {
+    public enum Modifier implements Writeable {
         NONE {
             @Override
             public double apply(double n) {
@@ -75,7 +75,7 @@ public class FiltersFunctionScoreQuery extends Query {
         }
     }
 
-    public enum ScoreMode implements Writeable<ScoreMode> {
+    public enum ScoreMode implements Writeable {
         FIRST, AVG, MAX, SUM, MIN, MULTIPLY;
 
         @Override
@@ -25,7 +25,7 @@ import org.elasticsearch.common.io.stream.Writeable;
 /**
  *
 */
-public interface TransportAddress extends Writeable<TransportAddress> {
+public interface TransportAddress extends Writeable {
 
     /**
      * Returns the host string for this transport address
@@ -33,7 +33,7 @@ import java.io.IOException;
 * the earth ellipsoid defined in {@link GeoUtils}. The default unit used within
 * this project is <code>METERS</code> which is defined by <code>DEFAULT</code>
 */
-public enum DistanceUnit implements Writeable<DistanceUnit> {
+public enum DistanceUnit implements Writeable {
     INCH(0.0254, "in", "inch"),
     YARD(0.9144, "yd", "yards"),
     FEET(0.3048, "ft", "feet"),
@@ -35,7 +35,7 @@ import java.util.Objects;
 * parsing and conversion from similarities to edit distances
 * etc.
 */
-public final class Fuzziness implements ToXContent, Writeable<Fuzziness> {
+public final class Fuzziness implements ToXContent, Writeable {
 
     public static final String X_FIELD_NAME = "fuzziness";
     public static final Fuzziness ZERO = new Fuzziness(0);
@@ -60,7 +60,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 * </pre>
 *
 */
-public final class ThreadContext implements Closeable, Writeable<ThreadContext> {
+public final class ThreadContext implements Closeable, Writeable {
 
     public static final String PREFIX = "request.headers";
     public static final Setting<Settings> DEFAULT_HEADERS_SETTING = Setting.groupSetting(PREFIX + ".", Property.NodeScope);
@@ -29,7 +29,7 @@ import java.io.IOException;
 /**
  *
 */
-public class Index implements Writeable<Index> {
+public class Index implements Writeable {
 
     public static final Index[] EMPTY_ARRAY = new Index[0];
 
@@ -28,7 +28,7 @@ import java.io.IOException;
 /**
  *
 */
-public enum VersionType implements Writeable<VersionType> {
+public enum VersionType implements Writeable {
     INTERNAL((byte) 0) {
         @Override
         public boolean isVersionConflictForWrites(long currentVersion, long expectedVersion, boolean deleted) {
@@ -27,7 +27,7 @@ import org.elasticsearch.common.io.stream.Writeable;
 import java.io.IOException;
 
 /** Specifies how a geo query should be run. */
-public enum GeoExecType implements Writeable<GeoExecType> {
+public enum GeoExecType implements Writeable {
 
     MEMORY(0), INDEXED(1);
 
@@ -154,8 +154,8 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
             indexedShapeIndex = in.readOptionalString();
             indexedShapePath = in.readOptionalString();
         }
-        relation = ShapeRelation.DISJOINT.readFrom(in);
-        strategy = in.readOptionalWriteable(SpatialStrategy.RECURSIVE::readFrom);
+        relation = ShapeRelation.readFromStream(in);
+        strategy = in.readOptionalWriteable(SpatialStrategy::readFromStream);
         ignoreUnmapped = in.readBoolean();
     }
 
@@ -34,7 +34,7 @@ import java.io.IOException;
 * On IGNORE_MALFORMED invalid coordinates are being accepted.
 * On COERCE invalid coordinates are being corrected to the most likely valid coordinate.
 * */
-public enum GeoValidationMethod implements Writeable<GeoValidationMethod>{
+public enum GeoValidationMethod implements Writeable {
     COERCE, IGNORE_MALFORMED, STRICT;
 
     public static final GeoValidationMethod DEFAULT = STRICT;
@@ -147,7 +147,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
     /**
     * A single item to be used for a {@link MoreLikeThisQueryBuilder}.
     */
-    public static final class Item implements ToXContent, Writeable<Item> {
+    public static final class Item implements ToXContent, Writeable {
         public static final Item[] EMPTY_ARRAY = new Item[0];
 
         public interface Field {
@@ -93,7 +93,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
     private Float cutoffFrequency = null;
     private MatchQuery.ZeroTermsQuery zeroTermsQuery = DEFAULT_ZERO_TERMS_QUERY;
 
-    public enum Type implements Writeable<Type> {
+    public enum Type implements Writeable {
 
         /**
         * Uses the best matching boolean field as main score and uses
@@ -28,7 +28,7 @@ import org.elasticsearch.common.util.CollectionUtils;
 import java.io.IOException;
 import java.util.Locale;
 
-public enum Operator implements Writeable<Operator> {
+public enum Operator implements Writeable {
     OR, AND;
 
     public BooleanClause.Occur toBooleanClauseOccur() {
@@ -25,7 +25,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
 
 import java.io.IOException;
 
-public interface QueryBuilder<QB extends QueryBuilder<QB>> extends NamedWriteable<QB>, ToXContent {
+public interface QueryBuilder<QB extends QueryBuilder<QB>> extends NamedWriteable, ToXContent {
 
     /**
     * Converts this QueryBuilder to a lucene {@link Query}.
@@ -331,7 +331,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder<FunctionScor
     * Function to be associated with an optional filter, meaning it will be executed only for the documents
     * that match the given filter.
     */
-    public static class FilterFunctionBuilder implements ToXContent, Writeable<FilterFunctionBuilder> {
+    public static class FilterFunctionBuilder implements ToXContent, Writeable {
         private final QueryBuilder<?> filter;
         private final ScoreFunctionBuilder<?> scoreFunction;
 
@@ -31,7 +31,7 @@ import org.elasticsearch.index.query.QueryShardContext;
 import java.io.IOException;
 import java.util.Objects;
 
-public abstract class ScoreFunctionBuilder<FB extends ScoreFunctionBuilder<FB>> implements ToXContent, NamedWriteable<FB> {
+public abstract class ScoreFunctionBuilder<FB extends ScoreFunctionBuilder<FB>> implements ToXContent, NamedWriteable {
 
     private Float weight;
 
@@ -59,7 +59,7 @@ import java.util.Optional;
 
 import static org.elasticsearch.common.xcontent.XContentParser.Token.END_OBJECT;
 
-public final class InnerHitBuilder extends ToXContentToBytes implements Writeable<InnerHitBuilder> {
+public final class InnerHitBuilder extends ToXContentToBytes implements Writeable {
 
     public static final ParseField NAME_FIELD = new ParseField("name");
     public static final ParseField NESTED_PATH_FIELD = new ParseField("path");
@@ -33,7 +33,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
 
-public final class InnerHitsBuilder extends ToXContentToBytes implements Writeable<InnerHitsBuilder> {
+public final class InnerHitsBuilder extends ToXContentToBytes implements Writeable {
     private final Map<String, InnerHitBuilder> innerHitsBuilders;
 
     public InnerHitsBuilder() {
@@ -48,7 +48,7 @@ import java.io.IOException;
 
 public class MatchQuery {
 
-    public static enum Type implements Writeable<Type> {
+    public static enum Type implements Writeable {
         /**
         * The text is analyzed and terms are added to a boolean query.
         */
@@ -84,7 +84,7 @@ public class MatchQuery {
         }
     }
 
-    public static enum ZeroTermsQuery implements Writeable<ZeroTermsQuery> {
+    public static enum ZeroTermsQuery implements Writeable {
         NONE(0),
         ALL(1);
 
@@ -729,9 +729,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
     *
     * @see StoreFileMetaData
     */
-    public final static class MetadataSnapshot implements Iterable<StoreFileMetaData>, Writeable<MetadataSnapshot> {
-        private static final ESLogger logger = Loggers.getLogger(MetadataSnapshot.class);
-
+    public final static class MetadataSnapshot implements Iterable<StoreFileMetaData>, Writeable {
         private final Map<String, StoreFileMetaData> metadata;
 
         public static final MetadataSnapshot EMPTY = new MetadataSnapshot();
@@ -760,6 +758,9 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
             assert metadata.isEmpty() || numSegmentFiles() == 1 : "numSegmentFiles: " + numSegmentFiles();
         }
 
+        /**
+         * Read from a stream.
+         */
         public MetadataSnapshot(StreamInput in) throws IOException {
             final int size = in.readVInt();
             Map<String, StoreFileMetaData> metadata = new HashMap<>();
@@ -779,6 +780,20 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
             assert metadata.isEmpty() || numSegmentFiles() == 1 : "numSegmentFiles: " + numSegmentFiles();
         }
 
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeVInt(this.metadata.size());
+            for (StoreFileMetaData meta : this) {
+                meta.writeTo(out);
+            }
+            out.writeVInt(commitUserData.size());
+            for (Map.Entry<String, String> entry : commitUserData.entrySet()) {
+                out.writeString(entry.getKey());
+                out.writeString(entry.getValue());
+            }
+            out.writeLong(numDocs);
+        }
+
         /**
         * Returns the number of documents in this store snapshot
         */
@@ -1020,20 +1035,6 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
             return metadata.size();
         }
 
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            out.writeVInt(this.metadata.size());
-            for (StoreFileMetaData meta : this) {
-                meta.writeTo(out);
-            }
-            out.writeVInt(commitUserData.size());
-            for (Map.Entry<String, String> entry : commitUserData.entrySet()) {
-                out.writeString(entry.getKey());
-                out.writeString(entry.getValue());
-            }
-            out.writeLong(numDocs);
-        }
-
         public Map<String, String> getCommitUserData() {
             return commitUserData;
         }
@@ -1076,11 +1077,6 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
         public String getSyncId() {
             return commitUserData.get(Engine.SYNC_COMMIT_ID);
         }
-
-        @Override
-        public MetadataSnapshot readFrom(StreamInput in) throws IOException {
-            return new MetadataSnapshot(in);
-        }
     }
 
     /**
@@ -34,7 +34,7 @@ import java.util.Objects;
 /**
  * Encapsulates the parameters needed to fetch terms.
 */
-public class TermsLookup implements Writeable<TermsLookup>, ToXContent {
+public class TermsLookup implements Writeable, ToXContent {
     private String index;
     private final String type;
     private final String id;
@@ -30,7 +30,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
-public class IngestStats implements Writeable<IngestStats>, ToXContent {
+public class IngestStats implements Writeable, ToXContent {
     private final Stats totalStats;
     private final Map<String, Stats> statsPerPipeline;
 
@@ -93,7 +93,7 @@ public class IngestStats implements Writeable<IngestStats>, ToXContent {
         return builder;
     }
 
-    public static class Stats implements Writeable<Stats>, ToXContent {
+    public static class Stats implements Writeable, ToXContent {
 
         private final long ingestCount;
         private final long ingestTimeInMillis;
@@ -31,7 +31,7 @@ import java.util.Objects;
 import java.util.Set;
 import java.util.TreeSet;
 
-public class IngestInfo implements Writeable<IngestInfo>, ToXContent {
+public class IngestInfo implements Writeable, ToXContent {
 
     private final Set<ProcessorInfo> processors;
 
@@ -27,7 +27,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 
 import java.io.IOException;
 
-public class ProcessorInfo implements Writeable<ProcessorInfo>, ToXContent, Comparable<ProcessorInfo> {
+public class ProcessorInfo implements Writeable, ToXContent, Comparable<ProcessorInfo> {
 
     private final String type;
 
@@ -42,28 +42,28 @@ public final class DateProcessor extends AbstractProcessor {
 
     private final DateTimeZone timezone;
     private final Locale locale;
-    private final String matchField;
+    private final String field;
     private final String targetField;
-    private final List<String> matchFormats;
+    private final List<String> formats;
     private final List<Function<String, DateTime>> dateParsers;
 
-    DateProcessor(String tag, DateTimeZone timezone, Locale locale, String matchField, List<String> matchFormats, String targetField) {
+    DateProcessor(String tag, DateTimeZone timezone, Locale locale, String field, List<String> formats, String targetField) {
         super(tag);
         this.timezone = timezone;
         this.locale = locale;
-        this.matchField = matchField;
+        this.field = field;
         this.targetField = targetField;
-        this.matchFormats = matchFormats;
+        this.formats = formats;
         this.dateParsers = new ArrayList<>();
-        for (String matchFormat : matchFormats) {
-            DateFormat dateFormat = DateFormat.fromString(matchFormat);
-            dateParsers.add(dateFormat.getFunction(matchFormat, timezone, locale));
+        for (String format : formats) {
+            DateFormat dateFormat = DateFormat.fromString(format);
+            dateParsers.add(dateFormat.getFunction(format, timezone, locale));
         }
     }
 
     @Override
     public void execute(IngestDocument ingestDocument) {
-        String value = ingestDocument.getFieldValue(matchField, String.class);
+        String value = ingestDocument.getFieldValue(field, String.class);
 
         DateTime dateTime = null;
         Exception lastException = null;
@@ -96,23 +96,23 @@
         return locale;
     }
 
-    String getMatchField() {
-        return matchField;
+    String getField() {
+        return field;
     }
 
     String getTargetField() {
         return targetField;
     }
 
-    List<String> getMatchFormats() {
-        return matchFormats;
+    List<String> getFormats() {
+        return formats;
     }
 
     public static final class Factory extends AbstractProcessorFactory<DateProcessor> {
 
         @SuppressWarnings("unchecked")
         public DateProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
-            String matchField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "match_field");
+            String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
             String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", DEFAULT_TARGET_FIELD);
             String timezoneString = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "timezone");
             DateTimeZone timezone = timezoneString == null ? DateTimeZone.UTC : DateTimeZone.forID(timezoneString);
@@ -125,8 +125,8 @@ public final class DateProcessor extends AbstractProcessor {
                 throw new IllegalArgumentException("Invalid language tag specified: " + localeString);
                 }
             }
-            List<String> matchFormats = ConfigurationUtils.readList(TYPE, processorTag, config, "match_formats");
-            return new DateProcessor(processorTag, timezone, locale, matchField, matchFormats, targetField);
+            List<String> formats = ConfigurationUtils.readList(TYPE, processorTag, config, "formats");
+            return new DateProcessor(processorTag, timezone, locale, field, formats, targetField);
         }
     }
 }
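Beyond the field renames inside the class, the factory now reads the configuration keys "field" and "formats" where it used to read "match_field" and "match_formats". An illustrative construction (the literal values are made up):

    Map<String, Object> config = new HashMap<>();
    config.put("field", "initial_date");                          // was "match_field"
    config.put("formats", Arrays.asList("dd/MM/yyyy hh:mm:ss"));  // was "match_formats"
    config.put("target_field", "timestamp");
    DateProcessor processor = new DateProcessor.Factory().doCreate("tag1", config);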
@@ -33,39 +33,39 @@ public final class RenameProcessor extends AbstractProcessor {
 
     public static final String TYPE = "rename";
 
-    private final String oldFieldName;
-    private final String newFieldName;
+    private final String field;
+    private final String targetField;
 
-    RenameProcessor(String tag, String oldFieldName, String newFieldName) {
+    RenameProcessor(String tag, String field, String targetField) {
         super(tag);
-        this.oldFieldName = oldFieldName;
-        this.newFieldName = newFieldName;
+        this.field = field;
+        this.targetField = targetField;
     }
 
-    String getOldFieldName() {
-        return oldFieldName;
+    String getField() {
+        return field;
     }
 
-    String getNewFieldName() {
-        return newFieldName;
+    String getTargetField() {
+        return targetField;
    }
 
     @Override
     public void execute(IngestDocument document) {
-        if (document.hasField(oldFieldName) == false) {
-            throw new IllegalArgumentException("field [" + oldFieldName + "] doesn't exist");
+        if (document.hasField(field) == false) {
+            throw new IllegalArgumentException("field [" + field + "] doesn't exist");
         }
-        if (document.hasField(newFieldName)) {
-            throw new IllegalArgumentException("field [" + newFieldName + "] already exists");
+        if (document.hasField(targetField)) {
+            throw new IllegalArgumentException("field [" + targetField + "] already exists");
         }
 
-        Object oldValue = document.getFieldValue(oldFieldName, Object.class);
-        document.setFieldValue(newFieldName, oldValue);
+        Object oldValue = document.getFieldValue(field, Object.class);
+        document.setFieldValue(targetField, oldValue);
         try {
-            document.removeField(oldFieldName);
+            document.removeField(field);
         } catch (Exception e) {
             //remove the new field if the removal of the old one failed
-            document.removeField(newFieldName);
+            document.removeField(targetField);
             throw e;
         }
     }
@@ -79,8 +79,8 @@ public final class RenameProcessor extends AbstractProcessor {
         @Override
         public RenameProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
             String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
-            String newField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "to");
-            return new RenameProcessor(processorTag, field, newField);
+            String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field");
+            return new RenameProcessor(processorTag, field, targetField);
         }
     }
 }
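The same convention lands in RenameProcessor: the destination key "to" becomes "target_field", matching DateProcessor above. Illustrative configuration (values made up):

    Map<String, Object> config = new HashMap<>();
    config.put("field", "foo");
    config.put("target_field", "bar");    // previously config.put("to", "bar")
    RenameProcessor processor = new RenameProcessor.Factory().doCreate("tag1", config);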
@@ -53,8 +53,8 @@ public class RestAnalyzeAction extends BaseRestHandler {
         public static final ParseField TEXT = new ParseField("text");
         public static final ParseField FIELD = new ParseField("field");
         public static final ParseField TOKENIZER = new ParseField("tokenizer");
-        public static final ParseField TOKEN_FILTERS = new ParseField("token_filters", "filters");
-        public static final ParseField CHAR_FILTERS = new ParseField("char_filters");
+        public static final ParseField TOKEN_FILTERS = new ParseField("filter", "token_filter");
+        public static final ParseField CHAR_FILTERS = new ParseField("char_filter");
         public static final ParseField EXPLAIN = new ParseField("explain");
         public static final ParseField ATTRIBUTES = new ParseField("attributes");
     }
@@ -78,8 +78,8 @@ public class RestAnalyzeAction extends BaseRestHandler {
             analyzeRequest.analyzer(request.param("analyzer"));
             analyzeRequest.field(request.param("field"));
             analyzeRequest.tokenizer(request.param("tokenizer"));
-            analyzeRequest.tokenFilters(request.paramAsStringArray("token_filters", request.paramAsStringArray("filters", analyzeRequest.tokenFilters())));
-            analyzeRequest.charFilters(request.paramAsStringArray("char_filters", analyzeRequest.charFilters()));
+            analyzeRequest.tokenFilters(request.paramAsStringArray("filter", request.paramAsStringArray("token_filter", analyzeRequest.tokenFilters())));
+            analyzeRequest.charFilters(request.paramAsStringArray("char_filter", analyzeRequest.charFilters()));
             analyzeRequest.explain(request.paramAsBoolean("explain", false));
             analyzeRequest.attributes(request.paramAsStringArray("attributes", analyzeRequest.attributes()));
 
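The _analyze parameter renames lean on ParseField's deprecated-name support: the trailing varargs of the ParseField constructor are accepted as deprecated alternatives, so requests using the older spelling keep parsing while the new one becomes preferred. For example:

    // "filter" is the preferred name; "token_filter" is still accepted as a deprecated alias.
    public static final ParseField TOKEN_FILTERS = new ParseField("filter", "token_filter");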
@@ -46,7 +46,7 @@ import java.util.Objects;
 import java.util.concurrent.Callable;
 
 /** A formatter for values as returned by the fielddata/doc-values APIs. */
-public interface DocValueFormat extends NamedWriteable<DocValueFormat> {
+public interface DocValueFormat extends NamedWriteable {
 
     String format(long value);
 
@@ -45,7 +45,7 @@ import java.util.Locale;
 /**
  * Defines what values to pick in the case a document contains multiple values for a particular field.
 */
-public enum MultiValueMode implements Writeable<MultiValueMode> {
+public enum MultiValueMode implements Writeable {
 
     /**
     * Pick the sum of all the values.
@@ -33,7 +33,7 @@ import java.io.IOException;
 /**
  * The target that the search request was executed on.
  */
-public class SearchShardTarget implements Writeable<SearchShardTarget>, Comparable<SearchShardTarget> {
+public class SearchShardTarget implements Writeable, Comparable<SearchShardTarget> {
 
     private Text nodeId;
     private Text index;
@@ -100,11 +100,6 @@ public class SearchShardTarget implements Writeable<SearchShardTarget>, Comparab
         return i;
     }
 
-    @Override
-    public SearchShardTarget readFrom(StreamInput in) throws IOException {
-        return new SearchShardTarget(in);
-    }
-
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         if (nodeId == null) {

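This is the pattern repeated throughout the rest of the diff: the prototype-style readFrom(StreamInput) instance method is dropped in favor of a constructor that reads from the stream, which is why Writeable no longer needs its type parameter. A hedged, self-contained sketch of the before/after shape (java.io.DataInput/DataOutput stand in for StreamInput/StreamOutput; all names here are illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

public class StreamConstructorSketch {
    interface SimpleWriteable {                 // stand-in for the new, unparameterized Writeable
        void writeTo(DataOutput out) throws IOException;
    }

    static final class Target implements SimpleWriteable {
        private final String nodeId;

        Target(String nodeId) {
            this.nodeId = nodeId;
        }

        // Read from a stream: replaces the old readFrom(StreamInput) instance method.
        Target(DataInput in) throws IOException {
            this.nodeId = in.readUTF();
        }

        @Override
        public void writeTo(DataOutput out) throws IOException {
            out.writeUTF(nodeId);
        }
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        new Target("node-1").writeTo(new DataOutputStream(bytes));
        Target copy = new Target(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        copy.writeTo(new DataOutputStream(new ByteArrayOutputStream())); // round-trips without a prototype instance
    }
}

The old contract forced callers to keep a throwaway constant around just to invoke readFrom on it (e.g. SubAggCollectionMode.BREADTH_FIRST.readFrom(in) later in this diff); reading in a constructor removes that wart and lets the fields be final.
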
@@ -102,7 +102,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
     public abstract InternalAggregation buildEmptyAggregation();
 
     /** Aggregation mode for sub aggregations. */
-    public enum SubAggCollectionMode implements Writeable<SubAggCollectionMode> {
+    public enum SubAggCollectionMode implements Writeable {
 
         /**
          * Creates buckets and delegates to child aggregators in a single pass over
@@ -139,8 +139,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
             throw new ElasticsearchParseException("no [{}] found for value [{}]", KEY.getPreferredName(), value);
         }
 
-        @Override
-        public SubAggCollectionMode readFrom(StreamInput in) throws IOException {
+        public static SubAggCollectionMode readFromStream(StreamInput in) throws IOException {
             int ordinal = in.readVInt();
             if (ordinal < 0 || ordinal >= values().length) {
                 throw new IOException("Unknown SubAggCollectionMode ordinal [" + ordinal + "]");

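Enums get the same treatment with a static factory instead of a constructor, since enum constants cannot be instantiated from a stream. A hedged, self-contained sketch of the readFromStream pattern used here (plain readInt stands in for readVInt; the enum is illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

public class EnumStreamSketch {
    enum Mode {
        DEPTH_FIRST, BREADTH_FIRST;

        // Static replacement for the old instance readFrom: validate, then index values().
        static Mode readFromStream(DataInput in) throws IOException {
            int ordinal = in.readInt();
            if (ordinal < 0 || ordinal >= values().length) {
                throw new IOException("Unknown Mode ordinal [" + ordinal + "]");
            }
            return values()[ordinal];
        }

        void writeTo(DataOutput out) throws IOException {
            out.writeInt(ordinal());
        }
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        Mode.BREADTH_FIRST.writeTo(new DataOutputStream(bytes));
        Mode mode = Mode.readFromStream(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(mode); // BREADTH_FIRST
    }
}
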
@@ -36,8 +36,7 @@ import java.util.Objects;
 /**
  * A factory that knows how to create an {@link Aggregator} of a specific type.
  */
-public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extends ToXContentToBytes
-        implements NamedWriteable<AB>, ToXContent {
+public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extends ToXContentToBytes implements NamedWriteable, ToXContent {
 
     protected String name;
     protected Type type;

@@ -122,7 +122,7 @@ public class AggregatorFactories {
         }
     }
 
-    public static class Builder extends ToXContentToBytes implements Writeable<Builder> {
+    public static class Builder extends ToXContentToBytes implements Writeable {
         private final Set<String> names = new HashSet<>();
         private final List<AggregatorBuilder<?>> aggregatorBuilders = new ArrayList<>();
         private final List<PipelineAggregatorBuilder<?>> pipelineAggregatorBuilders = new ArrayList<>();

@@ -57,7 +57,7 @@ public class FiltersAggregator extends BucketsAggregator {
     public static final ParseField OTHER_BUCKET_FIELD = new ParseField("other_bucket");
     public static final ParseField OTHER_BUCKET_KEY_FIELD = new ParseField("other_bucket_key");
 
-    public static class KeyedFilter implements Writeable<KeyedFilter>, ToXContent {
+    public static class KeyedFilter implements Writeable, ToXContent {
         private final String key;
         private final QueryBuilder<?> filter;
 

@@ -49,19 +49,13 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<Dat
      */
     public DateHistogramAggregatorBuilder(StreamInput in) throws IOException {
         super(in, InternalDateHistogram.HISTOGRAM_FACTORY);
-        if (in.readBoolean()) {
-            dateHistogramInterval = DateHistogramInterval.readFromStream(in);
-        }
+        dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new);
     }
 
     @Override
     protected void innerWriteTo(StreamOutput out) throws IOException {
         super.innerWriteTo(out);
-        boolean hasDateInterval = dateHistogramInterval != null;
-        out.writeBoolean(hasDateInterval);
-        if (hasDateInterval) {
-            dateHistogramInterval.writeTo(out);
-        }
+        out.writeOptionalWriteable(dateHistogramInterval);
     }
 
     /**

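The hunk above also swaps hand-rolled presence handling for readOptionalWriteable/writeOptionalWriteable, which becomes possible once DateHistogramInterval gains a stream constructor to use as a method reference. A hedged sketch of what such helpers do (the generic functional interfaces are stand-ins, not the StreamInput/StreamOutput API):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

public class OptionalWriteableSketch {
    interface Reader<T> { T read(DataInput in) throws IOException; }
    interface Writer<T> { void write(DataOutput out, T value) throws IOException; }

    // A presence boolean followed by the value, factored out of every call site.
    static <T> void writeOptional(DataOutput out, T value, Writer<T> writer) throws IOException {
        out.writeBoolean(value != null);
        if (value != null) {
            writer.write(out, value);
        }
    }

    static <T> T readOptional(DataInput in, Reader<T> reader) throws IOException {
        return in.readBoolean() ? reader.read(in) : null;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        writeOptional(new DataOutputStream(bytes), "1d", DataOutput::writeUTF);
        String interval = readOptional(
                new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())), DataInput::readUTF);
        System.out.println(interval); // 1d
    }
}
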
@@ -29,7 +29,7 @@ import java.util.Objects;
 /**
  * The interval the date histogram is based on.
  */
-public class DateHistogramInterval implements Writeable<DateHistogramInterval> {
+public class DateHistogramInterval implements Writeable {
 
     public static final DateHistogramInterval SECOND = new DateHistogramInterval("1s");
     public static final DateHistogramInterval MINUTE = new DateHistogramInterval("1m");
@@ -40,10 +40,6 @@ public class DateHistogramInterval implements Writeable<DateHistogramInterval> {
     public static final DateHistogramInterval QUARTER = new DateHistogramInterval("1q");
     public static final DateHistogramInterval YEAR = new DateHistogramInterval("1y");
 
-    public static final DateHistogramInterval readFromStream(StreamInput in) throws IOException {
-        return SECOND.readFrom(in);
-    }
-
     public static DateHistogramInterval seconds(int sec) {
         return new DateHistogramInterval(sec + "s");
     }
@@ -70,6 +66,19 @@ public class DateHistogramInterval implements Writeable<DateHistogramInterval> {
         this.expression = expression;
     }
+
+    /**
+     * Read from a stream.
+     */
+    public DateHistogramInterval(StreamInput in) throws IOException {
+        expression = in.readString();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(expression);
+    }
+
 
     @Override
     public String toString() {
         return expression;
@@ -91,14 +100,4 @@ public class DateHistogramInterval implements Writeable<DateHistogramInterval> {
         DateHistogramInterval other = (DateHistogramInterval) obj;
         return Objects.equals(expression, other.expression);
     }
-
-    @Override
-    public DateHistogramInterval readFrom(StreamInput in) throws IOException {
-        return new DateHistogramInterval(in.readString());
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeString(expression);
-    }
 }

@@ -36,7 +36,7 @@ import org.elasticsearch.search.internal.SearchContext;
 import java.io.IOException;
 import java.util.Objects;
 
-public class ExtendedBounds implements ToXContent, Writeable<ExtendedBounds> {
+public class ExtendedBounds implements ToXContent, Writeable {
 
     static final ParseField EXTENDED_BOUNDS_FIELD = new ParseField("extended_bounds");
     static final ParseField MIN_FIELD = new ParseField("min");

@@ -57,7 +57,7 @@ public class RangeAggregator extends BucketsAggregator {
     public static final ParseField RANGES_FIELD = new ParseField("ranges");
     public static final ParseField KEYED_FIELD = new ParseField("keyed");
 
-    public static class Range implements Writeable<Range>, ToXContent {
+    public static class Range implements Writeable, ToXContent {
         public static final ParseField KEY_FIELD = new ParseField("key");
         public static final ParseField FROM_FIELD = new ParseField("from");
         public static final ParseField TO_FIELD = new ParseField("to");

@@ -25,7 +25,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.internal.SearchContext;
 
-public abstract class SignificanceHeuristic implements NamedWriteable<SignificanceHeuristic>, ToXContent {
+public abstract class SignificanceHeuristic implements NamedWriteable, ToXContent {
     /**
      * @param subsetFreq The frequency of the term in the selected sample
      * @param subsetSize The size of the selected sample (typically number of docs)

@@ -45,7 +45,7 @@ import java.util.Set;
 
 public abstract class TermsAggregator extends BucketsAggregator {
 
-    public static class BucketCountThresholds implements Writeable<BucketCountThresholds>, ToXContent {
+    public static class BucketCountThresholds implements Writeable, ToXContent {
         private long minDocCount;
         private long shardMinDocCount;
         private int requiredSize;

@@ -71,7 +71,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
     public TermsAggregatorBuilder(StreamInput in) throws IOException {
         super(in, StringTerms.TYPE, ValuesSourceType.ANY);
         bucketCountThresholds = new BucketCountThresholds(in);
-        collectMode = SubAggCollectionMode.BREADTH_FIRST.readFrom(in);
+        collectMode = SubAggCollectionMode.readFromStream(in);
         executionHint = in.readOptionalString();
         includeExclude = in.readOptionalWriteable(IncludeExclude::new);
         order = InternalOrder.Streams.readOrder(in);

@@ -58,7 +58,7 @@ import java.util.TreeSet;
  * Defines the include/exclude regular expression filtering for string terms aggregation. In this filtering logic,
  * exclusion has precedence, where the {@code include} is evaluated first and then the {@code exclude}.
  */
-public class IncludeExclude implements Writeable<IncludeExclude>, ToXContent {
+public class IncludeExclude implements Writeable, ToXContent {
     private static final ParseField INCLUDE_FIELD = new ParseField("include");
     private static final ParseField EXCLUDE_FIELD = new ParseField("exclude");
     private static final ParseField PATTERN_FIELD = new ParseField("pattern");

@@ -64,7 +64,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume
         keyed = in.readBoolean();
         numberOfSignificantValueDigits = in.readVInt();
         compression = in.readDouble();
-        method = PercentilesMethod.TDIGEST.readFrom(in);
+        method = PercentilesMethod.readFromStream(in);
     }
 
     @Override

@@ -64,7 +64,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,
         keyed = in.readBoolean();
         numberOfSignificantValueDigits = in.readVInt();
         compression = in.readDouble();
-        method = PercentilesMethod.TDIGEST.readFrom(in);
+        method = PercentilesMethod.readFromStream(in);
     }
 
     @Override

@@ -28,7 +28,7 @@ import java.io.IOException;
 /**
  * An enum representing the methods for calculating percentiles
 */
-public enum PercentilesMethod implements Writeable<PercentilesMethod> {
+public enum PercentilesMethod implements Writeable {
     /**
      * The TDigest method for calculating percentiles
     */
@@ -51,8 +51,7 @@ public enum PercentilesMethod implements Writeable<PercentilesMethod> {
         return name;
     }
 
-    @Override
-    public PercentilesMethod readFrom(StreamInput in) throws IOException {
+    public static PercentilesMethod readFromStream(StreamInput in) throws IOException {
         int ordinal = in.readVInt();
         if (ordinal < 0 || ordinal >= values().length) {
             throw new IOException("Unknown PercentilesMethod ordinal [" + ordinal + "]");

@@ -37,7 +37,7 @@ import java.util.Objects;
  * specific type.
 */
 public abstract class PipelineAggregatorBuilder<PAB extends PipelineAggregatorBuilder<PAB>> extends ToXContentToBytes
-        implements NamedWriteable<PipelineAggregatorBuilder<PAB>> {
+        implements NamedWriteable {
 
     /**
      * Field shared by many parsers.

@@ -31,7 +31,7 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Map;
 
-public abstract class MovAvgModel implements NamedWriteable<MovAvgModel>, ToXContent {
+public abstract class MovAvgModel implements NamedWriteable, ToXContent {
 
     /**
      * Should this model be fit to the data via a cost minimizing algorithm by default?

@@ -34,7 +34,7 @@ import java.io.IOException;
 /**
  *
 */
-public enum ValueType implements Writeable<ValueType> {
+public enum ValueType implements Writeable {
 
     STRING((byte) 1, "string", "string", ValuesSourceType.BYTES,
             IndexFieldData.class, DocValueFormat.RAW),
@@ -96,8 +96,8 @@ public enum ValueType implements Writeable<ValueType> {
     private final byte id;
     private String preferredName;
 
-    private ValueType(byte id, String description, String preferredName, ValuesSourceType valuesSourceType, Class<? extends IndexFieldData> fieldDataType,
-            DocValueFormat defaultFormat) {
+    private ValueType(byte id, String description, String preferredName, ValuesSourceType valuesSourceType,
+            Class<? extends IndexFieldData> fieldDataType, DocValueFormat defaultFormat) {
         this.id = id;
         this.description = description;
         this.preferredName = preferredName;

@@ -19,34 +19,9 @@
 
 package org.elasticsearch.search.aggregations.support;
 
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Writeable;
-
-import java.io.IOException;
-
-/*
- * The ordinal values for this class are tested in ValuesSourceTypeTests to
- * ensure that the ordinal for each value does not change and break bwc
- */
-public enum ValuesSourceType implements Writeable<ValuesSourceType> {
-
+public enum ValuesSourceType {
     ANY,
     NUMERIC,
     BYTES,
     GEOPOINT;
-
-    @Override
-    public ValuesSourceType readFrom(StreamInput in) throws IOException {
-        int ordinal = in.readVInt();
-        if (ordinal < 0 || ordinal >= values().length) {
-            throw new IOException("Unknown ValuesSourceType ordinal [" + ordinal + "]");
-        }
-        return values()[ordinal];
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeVInt(ordinal());
-    }
 }

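ValuesSourceType stops implementing Writeable altogether here. Note that the deleted header comment pointed at ValuesSourceTypeTests, which pins each constant's ordinal so that any remaining ordinal-based serialization cannot be broken by reordering. A hedged, minimal version of such a guard (the real project uses its test framework; plain asserts keep this sketch self-contained):

public class OrdinalGuardSketch {
    enum ValuesSourceType { ANY, NUMERIC, BYTES, GEOPOINT }

    public static void main(String[] args) {
        // Run with `java -ea OrdinalGuardSketch`; reordering the constants trips these checks.
        assert ValuesSourceType.ANY.ordinal() == 0;
        assert ValuesSourceType.NUMERIC.ordinal() == 1;
        assert ValuesSourceType.BYTES.ordinal() == 2;
        assert ValuesSourceType.GEOPOINT.ordinal() == 3;
        System.out.println("ordinals stable");
    }
}
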
@@ -71,7 +71,7 @@ import java.util.Objects;
  *
  * @see org.elasticsearch.action.search.SearchRequest#source(SearchSourceBuilder)
  */
-public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable<SearchSourceBuilder> {
+public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable {
 
     public static final ParseField FROM_FIELD = new ParseField("from");
     public static final ParseField SIZE_FIELD = new ParseField("size");
@@ -1264,7 +1264,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
         }
     }
 
-    public static class ScriptField implements Writeable<ScriptField>, ToXContent {
+    public static class ScriptField implements Writeable, ToXContent {
 
         private final boolean ignoreFailure;
         private final String fieldName;

@@ -46,8 +46,7 @@ import static org.elasticsearch.common.xcontent.ObjectParser.fromList;
  * This abstract class holds parameters shared by {@link HighlightBuilder} and {@link HighlightBuilder.Field}
  * and provides the common setters, equality, hashCode calculation and common serialization
 */
-public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterBuilder<?>> extends ToXContentToBytes
-        implements Writeable<HB> {
+public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterBuilder<?>> extends ToXContentToBytes implements Writeable {
     public static final ParseField PRE_TAGS_FIELD = new ParseField("pre_tags");
     public static final ParseField POST_TAGS_FIELD = new ParseField("post_tags");
     public static final ParseField FIELDS_FIELD = new ParseField("fields");

@@ -494,7 +494,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
         }
     }
 
-    public enum Order implements Writeable<Order> {
+    public enum Order implements Writeable {
         NONE, SCORE;
 
         public static Order readFromStream(StreamInput in) throws IOException {

@@ -80,6 +80,9 @@ public class CollectorResult implements ToXContent, Writeable {
         this.children = children;
     }
 
+    /**
+     * Read from a stream.
+     */
     public CollectorResult(StreamInput in) throws IOException {
         this.collectorName = in.readString();
         this.reason = in.readString();
@@ -92,6 +95,17 @@ public class CollectorResult implements ToXContent, Writeable {
         }
     }
 
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(collectorName);
+        out.writeString(reason);
+        out.writeLong(time);
+        out.writeVInt(children.size());
+        for (CollectorResult child : children) {
+            child.writeTo(out);
+        }
+    }
+
     /**
      * @return the profiled time for this collector (inclusive of children)
     */
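CollectorResult's writeTo now sits right after the stream constructor and recurses through children the same way the constructor reads them; the profile classes below (ProfileResult, ProfileShardResult) follow suit. A hedged, self-contained sketch of that recursive count-then-children layout (DataInput/DataOutput and plain readInt stand in for the StreamInput/StreamOutput vint API; the class is illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class TreeStreamSketch {
    static final class Node {
        final String name;
        final List<Node> children;

        Node(String name, List<Node> children) {
            this.name = name;
            this.children = children;
        }

        // Read from a stream: own fields first, then a count and one recursive read per child.
        Node(DataInput in) throws IOException {
            name = in.readUTF();
            int size = in.readInt();
            children = new ArrayList<>(size);
            for (int i = 0; i < size; i++) {
                children.add(new Node(in));
            }
        }

        // Mirrors the constructor field for field, in the same order.
        void writeTo(DataOutput out) throws IOException {
            out.writeUTF(name);
            out.writeInt(children.size());
            for (Node child : children) {
                child.writeTo(out);
            }
        }
    }

    public static void main(String[] args) throws IOException {
        Node root = new Node("query", Collections.singletonList(new Node("child", new ArrayList<>())));
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        root.writeTo(new DataOutputStream(bytes));
        Node copy = new Node(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy.name + " has " + copy.children.size() + " child"); // query has 1 child
    }
}

Keeping the constructor and writeTo adjacent and symmetric makes it hard to add a field to one without the other, which the relocations in the next few hunks appear to be after.
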
@@ -137,20 +151,4 @@ public class CollectorResult implements ToXContent, Writeable {
         builder = builder.endObject();
         return builder;
     }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeString(collectorName);
-        out.writeString(reason);
-        out.writeLong(time);
-        out.writeVInt(children.size());
-        for (CollectorResult child : children) {
-            child.writeTo(out);
-        }
-    }
-
-    @Override
-    public Object readFrom(StreamInput in) throws IOException {
-        return new CollectorResult(in);
-    }
 }

@@ -37,7 +37,7 @@ import java.util.stream.Collectors;
  * A container class to hold all the profile results across all shards. Internally
  * holds a map of shard ID -> Profiled results
 */
-public final class InternalProfileShardResults implements Writeable<InternalProfileShardResults>, ToXContent{
+public final class InternalProfileShardResults implements Writeable, ToXContent{
 
     private Map<String, List<ProfileShardResult>> shardResults;
 
@@ -75,11 +75,6 @@ public final class InternalProfileShardResults implements Writeable<InternalProf
         return this.shardResults;
     }
 
-    @Override
-    public InternalProfileShardResults readFrom(StreamInput in) throws IOException {
-        return new InternalProfileShardResults(in);
-    }
-
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeInt(shardResults.size());

@@ -43,7 +43,7 @@ import java.util.Map;
  * Each InternalProfileResult has a List of InternalProfileResults, which will contain
  * "children" queries if applicable
 */
-final class ProfileResult implements Writeable<ProfileResult>, ToXContent {
+final class ProfileResult implements Writeable, ToXContent {
 
     private static final ParseField QUERY_TYPE = new ParseField("query_type");
     private static final ParseField LUCENE_DESCRIPTION = new ParseField("lucene");
@@ -57,7 +57,8 @@ final class ProfileResult implements Writeable<ProfileResult>, ToXContent {
     private final long nodeTime;
     private final List<ProfileResult> children;
 
-    public ProfileResult(String queryType, String luceneDescription, Map<String, Long> timings, List<ProfileResult> children, long nodeTime) {
+    public ProfileResult(String queryType, String luceneDescription, Map<String, Long> timings, List<ProfileResult> children,
+            long nodeTime) {
         this.queryType = queryType;
         this.luceneDescription = luceneDescription;
         this.timings = timings;
@@ -65,6 +66,9 @@ final class ProfileResult implements Writeable<ProfileResult>, ToXContent {
         this.nodeTime = nodeTime;
     }
 
+    /**
+     * Read from a stream.
+     */
     public ProfileResult(StreamInput in) throws IOException{
         this.queryType = in.readString();
         this.luceneDescription = in.readString();
@@ -84,6 +88,22 @@ final class ProfileResult implements Writeable<ProfileResult>, ToXContent {
         }
     }
 
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(queryType);
+        out.writeString(luceneDescription);
+        out.writeLong(nodeTime); // not Vlong because can be negative
+        out.writeVInt(timings.size());
+        for (Map.Entry<String, Long> entry : timings.entrySet()) {
+            out.writeString(entry.getKey());
+            out.writeLong(entry.getValue());
+        }
+        out.writeVInt(children.size());
+        for (ProfileResult child : children) {
+            child.writeTo(out);
+        }
+    }
+
     /**
      * Retrieve the lucene description of this query (e.g. the "explain" text)
     */
@@ -121,27 +141,6 @@ final class ProfileResult implements Writeable<ProfileResult>, ToXContent {
         return Collections.unmodifiableList(children);
     }
 
-    @Override
-    public ProfileResult readFrom(StreamInput in) throws IOException {
-        return new ProfileResult(in);
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeString(queryType);
-        out.writeString(luceneDescription);
-        out.writeLong(nodeTime); // not Vlong because can be negative
-        out.writeVInt(timings.size());
-        for (Map.Entry<String, Long> entry : timings.entrySet()) {
-            out.writeString(entry.getKey());
-            out.writeLong(entry.getValue());
-        }
-        out.writeVInt(children.size());
-        for (ProfileResult child : children) {
-            child.writeTo(out);
-        }
-    }
-
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder = builder.startObject()

@@ -34,7 +34,7 @@ import java.util.List;
  * A container class to hold the profile results for a single shard in the request.
  * Contains a list of query profiles, a collector tree and a total rewrite tree.
 */
-public final class ProfileShardResult implements Writeable<ProfileShardResult>, ToXContent {
+public final class ProfileShardResult implements Writeable, ToXContent {
 
     private final List<ProfileResult> profileResults;
 
@@ -50,6 +50,9 @@ public final class ProfileShardResult implements Writeable<ProfileShardResult>,
         this.rewriteTime = rewriteTime;
     }
 
+    /**
+     * Read from a stream.
+     */
     public ProfileShardResult(StreamInput in) throws IOException {
         int profileSize = in.readVInt();
         profileResults = new ArrayList<>(profileSize);
@@ -61,6 +64,17 @@ public final class ProfileShardResult implements Writeable<ProfileShardResult>,
         rewriteTime = in.readLong();
     }
 
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeVInt(profileResults.size());
+        for (ProfileResult p : profileResults) {
+            p.writeTo(out);
+        }
+        profileCollector.writeTo(out);
+        out.writeLong(rewriteTime);
+    }
+
+
     public List<ProfileResult> getQueryResults() {
         return Collections.unmodifiableList(profileResults);
     }
@@ -86,20 +100,4 @@ public final class ProfileShardResult implements Writeable<ProfileShardResult>,
         builder.endArray();
         return builder;
     }
-
-    @Override
-    public ProfileShardResult readFrom(StreamInput in) throws IOException {
-        return new ProfileShardResult(in);
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeVInt(profileResults.size());
-        for (ProfileResult p : profileResults) {
-            p.writeTo(out);
-        }
-        profileCollector.writeTo(out);
-        out.writeLong(rewriteTime);
-    }
-
 }

@@ -26,7 +26,7 @@ import org.elasticsearch.common.io.stream.Writeable;
 import java.io.IOException;
 import java.util.Locale;
 
-public enum QueryRescoreMode implements Writeable<QueryRescoreMode> {
+public enum QueryRescoreMode implements Writeable {
     Avg {
         @Override
         public float combine(float primary, float secondary) {

@@ -38,7 +38,7 @@ import java.util.Objects;
 /**
  * The abstract base builder for instances of {@link RescoreBuilder}.
 */
-public abstract class RescoreBuilder<RB extends RescoreBuilder<RB>> extends ToXContentToBytes implements NamedWriteable<RB> {
+public abstract class RescoreBuilder<RB extends RescoreBuilder<RB>> extends ToXContentToBytes implements NamedWriteable {
 
     protected Integer windowSize;
 

@@ -46,7 +46,7 @@ import java.util.Objects;
 /**
  *
 */
-public class SearchAfterBuilder implements ToXContent, Writeable<SearchAfterBuilder> {
+public class SearchAfterBuilder implements ToXContent, Writeable {
     public static final ParseField SEARCH_AFTER = new ParseField("search_after");
     private static final Object[] EMPTY_SORT_VALUES = new Object[0];
 

@@ -396,7 +396,7 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
         return NAME;
     }
 
-    public enum ScriptSortType implements Writeable<ScriptSortType> {
+    public enum ScriptSortType implements Writeable {
         /** script sort for a string value **/
         STRING,
         /** script sort for a numeric value **/

@@ -48,7 +48,7 @@ import static java.util.Collections.unmodifiableMap;
 /**
  *
 */
-public abstract class SortBuilder<T extends SortBuilder<?>> extends ToXContentToBytes implements NamedWriteable<T> {
+public abstract class SortBuilder<T extends SortBuilder<T>> extends ToXContentToBytes implements NamedWriteable {
 
     protected SortOrder order = SortOrder.ASC;
     public static final ParseField ORDER_FIELD = new ParseField("order");

@@ -38,7 +38,7 @@ import java.util.Objects;
  * <li>median - Use the median of all values as sort value. Only applicable for number based array fields.</li>
  * </ul>
 */
-public enum SortMode implements Writeable<SortMode> {
+public enum SortMode implements Writeable {
     /** pick the lowest value **/
     MIN,
     /** pick the highest value **/

@@ -31,7 +31,7 @@ import java.util.Locale;
  *
  *
 */
-public enum SortOrder implements Writeable<SortOrder> {
+public enum SortOrder implements Writeable {
     /**
      * Ascending order.
     */

@@ -30,7 +30,7 @@ import java.util.Objects;
 /**
  * An enum representing the valid sorting options
 */
-public enum SortBy implements Writeable<SortBy> {
+public enum SortBy implements Writeable {
     /** Sort should first be based on score, then document frequency and then the term itself. */
     SCORE,
     /** Sort should first be based on document frequency, then score and then the term itself. */

@@ -46,7 +46,7 @@ import java.util.Objects;
  * Suggesting works by suggesting terms/phrases that appear in the suggest text that are similar compared
  * to the terms in provided text. These suggestions are based on several options described in this class.
 */
-public class SuggestBuilder extends ToXContentToBytes implements Writeable<SuggestBuilder> {
+public class SuggestBuilder extends ToXContentToBytes implements Writeable {
     protected static final ParseField GLOBAL_TEXT_FIELD = new ParseField("text");
 
     private String globalText;

@@ -43,7 +43,7 @@ import java.util.Objects;
 /**
  * Base class for the different suggestion implementations.
 */
-public abstract class SuggestionBuilder<T extends SuggestionBuilder<T>> extends ToXContentToBytes implements NamedWriteable<T> {
+public abstract class SuggestionBuilder<T extends SuggestionBuilder<T>> extends ToXContentToBytes implements NamedWriteable {
 
     protected final String field;
     protected String text;

@@ -39,7 +39,7 @@ import java.util.Objects;
 /**
  * Fuzzy options for completion suggester
 */
-public class FuzzyOptions implements ToXContent, Writeable<FuzzyOptions> {
+public class FuzzyOptions implements ToXContent, Writeable {
     static final ParseField FUZZY_OPTIONS = new ParseField("fuzzy");
     private static final ParseField TRANSPOSITION_FIELD = new ParseField("transpositions");
     private static final ParseField MIN_LENGTH_FIELD = new ParseField("min_length");

@@ -38,7 +38,7 @@ import java.io.IOException;
 /**
  * Regular expression options for completion suggester
 */
-public class RegexOptions implements ToXContent, Writeable<RegexOptions> {
+public class RegexOptions implements ToXContent, Writeable {
     static final ParseField REGEX_OPTIONS = new ParseField("regex");
     private static final ParseField FLAGS_VALUE = new ParseField("flags", "flags_value");
    private static final ParseField MAX_DETERMINIZED_STATES = new ParseField("max_determinized_states");

@@ -40,8 +40,7 @@ import java.util.Objects;
 import java.util.Set;
 import java.util.function.Consumer;
 
-public final class DirectCandidateGeneratorBuilder
-        implements CandidateGenerator {
+public final class DirectCandidateGeneratorBuilder implements CandidateGenerator {
 
     private static final String TYPE = "direct_generator";
 

@@ -708,7 +708,7 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionB
     /**
      * {@link CandidateGenerator} interface.
     */
-    public interface CandidateGenerator extends Writeable<CandidateGenerator>, ToXContent {
+    public interface CandidateGenerator extends Writeable, ToXContent {
         String getType();
 
         PhraseSuggestionContext.DirectCandidateGenerator build(MapperService mapperService) throws IOException;

@@ -30,7 +30,7 @@ import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory;
 
 import java.io.IOException;
 
-public abstract class SmoothingModel implements NamedWriteable<SmoothingModel>, ToXContent {
+public abstract class SmoothingModel implements NamedWriteable, ToXContent {
 
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {

Some files were not shown because too many files have changed in this diff.