Internal: Change version constant names for already released versions (#23416)
We have many version constants in master that have already been released, but are still marked (by naming convention) as unreleased. This commit renames those version constants.
parent 1c44f4d62b
commit dc0e93ed62
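The renamed constants feed the wire-compatibility checks that appear throughout this diff (in.getVersion().onOrAfter(...), out.getVersion().before(...)); only the Java identifiers change, while the numeric IDs (for example 5010199 for 5.1.1) stay the same. As a rough illustration of that gating pattern, here is a minimal self-contained sketch. It is not the real org.elasticsearch.Version class; the name MiniVersion and the scenario in main are invented for this example, and the meaning of the trailing "99" in the IDs is an assumption.

// Illustrative sketch only: a stripped-down model of the version-gated
// serialization pattern this rename touches. The real logic lives in
// org.elasticsearch.Version and StreamInput/StreamOutput; the class and
// scenario below are invented for the example.
public class MiniVersion implements Comparable<MiniVersion> {

    // IDs mirror the scheme visible in Version.java: 5010199 is 5.1.1 and
    // 5020099 is 5.2.0 (the trailing two digits look like a build suffix).
    public static final MiniVersion V_5_1_1 = new MiniVersion(5010199);
    public static final MiniVersion V_5_2_0 = new MiniVersion(5020099);

    public final int id;

    MiniVersion(int id) {
        this.id = id;
    }

    public boolean onOrAfter(MiniVersion other) {
        return id >= other.id;
    }

    public boolean before(MiniVersion other) {
        return id < other.id;
    }

    @Override
    public int compareTo(MiniVersion other) {
        return Integer.compare(id, other.id);
    }

    @Override
    public String toString() {
        // Decode major.minor.revision from the packed id.
        return (id / 1000000) + "." + (id / 10000 % 100) + "." + (id / 100 % 100);
    }

    public static void main(String[] args) {
        // A field added in 5.2.0 (for example AnalyzeResponse's positionLength)
        // is only written when the receiving node is new enough to read it.
        MiniVersion remote = V_5_1_1;
        if (remote.onOrAfter(V_5_2_0)) {
            System.out.println("write the optional 5.2.0+ field");
        } else {
            System.out.println("skip the field for pre-" + V_5_2_0 + " node " + remote);
        }
    }
}

The rename in this commit only drops the _UNRELEASED suffix from the Java names; the version IDs and the comparisons built on them are unchanged.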
@@ -984,11 +984,11 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
STATUS_EXCEPTION(org.elasticsearch.ElasticsearchStatusException.class, org.elasticsearch.ElasticsearchStatusException::new, 145,
UNKNOWN_VERSION_ADDED),
TASK_CANCELLED_EXCEPTION(org.elasticsearch.tasks.TaskCancelledException.class,
- org.elasticsearch.tasks.TaskCancelledException::new, 146, Version.V_5_1_1_UNRELEASED),
+ org.elasticsearch.tasks.TaskCancelledException::new, 146, Version.V_5_1_1),
SHARD_LOCK_OBTAIN_FAILED_EXCEPTION(org.elasticsearch.env.ShardLockObtainFailedException.class,
org.elasticsearch.env.ShardLockObtainFailedException::new, 147, Version.V_5_0_2),
UNKNOWN_NAMED_OBJECT_EXCEPTION(org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException.class,
- org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException::new, 148, Version.V_5_2_0_UNRELEASED);
+ org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException::new, 148, Version.V_5_2_0);

final Class<? extends ElasticsearchException> exceptionClass;
final CheckedFunction<StreamInput, ? extends ElasticsearchException, IOException> constructor;
@@ -100,18 +100,18 @@ public class Version implements Comparable<Version> {
public static final int V_5_0_3_ID_UNRELEASED = 5000399;
public static final Version V_5_0_3_UNRELEASED = new Version(V_5_0_3_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_3_0);
// no version constant for 5.1.0 due to inadvertent release
- public static final int V_5_1_1_ID_UNRELEASED = 5010199;
- public static final Version V_5_1_1_UNRELEASED = new Version(V_5_1_1_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_3_0);
- public static final int V_5_1_2_ID_UNRELEASED = 5010299;
- public static final Version V_5_1_2_UNRELEASED = new Version(V_5_1_2_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_3_0);
+ public static final int V_5_1_1_ID = 5010199;
+ public static final Version V_5_1_1 = new Version(V_5_1_1_ID, org.apache.lucene.util.Version.LUCENE_6_3_0);
+ public static final int V_5_1_2_ID = 5010299;
+ public static final Version V_5_1_2 = new Version(V_5_1_2_ID, org.apache.lucene.util.Version.LUCENE_6_3_0);
public static final int V_5_1_3_ID_UNRELEASED = 5010399;
public static final Version V_5_1_3_UNRELEASED = new Version(V_5_1_3_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_3_0);
- public static final int V_5_2_0_ID_UNRELEASED = 5020099;
- public static final Version V_5_2_0_UNRELEASED = new Version(V_5_2_0_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_4_0);
- public static final int V_5_2_1_ID_UNRELEASED = 5020199;
- public static final Version V_5_2_1_UNRELEASED = new Version(V_5_2_1_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_4_1);
- public static final int V_5_2_2_ID_UNRELEASED = 5020299;
- public static final Version V_5_2_2_UNRELEASED = new Version(V_5_2_2_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_4_1);
+ public static final int V_5_2_0_ID = 5020099;
+ public static final Version V_5_2_0 = new Version(V_5_2_0_ID, org.apache.lucene.util.Version.LUCENE_6_4_0);
+ public static final int V_5_2_1_ID = 5020199;
+ public static final Version V_5_2_1 = new Version(V_5_2_1_ID, org.apache.lucene.util.Version.LUCENE_6_4_1);
+ public static final int V_5_2_2_ID = 5020299;
+ public static final Version V_5_2_2 = new Version(V_5_2_2_ID, org.apache.lucene.util.Version.LUCENE_6_4_1);
public static final int V_5_2_3_ID_UNRELEASED = 5020399;
public static final Version V_5_2_3_UNRELEASED = new Version(V_5_2_3_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_4_1);
public static final int V_5_3_0_ID_UNRELEASED = 5030099;
@@ -142,18 +142,18 @@ public class Version implements Comparable<Version> {
return V_5_3_0_UNRELEASED;
case V_5_2_3_ID_UNRELEASED:
return V_5_2_3_UNRELEASED;
- case V_5_2_2_ID_UNRELEASED:
- return V_5_2_2_UNRELEASED;
- case V_5_2_1_ID_UNRELEASED:
- return V_5_2_1_UNRELEASED;
- case V_5_2_0_ID_UNRELEASED:
- return V_5_2_0_UNRELEASED;
+ case V_5_2_2_ID:
+ return V_5_2_2;
+ case V_5_2_1_ID:
+ return V_5_2_1;
+ case V_5_2_0_ID:
+ return V_5_2_0;
case V_5_1_3_ID_UNRELEASED:
return V_5_1_3_UNRELEASED;
- case V_5_1_2_ID_UNRELEASED:
- return V_5_1_2_UNRELEASED;
- case V_5_1_1_ID_UNRELEASED:
- return V_5_1_1_UNRELEASED;
+ case V_5_1_2_ID:
+ return V_5_1_2;
+ case V_5_1_1_ID:
+ return V_5_1_1;
case V_5_0_3_ID_UNRELEASED:
return V_5_0_3_UNRELEASED;
case V_5_0_2_ID:
@@ -249,8 +249,8 @@ public class ClusterAllocationExplainRequest extends MasterNodeRequest<ClusterAl
}

private void checkVersion(Version version) {
- if (version.before(Version.V_5_2_0_UNRELEASED)) {
- throw new IllegalArgumentException("cannot explain shards in a mixed-cluster with pre-" + Version.V_5_2_0_UNRELEASED +
+ if (version.before(Version.V_5_2_0)) {
+ throw new IllegalArgumentException("cannot explain shards in a mixed-cluster with pre-" + Version.V_5_2_0 +
" nodes, node version [" + version + "]");
}
}
@@ -134,7 +134,7 @@ public class ClusterSearchShardsRequest extends MasterNodeReadRequest<ClusterSea
routing = in.readOptionalString();
preference = in.readOptionalString();

- if (in.getVersion().onOrBefore(Version.V_5_1_1_UNRELEASED)) {
+ if (in.getVersion().onOrBefore(Version.V_5_1_1)) {
//types
in.readStringArray();
}
@@ -153,7 +153,7 @@ public class ClusterSearchShardsRequest extends MasterNodeReadRequest<ClusterSea
out.writeOptionalString(routing);
out.writeOptionalString(preference);

- if (out.getVersion().onOrBefore(Version.V_5_1_1_UNRELEASED)) {
+ if (out.getVersion().onOrBefore(Version.V_5_1_1)) {
//types
out.writeStringArray(Strings.EMPTY_ARRAY);
}
@@ -72,7 +72,7 @@ public class ClusterSearchShardsResponse extends ActionResponse implements ToXCo
for (int i = 0; i < nodes.length; i++) {
nodes[i] = new DiscoveryNode(in);
}
- if (in.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
int size = in.readVInt();
indicesAndFilters = new HashMap<>();
for (int i = 0; i < size; i++) {
@@ -94,7 +94,7 @@ public class ClusterSearchShardsResponse extends ActionResponse implements ToXCo
for (DiscoveryNode node : nodes) {
node.writeTo(out);
}
- if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
out.writeVInt(indicesAndFilters.size());
for (Map.Entry<String, AliasFilter> entry : indicesAndFilters.entrySet()) {
out.writeString(entry.getKey());
@@ -117,7 +117,7 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
startOffset = in.readInt();
endOffset = in.readInt();
position = in.readVInt();
- if (in.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_2_0)) {
Integer len = in.readOptionalVInt();
if (len != null) {
positionLength = len;
@@ -137,7 +137,7 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
out.writeInt(startOffset);
out.writeInt(endOffset);
out.writeVInt(position);
- if (out.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
out.writeOptionalVInt(positionLength > 1 ? positionLength : null);
}
out.writeOptionalString(type);
@@ -402,7 +402,7 @@ public abstract class AbstractBulkByScrollRequest<Self extends AbstractBulkByScr
retryBackoffInitialTime = new TimeValue(in);
maxRetries = in.readVInt();
requestsPerSecond = in.readFloat();
- if (in.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
slices = in.readVInt();
} else {
slices = 1;
@@ -421,12 +421,12 @@ public abstract class AbstractBulkByScrollRequest<Self extends AbstractBulkByScr
retryBackoffInitialTime.writeTo(out);
out.writeVInt(maxRetries);
out.writeFloat(requestsPerSecond);
- if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
out.writeVInt(slices);
} else {
if (slices > 1) {
throw new IllegalArgumentException("Attempting to send sliced reindex-style request to a node that doesn't support "
- + "it. Version is [" + out.getVersion() + "] but must be [" + Version.V_5_1_1_UNRELEASED + "]");
+ + "it. Version is [" + out.getVersion() + "] but must be [" + Version.V_5_1_1 + "]");
}
}
}
@@ -189,7 +189,7 @@ public abstract class BulkByScrollTask extends CancellableTask {
}

public Status(StreamInput in) throws IOException {
- if (in.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
sliceId = in.readOptionalVInt();
} else {
sliceId = null;
@@ -207,7 +207,7 @@ public abstract class BulkByScrollTask extends CancellableTask {
requestsPerSecond = in.readFloat();
reasonCancelled = in.readOptionalString();
throttledUntil = new TimeValue(in);
- if (in.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
sliceStatuses = in.readList(stream -> stream.readOptionalWriteable(StatusOrException::new));
} else {
sliceStatuses = emptyList();
@@ -216,7 +216,7 @@ public abstract class BulkByScrollTask extends CancellableTask {

@Override
public void writeTo(StreamOutput out) throws IOException {
- if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
out.writeOptionalVInt(sliceId);
}
out.writeVLong(total);
@@ -232,7 +232,7 @@ public abstract class BulkByScrollTask extends CancellableTask {
out.writeFloat(requestsPerSecond);
out.writeOptionalString(reasonCancelled);
throttledUntil.writeTo(out);
- if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
out.writeVInt(sliceStatuses.size());
for (StatusOrException sliceStatus : sliceStatuses) {
out.writeOptionalWriteable(sliceStatus);
@@ -323,14 +323,14 @@ public abstract class FieldStats<T> implements Writeable, ToXContent {
out.writeLong(sumTotalTermFreq);
out.writeBoolean(isSearchable);
out.writeBoolean(isAggregatable);
- if (out.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
out.writeBoolean(hasMinMax);
if (hasMinMax) {
writeMinMax(out);
}
} else {
assert hasMinMax : "cannot serialize null min/max fieldstats in a mixed-cluster " +
- "with pre-" + Version.V_5_2_0_UNRELEASED + " nodes, remote version [" + out.getVersion() + "]";
+ "with pre-" + Version.V_5_2_0 + " nodes, remote version [" + out.getVersion() + "]";
writeMinMax(out);
}
}
@@ -705,7 +705,7 @@ public abstract class FieldStats<T> implements Writeable, ToXContent {
boolean isSearchable = in.readBoolean();
boolean isAggregatable = in.readBoolean();
boolean hasMinMax = true;
- if (in.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_2_0)) {
hasMinMax = in.readBoolean();
}
switch (type) {
@@ -93,7 +93,7 @@ public class FieldStatsResponse extends BroadcastResponse {
for (Map.Entry<String, Map<String, FieldStats>> entry1 : indicesMergedFieldStats.entrySet()) {
out.writeString(entry1.getKey());
int size = entry1.getValue().size();
- if (out.getVersion().before(Version.V_5_2_0_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_2_0)) {
// filter fieldstats without min/max information
for (FieldStats stats : entry1.getValue().values()) {
if (stats.hasMinMax() == false) {
@@ -103,7 +103,7 @@ public class FieldStatsResponse extends BroadcastResponse {
}
out.writeVInt(size);
for (Map.Entry<String, FieldStats> entry2 : entry1.getValue().entrySet()) {
- if (entry2.getValue().hasMinMax() || out.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (entry2.getValue().hasMinMax() || out.getVersion().onOrAfter(Version.V_5_2_0)) {
out.writeString(entry2.getKey());
entry2.getValue().writeTo(out);
}
@@ -68,7 +68,7 @@ public class FieldStatsShardResponse extends BroadcastShardResponse {
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
final Map<String, FieldStats<?> > stats;
- if (out.getVersion().before(Version.V_5_2_0_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_2_0)) {
/**
* FieldStats with null min/max are not (de)serializable in versions prior to {@link Version.V_5_2_0_UNRELEASED}
*/
@@ -554,7 +554,7 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
out.writeBytesReference(source);
out.writeByte(opType.getId());
// ES versions below 5.1.2 don't know about resolveVersionDefaults but resolve the version eagerly (which messes with validation).
- if (out.getVersion().before(Version.V_5_1_2_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_1_2)) {
out.writeLong(resolveVersionDefaults());
} else {
out.writeLong(version);
@@ -40,7 +40,7 @@ public class SnapshotDeletionsInProgress extends AbstractNamedDiffable<Custom> i

public static final String TYPE = "snapshot_deletions";
// the version where SnapshotDeletionsInProgress was introduced
- public static final Version VERSION_INTRODUCED = Version.V_5_2_0_UNRELEASED;
+ public static final Version VERSION_INTRODUCED = Version.V_5_2_0;

// the list of snapshot deletion request entries
private final List<Entry> entries;
@@ -51,7 +51,7 @@ public class SnapshotsInProgress extends AbstractNamedDiffable<Custom> implement
// a snapshot in progress from a pre 5.2.x node
public static final long UNDEFINED_REPOSITORY_STATE_ID = -2L;
// the version where repository state ids were introduced
- private static final Version REPOSITORY_ID_INTRODUCED_VERSION = Version.V_5_2_0_UNRELEASED;
+ private static final Version REPOSITORY_ID_INTRODUCED_VERSION = Version.V_5_2_0;

@Override
public boolean equals(Object o) {
@@ -80,7 +80,7 @@ public class NodeAllocationResult implements ToXContent, Writeable, Comparable<N
public NodeAllocationResult(StreamInput in) throws IOException {
node = new DiscoveryNode(in);
shardStoreInfo = in.readOptionalWriteable(ShardStoreInfo::new);
- if (in.getVersion().before(Version.V_5_2_1_UNRELEASED)) {
+ if (in.getVersion().before(Version.V_5_2_1)) {
canAllocateDecision = Decision.readFrom(in);
} else {
canAllocateDecision = in.readOptionalWriteable(Decision::readFrom);
@@ -93,7 +93,7 @@ public class NodeAllocationResult implements ToXContent, Writeable, Comparable<N
public void writeTo(StreamOutput out) throws IOException {
node.writeTo(out);
out.writeOptionalWriteable(shardStoreInfo);
- if (out.getVersion().before(Version.V_5_2_1_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_2_1)) {
if (canAllocateDecision == null) {
Decision.NO.writeTo(out);
} else {
@@ -212,7 +212,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
name = in.readOptionalString();
nestedPath = in.readOptionalString();
parentChildType = in.readOptionalString();
- if (in.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_2_0)) {
ignoreUnmapped = in.readBoolean();
}
from = in.readVInt();
@@ -253,7 +253,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
out.writeOptionalString(name);
out.writeOptionalString(nestedPath);
out.writeOptionalString(parentChildType);
- if (out.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
out.writeBoolean(ignoreUnmapped);
}
out.writeVInt(from);
@@ -212,11 +212,11 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
autoGeneratePhraseQueries = in.readBoolean();
allowLeadingWildcard = in.readOptionalBoolean();
analyzeWildcard = in.readOptionalBoolean();
- if (in.getVersion().before(Version.V_5_1_1_UNRELEASED)) {
+ if (in.getVersion().before(Version.V_5_1_1)) {
in.readBoolean(); // lowercase_expanded_terms
}
enablePositionIncrements = in.readBoolean();
- if (in.getVersion().before(Version.V_5_1_1_UNRELEASED)) {
+ if (in.getVersion().before(Version.V_5_1_1)) {
in.readString(); // locale
}
fuzziness = new Fuzziness(in);
@@ -232,7 +232,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
timeZone = in.readOptionalTimeZone();
escape = in.readBoolean();
maxDeterminizedStates = in.readVInt();
- if (in.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
splitOnWhitespace = in.readBoolean();
useAllFields = in.readOptionalBoolean();
} else {
@@ -256,11 +256,11 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
out.writeBoolean(this.autoGeneratePhraseQueries);
out.writeOptionalBoolean(this.allowLeadingWildcard);
out.writeOptionalBoolean(this.analyzeWildcard);
- if (out.getVersion().before(Version.V_5_1_1_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_1_1)) {
out.writeBoolean(true); // lowercase_expanded_terms
}
out.writeBoolean(this.enablePositionIncrements);
- if (out.getVersion().before(Version.V_5_1_1_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_1_1)) {
out.writeString(Locale.ROOT.toLanguageTag()); // locale
}
this.fuzziness.writeTo(out);
@@ -276,7 +276,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
out.writeOptionalTimeZone(timeZone);
out.writeBoolean(this.escape);
out.writeVInt(this.maxDeterminizedStates);
- if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
out.writeBoolean(this.splitOnWhitespace);
out.writeOptionalBoolean(this.useAllFields);
}
@@ -112,7 +112,7 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
if (formatString != null) {
format = Joda.forPattern(formatString);
}
- if (in.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_2_0)) {
String relationString = in.readOptionalString();
if (relationString != null) {
relation = ShapeRelation.getRelationByName(relationString);
@@ -133,7 +133,7 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
formatString = this.format.format();
}
out.writeOptionalString(formatString);
- if (out.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
String relationString = null;
if (this.relation != null) {
relationString = this.relation.getRelationName();
@@ -157,19 +157,19 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
flags = in.readInt();
analyzer = in.readOptionalString();
defaultOperator = Operator.readFromStream(in);
- if (in.getVersion().before(Version.V_5_1_1_UNRELEASED)) {
+ if (in.getVersion().before(Version.V_5_1_1)) {
in.readBoolean(); // lowercase_expanded_terms
}
settings.lenient(in.readBoolean());
- if (in.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
this.lenientSet = in.readBoolean();
}
settings.analyzeWildcard(in.readBoolean());
- if (in.getVersion().before(Version.V_5_1_1_UNRELEASED)) {
+ if (in.getVersion().before(Version.V_5_1_1)) {
in.readString(); // locale
}
minimumShouldMatch = in.readOptionalString();
- if (in.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
settings.quoteFieldSuffix(in.readOptionalString());
useAllFields = in.readOptionalBoolean();
}
@@ -186,19 +186,19 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
out.writeInt(flags);
out.writeOptionalString(analyzer);
defaultOperator.writeTo(out);
- if (out.getVersion().before(Version.V_5_1_1_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_1_1)) {
out.writeBoolean(true); // lowercase_expanded_terms
}
out.writeBoolean(settings.lenient());
- if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
out.writeBoolean(lenientSet);
}
out.writeBoolean(settings.analyzeWildcard());
- if (out.getVersion().before(Version.V_5_1_1_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_1_1)) {
out.writeString(Locale.ROOT.toLanguageTag()); // locale
}
out.writeOptionalString(minimumShouldMatch);
- if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
out.writeOptionalString(settings.quoteFieldSuffix());
out.writeOptionalBoolean(useAllFields);
}
@@ -106,7 +106,7 @@ public class RefreshStats implements Streamable, ToXContent {
public void readFrom(StreamInput in) throws IOException {
total = in.readVLong();
totalTimeInMillis = in.readVLong();
- if (in.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_2_0)) {
listeners = in.readVInt();
} else {
listeners = 0;
@@ -117,7 +117,7 @@ public class RefreshStats implements Streamable, ToXContent {
public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(total);
out.writeVLong(totalTimeInMillis);
- if (out.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
out.writeVInt(listeners);
}
}
@@ -52,7 +52,7 @@ public class OsStats implements Writeable, ToXContent {
this.cpu = new Cpu(in);
this.mem = new Mem(in);
this.swap = new Swap(in);
- if (in.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
this.cgroup = in.readOptionalWriteable(Cgroup::new);
} else {
this.cgroup = null;
@@ -65,7 +65,7 @@ public class OsStats implements Writeable, ToXContent {
cpu.writeTo(out);
mem.writeTo(out);
swap.writeTo(out);
- if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
out.writeOptionalWriteable(cgroup);
}
}
@@ -496,7 +496,7 @@ public final class Script implements ToXContentObject, Writeable {
// Version 5.1 to 5.3 (exclusive) requires all Script members to be non-null and supports the potential
// for more options than just XContentType. Reorders the read in contents to be in
// same order as the constructor.
- } else if (in.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ } else if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
this.type = ScriptType.readFrom(in);
this.lang = in.readString();

@@ -565,7 +565,7 @@ public final class Script implements ToXContentObject, Writeable {
// Version 5.1 to 5.3 (exclusive) requires all Script members to be non-null and supports the potential
// for more options than just XContentType. Reorders the written out contents to be in
// same order as the constructor.
- } else if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ } else if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
type.writeTo(out);

if (lang == null) {
@@ -430,7 +430,7 @@ public class IncludeExclude implements Writeable, ToXContent {
} else {
excludeValues = null;
}
- if (in.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_2_0)) {
incNumPartitions = in.readVInt();
incZeroBasedPartition = in.readVInt();
} else {
@@ -463,7 +463,7 @@ public class IncludeExclude implements Writeable, ToXContent {
out.writeBytesRef(value);
}
}
- if (out.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
out.writeVInt(incNumPartitions);
out.writeVInt(incZeroBasedPartition);
}
@@ -52,7 +52,7 @@ public final class AliasFilter implements Writeable {

public AliasFilter(StreamInput input) throws IOException {
aliases = input.readStringArray();
- if (input.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (input.getVersion().onOrAfter(Version.V_5_1_1)) {
filter = input.readOptionalNamedWriteable(QueryBuilder.class);
reparseAliases = false;
} else {
@@ -88,7 +88,7 @@ public final class AliasFilter implements Writeable {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeStringArray(aliases);
- if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
out.writeOptionalNamedWriteable(filter);
}
}
@@ -177,7 +177,7 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
source = in.readOptionalWriteable(SearchSourceBuilder::new);
types = in.readStringArray();
aliasFilter = new AliasFilter(in);
- if (in.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_2_0)) {
indexBoost = in.readFloat();
} else {
// Nodes < 5.2.0 doesn't send index boost. Read it from source.
@@ -205,7 +205,7 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
out.writeOptionalWriteable(source);
out.writeStringArray(types);
aliasFilter.writeTo(out);
- if (out.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
out.writeFloat(indexBoost);
}
if (!asKey) {
@@ -68,7 +68,7 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
private static final String TOTAL_SHARDS = "total_shards";
private static final String SUCCESSFUL_SHARDS = "successful_shards";

- private static final Version VERSION_INCOMPATIBLE_INTRODUCED = Version.V_5_2_0_UNRELEASED;
+ private static final Version VERSION_INCOMPATIBLE_INTRODUCED = Version.V_5_2_0;

private final SnapshotId snapshotId;
@@ -970,7 +970,7 @@ public class ExceptionSerializationTests extends ESTestCase {
try (StreamInput in = decoded.streamInput()) {
//randomize the version across released and unreleased ones
Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_0_3_UNRELEASED, Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
in.setVersion(version);
ElasticsearchException exception = new ElasticsearchException(in);
assertEquals("test message", exception.getMessage());
@@ -41,7 +41,6 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.sameInstance;

public class VersionTests extends ESTestCase {
@@ -100,7 +99,7 @@ public class VersionTests extends ESTestCase {
public void testMinimumIndexCompatibilityVersion() {
assertEquals(Version.V_5_0_0, Version.V_6_0_0_alpha1_UNRELEASED.minimumIndexCompatibilityVersion());
assertEquals(Version.V_2_0_0, Version.V_5_0_0.minimumIndexCompatibilityVersion());
- assertEquals(Version.V_2_0_0, Version.V_5_1_1_UNRELEASED.minimumIndexCompatibilityVersion());
+ assertEquals(Version.V_2_0_0, Version.V_5_1_1.minimumIndexCompatibilityVersion());
assertEquals(Version.V_2_0_0, Version.V_5_0_0_alpha1.minimumIndexCompatibilityVersion());
}
@@ -93,7 +93,7 @@ public class ClusterSearchShardsResponseTests extends ESTestCase {
assertEquals(clusterSearchShardsGroup.getShardId(), deserializedGroup.getShardId());
assertArrayEquals(clusterSearchShardsGroup.getShards(), deserializedGroup.getShards());
}
- if (version.onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (version.onOrAfter(Version.V_5_1_1)) {
assertEquals(clusterSearchShardsResponse.getIndicesAndFilters(), deserialized.getIndicesAndFilters());
} else {
assertNull(deserialized.getIndicesAndFilters());
@@ -51,7 +51,7 @@ public class PutStoredScriptRequestTests extends ESTestCase {
public void testSerializationBwc() throws IOException {
final byte[] rawStreamBytes = Base64.getDecoder().decode("ADwDCG11c3RhY2hlAQZzY3JpcHQCe30A");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_0_3_UNRELEASED, Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(rawStreamBytes)) {
in.setVersion(version);
PutStoredScriptRequest serialized = new PutStoredScriptRequest();
@@ -52,7 +52,7 @@ public class CreateIndexRequestTests extends ESTestCase {
public void testSerializationBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("ADwDAANmb28APAMBB215X3R5cGULeyJ0eXBlIjp7fX0AAAD////+AA==");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_0_3_UNRELEASED, Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
CreateIndexRequest serialized = new CreateIndexRequest();
@@ -94,7 +94,7 @@ public class PutMappingRequestTests extends ESTestCase {
public void testSerializationBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("ADwDAQNmb28MAA8tLS0KZm9vOiAiYmFyIgoAPAMAAAA=");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_0_3_UNRELEASED, Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
PutMappingRequest request = new PutMappingRequest();
@@ -89,7 +89,7 @@ public class PutIndexTemplateRequestTests extends ESTestCase {
public void testPutIndexTemplateRequestSerializationXContentBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("ADwDAANmb28IdGVtcGxhdGUAAAAAAAABA2Jhcg8tLS0KZm9vOiAiYmFyIgoAAAAAAAAAAAAAAAA=");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_0_3_UNRELEASED, Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
PutIndexTemplateRequest request = new PutIndexTemplateRequest();
@@ -74,7 +74,7 @@ public class BulkByScrollTaskStatusTests extends ESTestCase {
assertEquals(expected.getRequestsPerSecond(), actual.getRequestsPerSecond(), 0f);
assertEquals(expected.getReasonCancelled(), actual.getReasonCancelled());
assertEquals(expected.getThrottledUntil(), actual.getThrottledUntil());
- if (version.onOrAfter(Version.V_5_1_1_UNRELEASED)) {
+ if (version.onOrAfter(Version.V_5_1_1)) {
assertThat(actual.getSliceStatuses(), hasSize(expected.getSliceStatuses().size()));
for (int i = 0; i < expected.getSliceStatuses().size(); i++) {
BulkByScrollTask.StatusOrException sliceStatus = expected.getSliceStatuses().get(i);
@@ -113,7 +113,7 @@ public class FieldStatsRequestTests extends ESTestCase {
FieldStatsShardResponse deserialized = new FieldStatsShardResponse();
deserialized.readFrom(input);
final Map<String, FieldStats<?>> expected;
- if (version.before(Version.V_5_2_0_UNRELEASED)) {
+ if (version.before(Version.V_5_2_0)) {
expected = deserialized.filterNullMinMax();
} else {
expected = deserialized.getFieldStats();
@@ -177,7 +177,7 @@ public class IndexRequestTests extends ESTestCase {
public void testIndexRequestXContentSerializationBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("AAD////+AgQDZm9vAAAAAQNiYXIBATEAAAAAAnt9AP/////////9AAAA//////////8AAAAAAAA=");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_0_3_UNRELEASED, Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
IndexRequest serialized = new IndexRequest();
@@ -49,7 +49,7 @@ public class PutPipelineRequestTests extends ESTestCase {
public void testSerializationBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("ADwDATECe30=");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_0_3_UNRELEASED, Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
PutPipelineRequest request = new PutPipelineRequest();
@@ -74,7 +74,7 @@ public class SimulatePipelineRequestTests extends ESTestCase {
public void testSerializationWithXContentBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("AAAAAnt9AAA=");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_0_3_UNRELEASED, Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
SimulatePipelineRequest request = new SimulatePipelineRequest();
@@ -269,7 +269,7 @@ public class TermVectorsUnitTests extends ESTestCase {
public void testStreamRequestWithXContentBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("AAABBWluZGV4BHR5cGUCaWQBAnt9AAABDnNvbWVQcmVmZXJlbmNlFgAAAAEA//////////0AAAA=");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_0_3_UNRELEASED, Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
TermVectorsRequest request = new TermVectorsRequest();
@@ -805,7 +805,7 @@ public class BytesStreamsTests extends ESTestCase {
}

assertTrue("If we're not compatible with 5.1.1 we can drop the assertion below",
- Version.CURRENT.minimumCompatibilityVersion().onOrBefore(Version.V_5_1_1_UNRELEASED));
+ Version.CURRENT.minimumCompatibilityVersion().onOrBefore(Version.V_5_1_1));
/* Read -1 as serialized by a version of Elasticsearch that supported writing negative numbers with writeVLong. Note that this
* should be the same test as the first case (when value is negative) but we've kept some bytes so no matter what we do to
* writeVLong in the future we can be sure we can read bytes as written by Elasticsearch before 5.1.2 */
@@ -606,7 +606,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
public void testSerialization() throws IOException {
for (Version version : new Version[] {Version.CURRENT, Version.V_5_0_1}){
for (int i = 0; i < 20; i++) {
- assertSerialization(randomFieldStats(version.onOrAfter(Version.V_5_2_0_UNRELEASED)), version);
+ assertSerialization(randomFieldStats(version.onOrAfter(Version.V_5_2_0)), version);
}
}
}
@@ -308,7 +308,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
public void testItemSerializationBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("AQVpbmRleAEEdHlwZQEODXsiZm9vIjoiYmFyIn0A/wD//////////QAAAAAAAAAA");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_0_3_UNRELEASED, Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
Item item = new Item(in);
@@ -37,10 +37,10 @@ public class RefreshStatsTests extends AbstractStreamableTestCase<RefreshStats>

public void testPre5Dot2() throws IOException {
// We can drop the compatibility once the assertion just below this list fails
- assertTrue(Version.CURRENT.minimumCompatibilityVersion().before(Version.V_5_2_0_UNRELEASED));
+ assertTrue(Version.CURRENT.minimumCompatibilityVersion().before(Version.V_5_2_0));

RefreshStats instance = createTestInstance();
- RefreshStats copied = copyInstance(instance, Version.V_5_1_1_UNRELEASED);
+ RefreshStats copied = copyInstance(instance, Version.V_5_1_1);
assertEquals(instance.getTotal(), copied.getTotal());
assertEquals(instance.getTotalTimeInMillis(), copied.getTotalTimeInMillis());
assertEquals(0, copied.getListeners());
@@ -55,7 +55,7 @@ public class PipelineConfigurationTests extends ESTestCase {
public void testSerializationBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("ATECe30AAAA=");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_0_3_UNRELEASED, Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
PipelineConfiguration configuration = PipelineConfiguration.readFrom(in);
@@ -250,7 +250,7 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
public void testSerializationBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("P4AAAAAFZmllbGQEdHlwZQAAAAAAAA57ImZvbyI6ImJhciJ9AAAAAA==");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
- Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
+ Version.V_5_0_3_UNRELEASED, Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
PercolateQueryBuilder queryBuilder = new PercolateQueryBuilder(in);
@@ -90,7 +90,7 @@ public class RemoteInfo implements Writeable {
headers.put(in.readString(), in.readString());
}
this.headers = unmodifiableMap(headers);
- if (in.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_2_0)) {
socketTimeout = new TimeValue(in);
connectTimeout = new TimeValue(in);
} else {
@@ -112,7 +112,7 @@ public class RemoteInfo implements Writeable {
out.writeString(header.getKey());
out.writeString(header.getValue());
}
- if (out.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
socketTimeout.writeTo(out);
connectTimeout.writeTo(out);
}
@@ -162,7 +162,7 @@ public class RoundTripTests extends ESTestCase {
assertEquals(request.getRemoteInfo().getUsername(), tripped.getRemoteInfo().getUsername());
assertEquals(request.getRemoteInfo().getPassword(), tripped.getRemoteInfo().getPassword());
assertEquals(request.getRemoteInfo().getHeaders(), tripped.getRemoteInfo().getHeaders());
- if (version.onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+ if (version.onOrAfter(Version.V_5_2_0)) {
assertEquals(request.getRemoteInfo().getSocketTimeout(), tripped.getRemoteInfo().getSocketTimeout());
assertEquals(request.getRemoteInfo().getConnectTimeout(), tripped.getRemoteInfo().getConnectTimeout());
} else {