Rename some version constants to handle changes in core (elastic/x-pack-elasticsearch#1575)

Handle core renaming some constants.

Original commit: elastic/x-pack-elasticsearch@6db55e0225
Authored by Nik Everett on 2017-05-26 18:36:48 -04:00; committed by GitHub
parent 592af606f8
commit 4e39bbb84a
18 changed files with 33 additions and 33 deletions
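
The change is mechanical: every wire-compatibility check that referenced one of the *_UNRELEASED constants now uses the renamed constant from core (Version.V_5_5_0_UNRELEASED becomes Version.V_5_5_0, and Version.V_6_0_0_alpha1_UNRELEASED becomes Version.V_6_0_0_alpha1). A minimal sketch of the pattern follows; the class and field names are illustrative only, not taken from the commit:

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;

// Illustrative sketch only -- not a class touched by this commit.
class VersionGateSketch {
    long bucketSpan;

    void readFrom(StreamInput in) throws IOException {
        // Previously: in.getVersion().onOrAfter(Version.V_5_5_0_UNRELEASED)
        if (in.getVersion().onOrAfter(Version.V_5_5_0)) {
            bucketSpan = in.readLong();   // field exists only on 5.5+ streams
        } else {
            bucketSpan = 0;               // older senders never wrote it
        }
    }

    void writeTo(StreamOutput out) throws IOException {
        // Previously: out.getVersion().onOrAfter(Version.V_5_5_0_UNRELEASED)
        if (out.getVersion().onOrAfter(Version.V_5_5_0)) {
            out.writeLong(bucketSpan);
        }
    }
}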


@@ -740,6 +740,6 @@ public class OpenJobAction extends Action<OpenJobAction.Request, OpenJobAction.R
}
static boolean nodeSupportsJobVersion(Version nodeVersion, Version jobVersion) {
- return nodeVersion.onOrAfter(Version.V_5_5_0_UNRELEASED);
+ return nodeVersion.onOrAfter(Version.V_5_5_0);
}
}


@@ -50,7 +50,7 @@ public enum DatafeedState implements Task.Status {
public void writeTo(StreamOutput out) throws IOException {
DatafeedState state = this;
// STARTING & STOPPING states were introduced in v5.5.
- if (out.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_5_0)) {
if (this == STARTING) {
state = STOPPED;
} else if (this == STOPPING) {


@@ -187,7 +187,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
public Job(StreamInput in) throws IOException {
jobId = in.readString();
jobType = in.readString();
- if (in.getVersion().onOrAfter(Version.V_5_5_0_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_5_0)) {
jobVersion = in.readBoolean() ? Version.readVersion(in) : null;
} else {
jobVersion = null;
@@ -396,7 +396,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
public void writeTo(StreamOutput out) throws IOException {
out.writeString(jobId);
out.writeString(jobType);
- if (out.getVersion().onOrAfter(Version.V_5_5_0_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_5_0)) {
if (jobVersion != null) {
out.writeBoolean(true);
Version.writeVersion(jobVersion, out);
@@ -613,7 +613,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
public Builder(StreamInput in) throws IOException {
id = in.readOptionalString();
jobType = in.readString();
- if (in.getVersion().onOrAfter(Version.V_5_5_0_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_5_0)) {
jobVersion = in.readBoolean() ? Version.readVersion(in) : null;
}
description = in.readOptionalString();
@@ -756,7 +756,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(id);
out.writeString(jobType);
- if (out.getVersion().onOrAfter(Version.V_5_5_0_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_5_0)) {
if (jobVersion != null) {
out.writeBoolean(true);
Version.writeVersion(jobVersion, out);


@@ -37,7 +37,7 @@ public enum JobState implements ToXContent, Writeable {
public void writeTo(StreamOutput out) throws IOException {
JobState state = this;
// Pre v5.5 the OPENING state didn't exist
- if (this == OPENING && out.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (this == OPENING && out.getVersion().before(Version.V_5_5_0)) {
state = CLOSED;
}
out.writeEnum(state);


@@ -158,7 +158,7 @@ public class AnomalyRecord extends ToXContentToBytes implements Writeable {
public AnomalyRecord(StreamInput in) throws IOException {
jobId = in.readString();
// bwc for removed sequenceNum field
- if (in.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (in.getVersion().before(Version.V_5_5_0)) {
in.readInt();
}
detectorIndex = in.readInt();
@@ -196,7 +196,7 @@ public class AnomalyRecord extends ToXContentToBytes implements Writeable {
public void writeTo(StreamOutput out) throws IOException {
out.writeString(jobId);
// bwc for removed sequenceNum field
- if (out.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_5_0)) {
out.writeInt(0);
}
out.writeInt(detectorIndex);


@@ -122,7 +122,7 @@ public class Bucket extends ToXContentToBytes implements Writeable {
bucketSpan = in.readLong();
initialAnomalyScore = in.readDouble();
// bwc for recordCount
- if (in.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (in.getVersion().before(Version.V_5_5_0)) {
in.readInt();
}
records = in.readList(AnomalyRecord::new);
@@ -131,7 +131,7 @@ public class Bucket extends ToXContentToBytes implements Writeable {
bucketInfluencers = in.readList(BucketInfluencer::new);
processingTimeMs = in.readLong();
// bwc for perPartitionMaxProbability
- if (in.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (in.getVersion().before(Version.V_5_5_0)) {
in.readGenericValue();
}
partitionScores = in.readList(PartitionScore::new);
@@ -145,7 +145,7 @@ public class Bucket extends ToXContentToBytes implements Writeable {
out.writeLong(bucketSpan);
out.writeDouble(initialAnomalyScore);
// bwc for recordCount
- if (out.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_5_0)) {
out.writeInt(0);
}
out.writeList(records);
@@ -154,7 +154,7 @@ public class Bucket extends ToXContentToBytes implements Writeable {
out.writeList(bucketInfluencers);
out.writeLong(processingTimeMs);
// bwc for perPartitionMaxProbability
- if (out.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_5_0)) {
out.writeGenericValue(Collections.emptyMap());
}
out.writeList(partitionScores);


@@ -100,7 +100,7 @@ public class BucketInfluencer extends ToXContentToBytes implements Writeable {
timestamp = new Date(in.readLong());
bucketSpan = in.readLong();
// bwc for removed sequenceNum field
- if (in.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (in.getVersion().before(Version.V_5_5_0)) {
in.readInt();
}
}
@@ -117,7 +117,7 @@ public class BucketInfluencer extends ToXContentToBytes implements Writeable {
out.writeLong(timestamp.getTime());
out.writeLong(bucketSpan);
// bwc for removed sequenceNum field
- if (out.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_5_0)) {
out.writeInt(0);
}
}


@@ -100,7 +100,7 @@ public class Influencer extends ToXContentToBytes implements Writeable {
isInterim = in.readBoolean();
bucketSpan = in.readLong();
// bwc for removed sequenceNum field
- if (in.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (in.getVersion().before(Version.V_5_5_0)) {
in.readInt();
}
}
@@ -117,7 +117,7 @@ public class Influencer extends ToXContentToBytes implements Writeable {
out.writeBoolean(isInterim);
out.writeLong(bucketSpan);
// bwc for removed sequenceNum field
- if (out.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_5_0)) {
out.writeInt(0);
}
}


@@ -98,7 +98,7 @@ public class ModelPlot extends ToXContentToBytes implements Writeable {
public ModelPlot(StreamInput in) throws IOException {
jobId = in.readString();
// timestamp isn't optional in v5.5
- if (in.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (in.getVersion().before(Version.V_5_5_0)) {
if (in.readBoolean()) {
timestamp = new Date(in.readLong());
} else {
@@ -108,7 +108,7 @@ public class ModelPlot extends ToXContentToBytes implements Writeable {
timestamp = new Date(in.readLong());
}
// bwc for removed id field
- if (in.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (in.getVersion().before(Version.V_5_5_0)) {
in.readOptionalString();
}
partitionFieldName = in.readOptionalString();
@@ -122,7 +122,7 @@ public class ModelPlot extends ToXContentToBytes implements Writeable {
modelUpper = in.readDouble();
modelMedian = in.readDouble();
actual = in.readDouble();
- if (in.getVersion().onOrAfter(Version.V_5_5_0_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_5_5_0)) {
bucketSpan = in.readLong();
} else {
bucketSpan = 0;
@@ -133,7 +133,7 @@ public class ModelPlot extends ToXContentToBytes implements Writeable {
public void writeTo(StreamOutput out) throws IOException {
out.writeString(jobId);
// timestamp isn't optional in v5.5
- if (out.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_5_0)) {
boolean hasTimestamp = timestamp != null;
out.writeBoolean(hasTimestamp);
if (hasTimestamp) {
@@ -143,7 +143,7 @@ public class ModelPlot extends ToXContentToBytes implements Writeable {
out.writeLong(timestamp.getTime());
}
// bwc for removed id field
- if (out.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
+ if (out.getVersion().before(Version.V_5_5_0)) {
out.writeOptionalString(null);
}
out.writeOptionalString(partitionFieldName);
@@ -157,7 +157,7 @@ public class ModelPlot extends ToXContentToBytes implements Writeable {
out.writeDouble(modelUpper);
out.writeDouble(modelMedian);
out.writeDouble(actual);
- if (out.getVersion().onOrAfter(Version.V_5_5_0_UNRELEASED)) {
+ if (out.getVersion().onOrAfter(Version.V_5_5_0)) {
out.writeLong(bucketSpan);
}
}


@@ -216,7 +216,7 @@ public final class TokenService extends AbstractComponent {
} else {
// the token exists and the value is at least as long as we'd expect
final Version version = Version.readVersion(in);
- if (version.before(Version.V_5_5_0_UNRELEASED)) {
+ if (version.before(Version.V_5_5_0)) {
listener.onResponse(null);
} else {
final BytesKey decodedSalt = new BytesKey(in.readByteArray());


@@ -151,7 +151,7 @@ public class NativeRealmMigrator implements IndexLifecycleManager.IndexDataMigra
* does the right thing.
*/
private boolean shouldConvertDefaultPasswords(@Nullable Version previousVersion) {
- return previousVersion != null && previousVersion.before(Version.V_6_0_0_alpha1_UNRELEASED);
+ return previousVersion != null && previousVersion.before(Version.V_6_0_0_alpha1);
}
@SuppressWarnings("unused")


@@ -11,7 +11,7 @@ import org.elasticsearch.xpack.security.support.MetadataUtils;
public class BeatsSystemUser extends User {
public static final String NAME = "beats_system";
private static final String ROLE_NAME = "beats_system";
- public static final Version DEFINED_SINCE = Version.V_6_0_0_alpha1_UNRELEASED;
+ public static final Version DEFINED_SINCE = Version.V_6_0_0_alpha1;
public static final BuiltinUserInfo USER_INFO = new BuiltinUserInfo(NAME, ROLE_NAME, DEFINED_SINCE);
public BeatsSystemUser(boolean enabled) {


@@ -92,7 +92,7 @@ public class WatcherStatsResponse extends BaseNodesResponse<WatcherStatsResponse
@Override
public void readFrom(StreamInput in) throws IOException {
- if (in.getVersion().onOrAfter(Version.V_6_0_0_alpha1_UNRELEASED)) {
+ if (in.getVersion().onOrAfter(Version.V_6_0_0_alpha1)) {
super.readFrom(in);
watcherMetaData = new WatcherMetaData(in.readBoolean());
} else {


@@ -61,7 +61,7 @@ public class DatafeedStateTests extends ESTestCase {
assertEquals(DatafeedState.STARTED, enumCaptor.getValue());
// POST 5.5 enums a written as is
- when(out.getVersion()).thenReturn(Version.V_5_5_0_UNRELEASED);
+ when(out.getVersion()).thenReturn(Version.V_5_5_0);
DatafeedState.STARTING.writeTo(out);
assertEquals(DatafeedState.STARTING, enumCaptor.getValue());


@@ -79,7 +79,7 @@ public class JobStateTests extends ESTestCase {
JobState.OPENING.writeTo(out);
assertEquals(JobState.CLOSED, enumCaptor.getValue());
- when(out.getVersion()).thenReturn(Version.V_5_5_0_UNRELEASED);
+ when(out.getVersion()).thenReturn(Version.V_5_5_0);
doAnswer(new Answer<Void>() {
@Override


@@ -399,8 +399,8 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
public void testGetCompatibleJobTypes_givenVersionAfter_V_5_4() {
assertThat(Job.getCompatibleJobTypes(Version.V_5_4_0), contains(Job.ANOMALY_DETECTOR_JOB_TYPE));
assertThat(Job.getCompatibleJobTypes(Version.V_5_4_0).size(), equalTo(1));
- assertThat(Job.getCompatibleJobTypes(Version.V_5_5_0_UNRELEASED), contains(Job.ANOMALY_DETECTOR_JOB_TYPE));
- assertThat(Job.getCompatibleJobTypes(Version.V_5_5_0_UNRELEASED).size(), equalTo(1));
+ assertThat(Job.getCompatibleJobTypes(Version.V_5_5_0), contains(Job.ANOMALY_DETECTOR_JOB_TYPE));
+ assertThat(Job.getCompatibleJobTypes(Version.V_5_5_0).size(), equalTo(1));
}
public static Job.Builder buildJobBuilder(String id, Date date) {


@@ -155,7 +155,7 @@ public class NativeRealmMigratorTests extends ESTestCase {
}
public void testNoChangeOnUpgradeAfterV5_3() throws Exception {
- verifyUpgrade(randomFrom(Version.V_6_0_0_alpha1_UNRELEASED), null, false);
+ verifyUpgrade(randomFrom(Version.V_6_0_0_alpha1), null, false);
}
public void testDisableLogstashBeatsAndConvertPasswordsOnUpgradeFromVersionPriorToV5_2() throws Exception {


@@ -369,6 +369,6 @@ public class ReservedRealmTests extends ESTestCase {
assertThat(versionPredicate.test(Version.V_5_2_0), is(true));
break;
}
- assertThat(versionPredicate.test(Version.V_6_0_0_alpha1_UNRELEASED), is(true));
+ assertThat(versionPredicate.test(Version.V_6_0_0_alpha1), is(true));
}
}