Bump version to 6.0.0-beta1
This commit does two things:

- bumps the version from 6.0.0-alpha3 to 6.0.0-beta1
- renames the 6.0.0-alpha3 version constant to 6.0.0-beta1

Relates #25621
parent c75ddd2c85
commit c084542731
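For context, the new constant's numeric ID (6000026, where 6.0.0-alpha3 was 6000003) follows the encoding implied by the constants in this diff: major * 1,000,000 + minor * 10,000 + revision * 100 + a build number marking the release stage (alphaN = N, betaN = 25 + N, GA = 99). A minimal sketch of that arithmetic, assuming only this encoding (illustrative code, not part of the commit):

// Illustrative only -- not part of this commit. Assumes the ID encoding described above.
final class VersionIdSketch {
    static int id(int major, int minor, int revision, int build) {
        return major * 1_000_000 + minor * 10_000 + revision * 100 + build;
    }

    public static void main(String[] args) {
        System.out.println(id(6, 0, 0, 3));   // 6000003 -> V_6_0_0_alpha3_ID (the old constant)
        System.out.println(id(6, 0, 0, 26));  // 6000026 -> V_6_0_0_beta1_ID (beta1 = 25 + 1)
        System.out.println(id(2, 0, 0, 99));  // 2000099 -> 2.0.0 GA, the ID passed to Version.fromId in the tests below
    }
}
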
@@ -1,5 +1,5 @@
 # When updating elasticsearch, please update 'rest' version in core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
-elasticsearch = 6.0.0-alpha3
+elasticsearch = 6.0.0-beta1
 lucene = 7.0.0-snapshot-ad2cb77
 
 # optional dependencies

@@ -94,10 +94,10 @@ public class Version implements Comparable<Version> {
     public static final int V_6_0_0_alpha2_ID = 6000002;
     public static final Version V_6_0_0_alpha2 =
         new Version(V_6_0_0_alpha2_ID, org.apache.lucene.util.Version.LUCENE_7_0_0);
-    public static final int V_6_0_0_alpha3_ID = 6000003;
-    public static final Version V_6_0_0_alpha3 =
-        new Version(V_6_0_0_alpha3_ID, org.apache.lucene.util.Version.LUCENE_7_0_0);
-    public static final Version CURRENT = V_6_0_0_alpha3;
+    public static final int V_6_0_0_beta1_ID = 6000026;
+    public static final Version V_6_0_0_beta1 =
+        new Version(V_6_0_0_beta1_ID, org.apache.lucene.util.Version.LUCENE_7_0_0);
+    public static final Version CURRENT = V_6_0_0_beta1;
 
     // unreleased versions must be added to the above list with the suffix _UNRELEASED (with the exception of CURRENT)
 

@@ -112,8 +112,8 @@ public class Version implements Comparable<Version> {
 
     public static Version fromId(int id) {
         switch (id) {
-            case V_6_0_0_alpha3_ID:
-                return V_6_0_0_alpha3;
+            case V_6_0_0_beta1_ID:
+                return V_6_0_0_beta1;
             case V_6_0_0_alpha2_ID:
                 return V_6_0_0_alpha2;
             case V_6_0_0_alpha1_ID:

@@ -240,7 +240,7 @@ public class AnalyzeRequest extends SingleShardRequest<AnalyzeRequest> {
         field = in.readOptionalString();
         explain = in.readBoolean();
         attributes = in.readStringArray();
-        if (in.getVersion().onOrAfter(Version.V_6_0_0_alpha3)) {
+        if (in.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
             normalizer = in.readOptionalString();
         }
     }

@@ -256,7 +256,7 @@ public class AnalyzeRequest extends SingleShardRequest<AnalyzeRequest> {
         out.writeOptionalString(field);
         out.writeBoolean(explain);
         out.writeStringArray(attributes);
-        if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha3)) {
+        if (out.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
             out.writeOptionalString(normalizer);
         }
     }

@@ -34,10 +34,8 @@ import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.indices.analysis.AnalysisModule;
 
-import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.Reader;
-import java.nio.file.Files;
 import java.util.List;
 
 public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {

@@ -58,7 +56,7 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
 
         this.ignoreCase =
             settings.getAsBooleanLenientForPreEs6Indices(indexSettings.getIndexVersionCreated(), "ignore_case", false, deprecationLogger);
-        if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_0_0_alpha3) && settings.get("ignore_case") != null) {
+        if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_0_0_beta1) && settings.get("ignore_case") != null) {
             deprecationLogger.deprecated(
                 "This tokenize synonyms with whatever tokenizer and token filters appear before it in the chain. " +
                 "If you need ignore case with this filter, you should set lowercase filter before this");

@@ -68,7 +66,7 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
             settings.getAsBooleanLenientForPreEs6Indices(indexSettings.getIndexVersionCreated(), "expand", true, deprecationLogger);
 
         // for backward compatibility
-        if (indexSettings.getIndexVersionCreated().before(Version.V_6_0_0_alpha3)) {
+        if (indexSettings.getIndexVersionCreated().before(Version.V_6_0_0_beta1)) {
             String tokenizerName = settings.get("tokenizer", "whitespace");
             AnalysisModule.AnalysisProvider<TokenizerFactory> tokenizerFactoryFactory =
                 analysisRegistry.getTokenizerProvider(tokenizerName, indexSettings);

@@ -131,7 +131,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
             if (indexOptions() != IndexOptions.NONE) {
                 failIfNotIndexed();
                 BytesRef[] bytesRefs = new BytesRef[values.size()];
-                final boolean is5xIndex = context.indexVersionCreated().before(Version.V_6_0_0_alpha3);
+                final boolean is5xIndex = context.indexVersionCreated().before(Version.V_6_0_0_beta1);
                 for (int i = 0; i < bytesRefs.length; i++) {
                     BytesRef id;
                     if (is5xIndex) {

@@ -166,7 +166,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
                 public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
                         CircuitBreakerService breakerService, MapperService mapperService) {
                     final IndexFieldData<?> fieldData = fieldDataBuilder.build(indexSettings, fieldType, cache, breakerService, mapperService);
-                    if (indexSettings.getIndexVersionCreated().before(Version.V_6_0_0_alpha3)) {
+                    if (indexSettings.getIndexVersionCreated().before(Version.V_6_0_0_beta1)) {
                         // ids were indexed as utf-8
                         return fieldData;
                     }

@@ -288,7 +288,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
     @Override
     protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
         if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
-            if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_0_0_alpha3)) {
+            if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_0_0_beta1)) {
                 BytesRef id = Uid.encodeId(context.sourceToParse().id());
                 fields.add(new Field(NAME, id, fieldType));
             } else {

@@ -35,7 +35,6 @@ import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;

@@ -805,7 +804,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         if (hasMapping(type) == false) {
             return null;
         }
-        if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_0_0_alpha3)) {
+        if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_0_0_beta1)) {
            assert indexSettings.isSingleType();
            return new Term(IdFieldMapper.NAME, Uid.encodeId(id));
        } else if (indexSettings.isSingleType()) {

@@ -158,7 +158,7 @@ public class UidFieldMapper extends MetadataFieldMapper {
             if (uidString.startsWith(expectedPrefix)) {
                 String id = uidString.substring(expectedPrefix.length(), uidString.length());
                 BytesRef encodedId;
-                if (context.indexVersionCreated().onOrAfter(Version.V_6_0_0_alpha3)) {
+                if (context.indexVersionCreated().onOrAfter(Version.V_6_0_0_beta1)) {
                     encodedId = Uid.encodeId(id);
                 } else {
                     encodedId = new BytesRef(id);

@@ -679,7 +679,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
             doc.addDynamicMappingsUpdate(docMapper.getMapping());
         }
         Term uid;
-        if (indexCreatedVersion.onOrAfter(Version.V_6_0_0_alpha3)) {
+        if (indexCreatedVersion.onOrAfter(Version.V_6_0_0_beta1)) {
             uid = new Term(IdFieldMapper.NAME, Uid.encodeId(doc.id()));
         } else if (docMapper.getDocumentMapper().idFieldMapper().fieldType().indexOptions() != IndexOptions.NONE) {
             uid = new Term(IdFieldMapper.NAME, doc.id());

@@ -776,7 +776,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
     }
 
     private Term extractUidForDelete(String type, String id) {
-        if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_0_0_alpha3)) {
+        if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_0_0_beta1)) {
             assert indexSettings.isSingleType();
             // This is only correct because we create types dynamically on delete operations
             // otherwise this could match the same _id from a different type

@@ -223,7 +223,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
             final long minGenerationToRecoverFrom;
             if (checkpoint.minTranslogGeneration < 0) {
                 final Version indexVersionCreated = indexSettings().getIndexVersionCreated();
-                assert indexVersionCreated.before(Version.V_6_0_0_alpha3) :
+                assert indexVersionCreated.before(Version.V_6_0_0_beta1) :
                     "no minTranslogGeneration in checkpoint, but index was created with version [" + indexVersionCreated + "]";
                 minGenerationToRecoverFrom = deletionPolicy.getMinTranslogGenerationForRecovery();
             } else {

@@ -100,7 +100,7 @@ public class TranslogStats extends ToXContentToBytes implements Streamable {
     public void readFrom(StreamInput in) throws IOException {
         numberOfOperations = in.readVInt();
         translogSizeInBytes = in.readVLong();
-        if (in.getVersion().onOrAfter(Version.V_6_0_0_alpha3)) {
+        if (in.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
             uncommittedOperations = in.readVInt();
             uncommittedSizeInBytes = in.readVLong();
         } else {

@@ -113,7 +113,7 @@ public class TranslogStats extends ToXContentToBytes implements Streamable {
     public void writeTo(StreamOutput out) throws IOException {
         out.writeVInt(numberOfOperations);
         out.writeVLong(translogSizeInBytes);
-        if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha3)) {
+        if (out.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
             out.writeVInt(uncommittedOperations);
             out.writeVLong(uncommittedSizeInBytes);
         }

@@ -180,7 +180,7 @@ public final class SearchHits implements Streamable, ToXContent, Iterable<Search
     @Override
     public void readFrom(StreamInput in) throws IOException {
         final boolean hasTotalHits;
-        if (in.getVersion().onOrAfter(Version.V_6_0_0_alpha3)) {
+        if (in.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
             hasTotalHits = in.readBoolean();
         } else {
             hasTotalHits = true;

@@ -205,7 +205,7 @@ public final class SearchHits implements Streamable, ToXContent, Iterable<Search
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         final boolean hasTotalHits;
-        if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha3)) {
+        if (out.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
             hasTotalHits = totalHits >= 0;
             out.writeBoolean(hasTotalHits);
         } else {

@@ -227,7 +227,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
         if (in.getVersion().onOrAfter(Version.V_5_3_0)) {
             collapse = in.readOptionalWriteable(CollapseBuilder::new);
         }
-        if (in.getVersion().onOrAfter(Version.V_6_0_0_alpha3)) {
+        if (in.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
             trackTotalHits = in.readBoolean();
         } else {
             trackTotalHits = true;

@@ -283,7 +283,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
         if (out.getVersion().onOrAfter(Version.V_5_3_0)) {
             out.writeOptionalWriteable(collapse);
         }
-        if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha3)) {
+        if (out.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
             out.writeBoolean(trackTotalHits);
         }
     }

@@ -64,7 +64,7 @@ grant codeBase "${codebase.mocksocket-1.2.jar}" {
 };
 
 
-grant codeBase "${codebase.rest-6.0.0-alpha3-SNAPSHOT.jar}" {
+grant codeBase "${codebase.rest-6.0.0-beta1-SNAPSHOT.jar}" {
   // rest makes socket connections for rest tests
   permission java.net.SocketPermission "*", "connect";
 };

@@ -34,7 +34,7 @@ import java.util.Map;
 import java.util.Set;
 
 import static org.elasticsearch.Version.V_5_3_0;
-import static org.elasticsearch.Version.V_6_0_0_alpha3;
+import static org.elasticsearch.Version.V_6_0_0_beta1;
 import static org.elasticsearch.test.VersionUtils.randomVersion;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.Matchers.containsString;

@@ -46,30 +46,30 @@ import static org.hamcrest.Matchers.sameInstance;
 public class VersionTests extends ESTestCase {
 
     public void testVersionComparison() throws Exception {
-        assertThat(V_5_3_0.before(V_6_0_0_alpha3), is(true));
+        assertThat(V_5_3_0.before(V_6_0_0_beta1), is(true));
         assertThat(V_5_3_0.before(V_5_3_0), is(false));
-        assertThat(V_6_0_0_alpha3.before(V_5_3_0), is(false));
+        assertThat(V_6_0_0_beta1.before(V_5_3_0), is(false));
 
-        assertThat(V_5_3_0.onOrBefore(V_6_0_0_alpha3), is(true));
+        assertThat(V_5_3_0.onOrBefore(V_6_0_0_beta1), is(true));
         assertThat(V_5_3_0.onOrBefore(V_5_3_0), is(true));
-        assertThat(V_6_0_0_alpha3.onOrBefore(V_5_3_0), is(false));
+        assertThat(V_6_0_0_beta1.onOrBefore(V_5_3_0), is(false));
 
-        assertThat(V_5_3_0.after(V_6_0_0_alpha3), is(false));
+        assertThat(V_5_3_0.after(V_6_0_0_beta1), is(false));
         assertThat(V_5_3_0.after(V_5_3_0), is(false));
-        assertThat(V_6_0_0_alpha3.after(V_5_3_0), is(true));
+        assertThat(V_6_0_0_beta1.after(V_5_3_0), is(true));
 
-        assertThat(V_5_3_0.onOrAfter(V_6_0_0_alpha3), is(false));
+        assertThat(V_5_3_0.onOrAfter(V_6_0_0_beta1), is(false));
         assertThat(V_5_3_0.onOrAfter(V_5_3_0), is(true));
-        assertThat(V_6_0_0_alpha3.onOrAfter(V_5_3_0), is(true));
+        assertThat(V_6_0_0_beta1.onOrAfter(V_5_3_0), is(true));
 
         assertTrue(Version.fromString("5.0.0-alpha2").onOrAfter(Version.fromString("5.0.0-alpha1")));
         assertTrue(Version.fromString("5.0.0").onOrAfter(Version.fromString("5.0.0-beta2")));
         assertTrue(Version.fromString("5.0.0-rc1").onOrAfter(Version.fromString("5.0.0-beta24")));
         assertTrue(Version.fromString("5.0.0-alpha24").before(Version.fromString("5.0.0-beta0")));
 
-        assertThat(V_5_3_0, is(lessThan(V_6_0_0_alpha3)));
+        assertThat(V_5_3_0, is(lessThan(V_6_0_0_beta1)));
         assertThat(V_5_3_0.compareTo(V_5_3_0), is(0));
-        assertThat(V_6_0_0_alpha3, is(greaterThan(V_5_3_0)));
+        assertThat(V_6_0_0_beta1, is(greaterThan(V_5_3_0)));
     }
 
     public void testMin() {

@@ -97,7 +97,7 @@ public class VersionTests extends ESTestCase {
     }
 
     public void testMinimumIndexCompatibilityVersion() {
-        assertEquals(Version.V_5_0_0, Version.V_6_0_0_alpha3.minimumIndexCompatibilityVersion());
+        assertEquals(Version.V_5_0_0, Version.V_6_0_0_beta1.minimumIndexCompatibilityVersion());
         assertEquals(Version.fromId(2000099), Version.V_5_0_0.minimumIndexCompatibilityVersion());
         assertEquals(Version.fromId(2000099),
             Version.V_5_1_1.minimumIndexCompatibilityVersion());

@@ -157,7 +157,7 @@ public class VersionTests extends ESTestCase {
     public void testIndexCreatedVersion() {
         // an actual index has a IndexMetaData.SETTING_INDEX_UUID
         final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_2,
-            Version.V_5_2_0, Version.V_6_0_0_alpha3);
+            Version.V_5_2_0, Version.V_6_0_0_beta1);
         assertEquals(version, Version.indexCreated(Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "foo").put(IndexMetaData.SETTING_VERSION_CREATED, version).build()));
     }
 

@@ -171,10 +171,10 @@ public class VersionTests extends ESTestCase {
         // from 6.0 on we are supporting the latest minor of the previous major... this might fail once we add a new version ie. 5.x is
         // released since we need to bump the supported minor in Version#minimumCompatibilityVersion()
         Version lastVersion = VersionUtils.getPreviousVersion(Version.V_6_0_0_alpha1);
-        assertEquals(lastVersion.major, Version.V_6_0_0_alpha3.minimumCompatibilityVersion().major);
+        assertEquals(lastVersion.major, Version.V_6_0_0_beta1.minimumCompatibilityVersion().major);
         assertEquals("did you miss to bump the minor in Version#minimumCompatibilityVersion()",
-            lastVersion.minor, Version.V_6_0_0_alpha3.minimumCompatibilityVersion().minor);
-        assertEquals(0, Version.V_6_0_0_alpha3.minimumCompatibilityVersion().revision);
+            lastVersion.minor, Version.V_6_0_0_beta1.minimumCompatibilityVersion().minor);
+        assertEquals(0, Version.V_6_0_0_beta1.minimumCompatibilityVersion().revision);
     }
 
     public void testToString() {

@@ -97,7 +97,7 @@ public class AnalyzeRequestTests extends ESTestCase {
         // AnalyzeRequest serializedRequest = new AnalyzeRequest("foo");
         // serializedRequest.text("text");
         // serializedRequest.normalizer("normalizer");
-        // Using Version.V_6_0_0_alpha3
+        // Using Version.V_6_0_0_beta1
         final byte[] data = Base64.getDecoder().decode("AAABA2ZvbwEEdGV4dAAAAAAAAAABCm5vcm1hbGl6ZXI=");
         final Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.V_5_4_0);
         try (StreamInput in = StreamInput.wrap(data)) {

@@ -200,7 +200,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
     PercolateQueryBuilder(StreamInput in) throws IOException {
         super(in);
         field = in.readString();
-        if (in.getVersion().before(Version.V_6_0_0_alpha3)) {
+        if (in.getVersion().before(Version.V_6_0_0_beta1)) {
             documentType = in.readString();
         } else {
             documentType = in.readOptionalString();

@@ -230,7 +230,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
     @Override
     protected void doWriteTo(StreamOutput out) throws IOException {
         out.writeString(field);
-        if (out.getVersion().before(Version.V_6_0_0_alpha3)) {
+        if (out.getVersion().before(Version.V_6_0_0_beta1)) {
             out.writeString(documentType);
         } else {
             out.writeOptionalString(documentType);

@@ -53,14 +53,14 @@ public class VersionUtilsTests extends ESTestCase {
 
         // sub range
         got = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
-            Version.V_6_0_0_alpha3);
+            Version.V_6_0_0_beta1);
         assertTrue(got.onOrAfter(Version.V_5_0_0));
-        assertTrue(got.onOrBefore(Version.V_6_0_0_alpha3));
+        assertTrue(got.onOrBefore(Version.V_6_0_0_beta1));
 
         // unbounded lower
-        got = VersionUtils.randomVersionBetween(random(), null, Version.V_6_0_0_alpha3);
+        got = VersionUtils.randomVersionBetween(random(), null, Version.V_6_0_0_beta1);
         assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
-        assertTrue(got.onOrBefore(Version.V_6_0_0_alpha3));
+        assertTrue(got.onOrBefore(Version.V_6_0_0_beta1));
         got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allReleasedVersions().get(0));
         assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
         assertTrue(got.onOrBefore(VersionUtils.allReleasedVersions().get(0)));

@@ -78,9 +78,9 @@ public class VersionUtilsTests extends ESTestCase {
         assertEquals(got, VersionUtils.getFirstVersion());
         got = VersionUtils.randomVersionBetween(random(), Version.CURRENT, Version.CURRENT);
         assertEquals(got, Version.CURRENT);
-        got = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_alpha3,
-            Version.V_6_0_0_alpha3);
-        assertEquals(got, Version.V_6_0_0_alpha3);
+        got = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_beta1,
+            Version.V_6_0_0_beta1);
+        assertEquals(got, Version.V_6_0_0_beta1);
 
         // implicit range of one
         got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.getFirstVersion());

@@ -153,8 +153,8 @@ public class VersionUtilsTests extends ESTestCase {
         public static final Version V_5_4_0 = Version.fromString("5.4.0");
         public static final Version V_6_0_0_alpha1 = Version.fromString("6.0.0-alpha1");
         public static final Version V_6_0_0_alpha2 = Version.fromString("6.0.0-alpha2");
-        public static final Version V_6_0_0_alpha3 = Version.fromString("6.0.0-alpha3");
-        public static final Version CURRENT = V_6_0_0_alpha3;
+        public static final Version V_6_0_0_beta1 = Version.fromString("6.0.0-beta1");
+        public static final Version CURRENT = V_6_0_0_beta1;
     }
 
     public void testResolveReleasedVersionsForUnstableBranch() {

@@ -164,7 +164,7 @@ public class VersionUtilsTests extends ESTestCase {
         List<Version> unreleased = t.v2();
         assertEquals(Arrays.asList(TestUnstableBranch.V_5_3_0, TestUnstableBranch.V_5_3_1,
             TestUnstableBranch.V_6_0_0_alpha1, TestUnstableBranch.V_6_0_0_alpha2), released);
-        assertEquals(Arrays.asList(TestUnstableBranch.V_5_3_2, TestUnstableBranch.V_5_4_0, TestUnstableBranch.V_6_0_0_alpha3), unreleased);
+        assertEquals(Arrays.asList(TestUnstableBranch.V_5_3_2, TestUnstableBranch.V_5_4_0, TestUnstableBranch.V_6_0_0_beta1), unreleased);
     }
 
     /**