[BUG] Fix versioning issues discovered through version bump (#884)
This commit fixes up the version framework to ensure a smooth version bump across releases.

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
parent 18625952a9
commit 0317b604f6
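A note on the recurring pattern in the hunks below: raw major-number checks such as nodes.getBWCVersion().major >= 6 are replaced with onOrAfter(LegacyESVersion.V_6_0_0), because the current OpenSearch major resets to 1 while the legacy Elasticsearch releases it interoperates with use majors 6 and 7. The following is a minimal, self-contained sketch of that ordering assumption; it uses toy types, not OpenSearch's real Version and LegacyESVersion classes.

// Minimal, self-contained sketch (toy types, not OpenSearch's real Version or
// LegacyESVersion classes) of why a raw major-number check breaks once the current
// major resets to 1, while an explicit ordering against the legacy line still works.
public final class VersionOrderingSketch {

    // Toy version ordered so that every legacy Elasticsearch release sorts before
    // every OpenSearch release, then by major/minor/revision within each line.
    record ToyVersion(boolean legacy, int major, int minor, int revision) implements Comparable<ToyVersion> {
        @Override
        public int compareTo(ToyVersion o) {
            if (legacy != o.legacy) {
                return legacy ? -1 : 1; // legacy releases sort first
            }
            int c = Integer.compare(major, o.major);
            if (c != 0) {
                return c;
            }
            c = Integer.compare(minor, o.minor);
            return c != 0 ? c : Integer.compare(revision, o.revision);
        }

        boolean onOrAfter(ToyVersion o) {
            return compareTo(o) >= 0;
        }
    }

    public static void main(String[] args) {
        ToyVersion legacy600 = new ToyVersion(true, 6, 0, 0);      // stand-in for LegacyESVersion.V_6_0_0
        ToyVersion opensearch100 = new ToyVersion(false, 1, 0, 0); // stand-in for OpenSearch 1.0.0

        System.out.println(opensearch100.major() >= 6);         // false: the old check misfires on 1.x
        System.out.println(opensearch100.onOrAfter(legacy600)); // true: the replacement check holds
    }
}

The same reasoning drives the explicit Version.V_1_0_0 comparisons added to OpenSearchRestTestCase further down.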
BwcVersions.java

@@ -269,14 +269,17 @@ public class BwcVersions {
        unreleased.add(currentVersion);

        // No unreleased versions for 1.0.0
        // todo remove this hack
        if (currentVersion.equals(Version.fromString("1.0.0"))) {
            return unmodifiableList(unreleased);
        }

        // version 1 is the first release, there is no previous "unreleased version":
        // the tip of the previous major is unreleased for sure, be it a minor or a bugfix
        if (currentVersion.getMajor() != 1) {
            // the tip of the previous major is unreleased for sure, be it a minor or a bugfix
-           final Version latestOfPreviousMajor = getLatestVersionByKey(this.groupByMajor, currentVersion.getMajor() - 1);
+           final Version latestOfPreviousMajor = getLatestVersionByKey(
+               this.groupByMajor,
+               currentVersion.getMajor() == 1 ? 7 : currentVersion.getMajor() - 1
+           );
            unreleased.add(latestOfPreviousMajor);
            if (latestOfPreviousMajor.getRevision() == 0) {
                // if the previous major is a x.y.0 release, then the tip of the minor before that (y-1) is also unreleased
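The substantive change in the hunk above is the previous-major lookup: when the current major is 1, the predecessor line whose tip may still be unreleased is legacy Elasticsearch 7.x rather than a nonexistent major 0. A standalone sketch of that mapping follows; the class and method names are illustrative and not part of the OpenSearch build code.

// Standalone sketch of the previous-major mapping used above. The constant 7 mirrors
// the last legacy Elasticsearch major; class and method names are illustrative only.
final class PreviousMajorSketch {

    // OpenSearch 1.x succeeds legacy Elasticsearch 7.x, so its "previous major" is 7.
    static int previousMajor(int currentMajor) {
        return currentMajor == 1 ? 7 : currentMajor - 1;
    }

    public static void main(String[] args) {
        System.out.println(previousMajor(1)); // 7 -> treat the tip of legacy 7.x as unreleased
        System.out.println(previousMajor(2)); // 1 -> treat the tip of 1.x as unreleased
    }
}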
IndexingIT.java

@@ -32,6 +32,7 @@
package org.opensearch.backwards;

import org.apache.http.HttpHost;
+import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.client.Request;
import org.opensearch.client.Response;

@@ -198,7 +199,7 @@ public class IndexingIT extends OpenSearchRestTestCase {
            final int numberOfInitialDocs = 1 + randomInt(5);
            logger.info("indexing [{}] docs initially", numberOfInitialDocs);
            numDocs += indexDocs(index, 0, numberOfInitialDocs);
-           assertSeqNoOnShards(index, nodes, nodes.getBWCVersion().major >= 6 ? numDocs : 0, newNodeClient);
+           assertSeqNoOnShards(index, nodes, nodes.getBWCVersion().onOrAfter(LegacyESVersion.V_6_0_0) ? numDocs : 0, newNodeClient);
            logger.info("allowing shards on all nodes");
            updateIndexSettings(index, Settings.builder().putNull("index.routing.allocation.include._name"));
            ensureGreen(index);

@@ -209,7 +210,7 @@ public class IndexingIT extends OpenSearchRestTestCase {
            final int numberOfDocsAfterAllowingShardsOnAllNodes = 1 + randomInt(5);
            logger.info("indexing [{}] docs after allowing shards on all nodes", numberOfDocsAfterAllowingShardsOnAllNodes);
            numDocs += indexDocs(index, numDocs, numberOfDocsAfterAllowingShardsOnAllNodes);
-           assertSeqNoOnShards(index, nodes, nodes.getBWCVersion().major >= 6 ? numDocs : 0, newNodeClient);
+           assertSeqNoOnShards(index, nodes, nodes.getBWCVersion().onOrAfter(LegacyESVersion.V_6_0_0) ? numDocs : 0, newNodeClient);
            Shard primary = buildShards(index, nodes, newNodeClient).stream().filter(Shard::isPrimary).findFirst().get();
            logger.info("moving primary to new node by excluding {}", primary.getNode().getNodeName());
            updateIndexSettings(index, Settings.builder().put("index.routing.allocation.exclude._name", primary.getNode().getNodeName()));

@@ -219,7 +220,8 @@ public class IndexingIT extends OpenSearchRestTestCase {
            logger.info("indexing [{}] docs after moving primary", numberOfDocsAfterMovingPrimary);
            numDocsOnNewPrimary += indexDocs(index, numDocs, numberOfDocsAfterMovingPrimary);
            numDocs += numberOfDocsAfterMovingPrimary;
-           assertSeqNoOnShards(index, nodes, nodes.getBWCVersion().major >= 6 ? numDocs : numDocsOnNewPrimary, newNodeClient);
+           assertSeqNoOnShards(index, nodes,
+               nodes.getBWCVersion().onOrAfter(LegacyESVersion.V_6_0_0) ? numDocs : numDocsOnNewPrimary, newNodeClient);
            /*
             * Dropping the number of replicas to zero, and then increasing it to one triggers a recovery thus exercising any BWC-logic in
             * the recovery code.

@@ -238,7 +240,8 @@ public class IndexingIT extends OpenSearchRestTestCase {
            for (Shard shard : buildShards(index, nodes, newNodeClient)) {
                assertCount(index, "_only_nodes:" + shard.node.nodeName, numDocs);
            }
-           assertSeqNoOnShards(index, nodes, nodes.getBWCVersion().major >= 6 ? numDocs : numDocsOnNewPrimary, newNodeClient);
+           assertSeqNoOnShards(index, nodes,
+               nodes.getBWCVersion().onOrAfter(LegacyESVersion.V_6_0_0) ? numDocs : numDocsOnNewPrimary, newNodeClient);
        }
    }

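The assertions changed above gate the expected sequence-number totals on whether the backwards-compatibility cluster supports sequence numbers at all, which holds from legacy 6.0.0 onward. A tiny sketch of that expectation, using a hypothetical helper rather than the test's real assertSeqNoOnShards signature:

// Sketch of the expectation the updated assertions encode: sequence numbers are only
// tracked by nodes at or above legacy 6.0.0, so the expected count falls back otherwise.
// Toy helper only; not the test's real assertSeqNoOnShards API.
final class SeqNoExpectationSketch {

    static long expectedSeqNoCount(boolean bwcOnOrAfter600, long numDocs, long fallback) {
        // Fallback is 0 before shards move, or numDocsOnNewPrimary after the primary relocates.
        return bwcOnOrAfter600 ? numDocs : fallback;
    }

    public static void main(String[] args) {
        System.out.println(expectedSeqNoCount(true, 12, 0));  // 12 against a >= 6.0.0 BWC cluster
        System.out.println(expectedSeqNoCount(false, 12, 0)); // 0 against an older BWC cluster
    }
}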
VersionUtils.java

@@ -79,7 +79,7 @@ public class VersionUtils
            stableVersions = previousMajor;
            // remove current
            moveLastToUnreleased(currentMajor, unreleasedVersions);
-       } else {
+       } else if (current.major != 1) {
            // on a stable or release branch, ie N.x
            stableVersions = currentMajor;
            // remove the next maintenance bugfix

@@ -88,6 +88,8 @@
                // The latest minor in the previous major is a ".0" release, so there must be an unreleased bugfix for the minor before that
                moveLastToUnreleased(previousMajor, unreleasedVersions);
            }
+       } else {
+           stableVersions = currentMajor;
        }

        // remove last minor unless the it's the first OpenSearch version.

OpenSearchRestTestCase.java

@@ -950,15 +950,28 @@ public abstract class OpenSearchRestTestCase extends OpenSearchTestCase {
    }

    protected static void expectSoftDeletesWarning(Request request, String indexName) {
-       final List<String> expectedWarnings = Collections.singletonList(
+       final List<String> esExpectedWarnings = Collections.singletonList(
            "Creating indices with soft-deletes disabled is deprecated and will be removed in future Elasticsearch versions. " +
                "Please do not specify value for setting [index.soft_deletes.enabled] of index [" + indexName + "].");
+       final List<String> opensearchExpectedWarnings = Collections.singletonList(
+           "Creating indices with soft-deletes disabled is deprecated and will be removed in future OpenSearch versions. " +
+               "Please do not specify value for setting [index.soft_deletes.enabled] of index [" + indexName + "].");
        final Builder requestOptions = RequestOptions.DEFAULT.toBuilder();
-       if (nodeVersions.stream().allMatch(version -> version.onOrAfter(LegacyESVersion.V_7_6_0))) {
-           requestOptions.setWarningsHandler(warnings -> warnings.equals(expectedWarnings) == false);
+       if (nodeVersions.stream().allMatch(version -> version.onOrAfter(LegacyESVersion.V_7_6_0) && version.before(Version.V_1_0_0))) {
+           requestOptions.setWarningsHandler(warnings -> warnings.equals(esExpectedWarnings) == false);
            request.setOptions(requestOptions);
-       } else if (nodeVersions.stream().anyMatch(version -> version.onOrAfter(LegacyESVersion.V_7_6_0))) {
-           requestOptions.setWarningsHandler(warnings -> warnings.isEmpty() == false && warnings.equals(expectedWarnings) == false);
+       } else if (nodeVersions.stream().anyMatch(version -> version.onOrAfter(LegacyESVersion.V_7_6_0)
+           && version.before(Version.V_1_0_0))) {
+           requestOptions.setWarningsHandler(warnings -> warnings.isEmpty() == false && warnings.equals(esExpectedWarnings) == false);
            request.setOptions(requestOptions);
        }
+
+       if (nodeVersions.stream().allMatch(version -> version.onOrAfter(Version.V_1_0_0))) {
+           requestOptions.setWarningsHandler(warnings -> warnings.equals(opensearchExpectedWarnings) == false);
+           request.setOptions(requestOptions);
+       } else if (nodeVersions.stream().anyMatch(version -> version.onOrAfter(Version.V_1_0_0))) {
+           requestOptions.setWarningsHandler(warnings -> warnings.isEmpty() == false
+               && warnings.equals(opensearchExpectedWarnings) == false);
+           request.setOptions(requestOptions);
+       }
    }
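The rewritten expectSoftDeletesWarning keys the accepted deprecation wording off the node versions in the cluster: clusters made up of legacy 7.6+ nodes should emit the Elasticsearch wording, clusters on 1.0.0 or later the OpenSearch wording, and mixed clusters are handled leniently because the warning may or may not appear. A compressed, self-contained sketch of that selection; toy types only, not the real RequestOptions or WarningsHandler API.

import java.util.List;
import java.util.function.Predicate;

// Sketch of the warning-selection logic added above: pick which deprecation wording
// (legacy Elasticsearch vs OpenSearch) to tolerate based on the node versions present.
// Toy types only; not the real RequestOptions/WarningsHandler API, and the two mixed-cluster
// branches of the original are compressed into a single lenient predicate here.
final class SoftDeletesWarningSketch {

    enum NodeLine { LEGACY_PRE_7_6, LEGACY_7_6_PLUS, OPENSEARCH_1_PLUS }

    // Returns a predicate over the warnings of a response: true means "fail the request".
    static Predicate<List<String>> failOnUnexpectedWarnings(List<NodeLine> nodes,
                                                            List<String> esWarnings,
                                                            List<String> osWarnings) {
        boolean allLegacy76Plus = nodes.stream().allMatch(n -> n == NodeLine.LEGACY_7_6_PLUS);
        boolean allOpenSearch = nodes.stream().allMatch(n -> n == NodeLine.OPENSEARCH_1_PLUS);
        if (allLegacy76Plus) {
            // homogeneous legacy cluster: only the Elasticsearch-worded warning is acceptable
            return warnings -> !warnings.equals(esWarnings);
        }
        if (allOpenSearch) {
            // homogeneous OpenSearch cluster: only the OpenSearch-worded warning is acceptable
            return warnings -> !warnings.equals(osWarnings);
        }
        // mixed cluster: the warning may be absent, so only reject an unexpected mismatch
        return warnings -> !warnings.isEmpty()
            && !warnings.equals(esWarnings)
            && !warnings.equals(osWarnings);
    }
}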