[Versioning] Rebase to OpenSearch version 1.0.0 (#555)

This commit rebases the versioning to OpenSearch 1.0.0

Co-authored-by: Rabi Panda <adnapibar@gmail.com>

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
This commit is contained in:
Nick Knize 2021-04-15 17:06:47 -05:00 committed by GitHub
parent 4dde0f2a3b
commit 0ba0e7cc26
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
439 changed files with 1979 additions and 1731 deletions

View File

@ -102,7 +102,7 @@ class PluginBuildPlugin implements Plugin<Project> {
'name' : extension1.name,
'description' : extension1.description,
'version' : extension1.version,
'opensearchVersion': Version.fromString(VersionProperties.getOpenSearch()).toString(),
'opensearchVersion' : Version.fromString(VersionProperties.getOpenSearch()).toString(),
'javaVersion' : project.targetCompatibility as String,
'classname' : extension1.classname,
'extendedPlugins' : extension1.extendedPlugins.join(','),

View File

@ -98,7 +98,7 @@ import static java.util.Collections.unmodifiableList;
public class BwcVersions {
private static final Pattern LINE_PATTERN = Pattern.compile(
"\\W+public static final Version V_(\\d+)_(\\d+)_(\\d+)(_alpha\\d+|_beta\\d+|_rc\\d+)? .*"
"\\W+public static final (LegacyES)?Version V_(\\d+)_(\\d+)_(\\d+)(_alpha\\d+|_beta\\d+|_rc\\d+)? .*"
);
private final Version currentVersion;
@ -128,9 +128,9 @@ public class BwcVersions {
.filter(Matcher::matches)
.map(
match -> new Version(
Integer.parseInt(match.group(1)),
Integer.parseInt(match.group(2)),
Integer.parseInt(match.group(3))
Integer.parseInt(match.group(3)),
Integer.parseInt(match.group(4))
)
)
.collect(Collectors.toCollection(TreeSet::new)),
@ -144,12 +144,18 @@ public class BwcVersions {
throw new IllegalArgumentException("Could not parse any versions");
}
// hack: this parsing is fragile — like this entire version-resolution logic — and needs a proper redesign
currentVersion = allVersions.last();
groupByMajor = allVersions.stream()
// We only care about the last 2 majors when it comes to BWC.
// It might take us time to remove the older ones from versionLines, so we allow them to exist.
.filter(version -> version.getMajor() > currentVersion.getMajor() - 2)
// Adjust the major number since OpenSearch 1.x is released after predecessor version 7.x
.filter(
version -> (version.getMajor() == 1 ? 7 : version.getMajor()) > (currentVersion.getMajor() == 1
? 7
: currentVersion.getMajor()) - 2
)
.collect(Collectors.groupingBy(Version::getMajor, Collectors.toList()));
assertCurrentVersionMatchesParsed(currentVersionProperty);
@ -262,14 +268,17 @@ public class BwcVersions {
// The current version is being worked, is always unreleased
unreleased.add(currentVersion);
// the tip of the previous major is unreleased for sure, be it a minor or a bugfix
final Version latestOfPreviousMajor = getLatestVersionByKey(this.groupByMajor, currentVersion.getMajor() - 1);
unreleased.add(latestOfPreviousMajor);
if (latestOfPreviousMajor.getRevision() == 0) {
// if the previous major is a x.y.0 release, then the tip of the minor before that (y-1) is also unreleased
final Version previousMinor = getLatestInMinor(latestOfPreviousMajor.getMajor(), latestOfPreviousMajor.getMinor() - 1);
if (previousMinor != null) {
unreleased.add(previousMinor);
// version 1 is the first release, there is no previous "unreleased version":
if (currentVersion.getMajor() != 1) {
// the tip of the previous major is unreleased for sure, be it a minor or a bugfix
final Version latestOfPreviousMajor = getLatestVersionByKey(this.groupByMajor, currentVersion.getMajor() - 1);
unreleased.add(latestOfPreviousMajor);
if (latestOfPreviousMajor.getRevision() == 0) {
// if the previous major is a x.y.0 release, then the tip of the minor before that (y-1) is also unreleased
final Version previousMinor = getLatestInMinor(latestOfPreviousMajor.getMajor(), latestOfPreviousMajor.getMinor() - 1);
if (previousMinor != null) {
unreleased.add(previousMinor);
}
}
}
@ -306,8 +315,9 @@ public class BwcVersions {
}
private Map<Integer, List<Version>> getReleasedMajorGroupedByMinor() {
List<Version> currentMajorVersions = groupByMajor.get(currentVersion.getMajor());
List<Version> previousMajorVersions = groupByMajor.get(currentVersion.getMajor() - 1);
int currentMajor = currentVersion.getMajor();
List<Version> currentMajorVersions = groupByMajor.get(currentMajor);
List<Version> previousMajorVersions = groupByMajor.get(getPreviousMajor(currentMajor));
final Map<Integer, List<Version>> groupByMinor;
if (currentMajorVersions.size() == 1) {
@ -353,23 +363,36 @@ public class BwcVersions {
}
public List<Version> getIndexCompatible() {
return unmodifiableList(
Stream.concat(groupByMajor.get(currentVersion.getMajor() - 1).stream(), groupByMajor.get(currentVersion.getMajor()).stream())
.filter(version -> version.equals(currentVersion) == false)
.collect(Collectors.toList())
);
int currentMajor = currentVersion.getMajor();
int prevMajor = getPreviousMajor(currentMajor);
List<Version> result = Stream.concat(groupByMajor.get(prevMajor).stream(), groupByMajor.get(currentMajor).stream())
.filter(version -> version.equals(currentVersion) == false)
.collect(Collectors.toList());
if (currentMajor == 1) {
// add 6.x compatible for OpenSearch 1.0.0
return unmodifiableList(Stream.concat(groupByMajor.get(prevMajor - 1).stream(), result.stream()).collect(Collectors.toList()));
}
return unmodifiableList(result);
}
public List<Version> getWireCompatible() {
List<Version> wireCompat = new ArrayList<>();
List<Version> prevMajors = groupByMajor.get(currentVersion.getMajor() - 1);
int minor = prevMajors.get(prevMajors.size() - 1).getMinor();
for (int i = prevMajors.size() - 1; i > 0 && prevMajors.get(i).getMinor() == minor; i--) {
wireCompat.add(prevMajors.get(i));
int currentMajor = currentVersion.getMajor();
int lastMajor = currentMajor == 1 ? 6 : currentMajor - 1;
List<Version> lastMajorList = groupByMajor.get(lastMajor);
int minor = lastMajorList.get(lastMajorList.size() - 1).getMinor();
for (int i = lastMajorList.size() - 1; i > 0 && lastMajorList.get(i).getMinor() == minor; --i) {
wireCompat.add(lastMajorList.get(i));
}
wireCompat.addAll(groupByMajor.get(currentVersion.getMajor()));
// if current is OpenSearch 1.0.0 add all of the 7.x line:
if (currentMajor == 1) {
List<Version> previousMajor = groupByMajor.get(7);
for (Version v : previousMajor) {
wireCompat.add(v);
}
}
wireCompat.addAll(groupByMajor.get(currentMajor));
wireCompat.remove(currentVersion);
wireCompat.sort(Version::compareTo);
@ -388,4 +411,8 @@ public class BwcVersions {
return unmodifiableList(unreleasedWireCompatible);
}
private int getPreviousMajor(int currentMajor) {
return currentMajor == 1 ? 7 : currentMajor - 1;
}
}

View File

@ -43,6 +43,8 @@ public final class Version implements Comparable<Version> {
private final int minor;
private final int revision;
private final int id;
// used to identify rebase to OpenSearch 1.0.0
public static final int MASK = 0x08000000;
/**
* Specifies how a version string should be parsed.
@ -73,7 +75,9 @@ public final class Version implements Comparable<Version> {
this.revision = revision;
// currently snapshot is not taken into account
this.id = major * 10000000 + minor * 100000 + revision * 1000;
int id = major * 10000000 + minor * 100000 + revision * 1000;
// XOR in MASK for rebased OpenSearch 1.x so its ids compare as newer than legacy (pre-rebase) version ids
this.id = major == 1 ? id ^ MASK : id;
}
private static int parseSuffixNumber(String substring) {

View File

@ -72,6 +72,7 @@ import java.util.stream.Stream;
public class GlobalBuildInfoPlugin implements Plugin<Project> {
private static final Logger LOGGER = Logging.getLogger(GlobalBuildInfoPlugin.class);
private static final String DEFAULT_LEGACY_VERSION_JAVA_FILE_PATH = "server/src/main/java/org/opensearch/LegacyESVersion.java";
private static final String DEFAULT_VERSION_JAVA_FILE_PATH = "server/src/main/java/org/opensearch/Version.java";
private static Integer _defaultParallel = null;
private static Boolean _isBundledJdkSupported = null;
@ -140,9 +141,13 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
* compatibility. It is *super* important that this logic is the same as the
* logic in VersionUtils.java. */
private static BwcVersions resolveBwcVersions(File root) {
// todo redesign this terrible unreliable hack; should NEVER rely on parsing a source file
// for now, we hack the hack
File versionsFile = new File(root, DEFAULT_VERSION_JAVA_FILE_PATH);
try {
List<String> versionLines = IOUtils.readLines(new FileInputStream(versionsFile), "UTF-8");
File legacyVersionsFile = new File(root, DEFAULT_LEGACY_VERSION_JAVA_FILE_PATH);
try (FileInputStream fis = new FileInputStream(versionsFile); FileInputStream fis2 = new FileInputStream(legacyVersionsFile)) {
List<String> versionLines = IOUtils.readLines(fis, "UTF-8");
versionLines.addAll(IOUtils.readLines(fis2, "UTF-8"));
return new BwcVersions(versionLines);
} catch (IOException e) {
throw new IllegalStateException("Unable to resolve bwc versions from versionsFile.", e);

View File

@ -1151,13 +1151,13 @@ public class OpenSearchNode implements TestClusterConfiguration {
} else {
baseConfig.put("script.max_compilations_rate", "2048/1m");
}
if (getVersion().getMajor() >= 6) {
if (getVersion().onOrAfter("6.0.0")) {
baseConfig.put("cluster.routing.allocation.disk.watermark.flood_stage", "1b");
}
// Temporarily disable the real memory usage circuit breaker. It depends on real memory usage which we have no full control
// over and the REST client will not retry on circuit breaking exceptions yet (see #31986 for details). Once the REST client
// can retry on circuit breaking exceptions, we can revert again to the default configuration.
if (getVersion().getMajor() >= 7) {
if (getVersion().onOrAfter("7.0.0")) {
baseConfig.put("indices.breaker.total.use_real_memory", "false");
}
// Don't wait for state, just start up quickly. This will also allow new and old nodes in the BWC case to become the master
@ -1236,7 +1236,7 @@ public class OpenSearchNode implements TestClusterConfiguration {
if (version.onOrAfter("6.2.0")) {
expansions.put("logs/gc.log", relativeLogPath.resolve("gc.log").toString());
}
if (getVersion().getMajor() >= 7) {
if (getVersion().onOrAfter("7.0.0")) {
expansions.put(
"-XX:ErrorFile=logs/hs_err_pid%p.log",
"-XX:ErrorFile=" + relativeLogPath.resolve("hs_err_pid%p.log").toString()

View File

@ -52,11 +52,11 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
private static Project packagesProject;
private static Project bwcProject;
private static final Version BWC_MAJOR_VERSION = Version.fromString("2.0.0");
private static final Version BWC_MINOR_VERSION = Version.fromString("1.1.0");
private static final Version BWC_STAGED_VERSION = Version.fromString("1.0.0");
private static final Version BWC_BUGFIX_VERSION = Version.fromString("1.0.1");
private static final Version BWC_MAINTENANCE_VERSION = Version.fromString("0.90.1");
private static final Version BWC_MAJOR_VERSION = Version.fromString("4.0.0");
private static final Version BWC_MINOR_VERSION = Version.fromString("3.1.0");
private static final Version BWC_STAGED_VERSION = Version.fromString("3.0.0");
private static final Version BWC_BUGFIX_VERSION = Version.fromString("3.0.1");
private static final Version BWC_MAINTENANCE_VERSION = Version.fromString("2.90.1");
private static final BwcVersions BWC_MINOR = new BwcVersions(
new TreeSet<>(Arrays.asList(BWC_BUGFIX_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION)),
BWC_MAJOR_VERSION

View File

@ -64,7 +64,8 @@ public class VersionTests extends GradleUnitTestCase {
}
public void testCompareWithStringVersions() {
assertTrue("1.10.20 is not interpreted as before 2.0.0", Version.fromString("1.10.20").before("2.0.0"));
// 1.10.20 is now rebased to an OpenSearch version, so it must compare as after legacy 2.0.0
assertTrue("OpenSearch 1.10.20 is not interpreted as after Legacy 2.0.0", Version.fromString("1.10.20").after("2.0.0"));
assertTrue(
"7.0.0-alpha1 should be equal to 7.0.0-alpha1",
Version.fromString("7.0.0-alpha1").equals(Version.fromString("7.0.0-alpha1"))

View File

@ -1,4 +1,4 @@
opensearch = 7.10.3
opensearch = 1.0.0
lucene = 8.7.0
bundled_jdk_vendor = adoptopenjdk

View File

@ -33,6 +33,7 @@
package org.opensearch.client.core;
import org.opensearch.Build;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.client.AbstractResponseTestCase;
import org.opensearch.cluster.ClusterName;
@ -52,7 +53,7 @@ public class MainResponseTests extends AbstractResponseTestCase<org.opensearch.a
ClusterName clusterName = new ClusterName(randomAlphaOfLength(10));
String nodeName = randomAlphaOfLength(10);
final String date = new Date(randomNonNegativeLong()).toString();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_1, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_1, Version.CURRENT);
Build build = new Build(
Build.Type.UNKNOWN, randomAlphaOfLength(8), date, randomBoolean(),
version.toString()

View File

@ -41,6 +41,7 @@ import java.util.Map;
import java.util.stream.Collectors;
import org.apache.lucene.util.LuceneTestCase;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cli.ExitCodes;
import org.opensearch.cli.MockTerminal;
@ -262,7 +263,7 @@ public class ListPluginsCommandTests extends OpenSearchTestCase {
"version",
"1.0",
"opensearch.version",
Version.fromString("1.0.0").toString(),
LegacyESVersion.fromString("5.0.0").toString(),
"java.version",
System.getProperty("java.specification.version"),
"classname",

View File

@ -35,7 +35,7 @@ package org.opensearch.analysis.common;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.cjk.CJKBigramFilter;
import org.apache.lucene.analysis.miscellaneous.DisableGraphAttribute;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
@ -110,7 +110,7 @@ public final class CJKBigramFilterFactory extends AbstractTokenFilterFactory {
@Override
public TokenFilterFactory getSynonymFilter() {
if (outputUnigrams) {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException("Token filter [" + name() +
"] cannot be used to parse synonyms");
}

View File

@ -124,7 +124,7 @@ import org.apache.lucene.analysis.tr.ApostropheFilter;
import org.apache.lucene.analysis.tr.TurkishAnalyzer;
import org.apache.lucene.analysis.util.ElisionFilter;
import org.apache.lucene.util.SetOnce;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.client.Client;
import org.opensearch.cluster.metadata.IndexNameExpressionResolver;
import org.opensearch.cluster.service.ClusterService;
@ -335,7 +335,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
tokenizers.put("simple_pattern_split", SimplePatternSplitTokenizerFactory::new);
tokenizers.put("thai", ThaiTokenizerFactory::new);
tokenizers.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
if (indexSettings.getIndexVersionCreated().onOrAfter(org.opensearch.Version.V_7_6_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_6_0)) {
deprecationLogger.deprecate("nGram_tokenizer_deprecation",
"The [nGram] tokenizer name is deprecated and will be removed in a future version. "
+ "Please change the tokenizer name to [ngram] instead.");
@ -344,7 +344,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
});
tokenizers.put("ngram", NGramTokenizerFactory::new);
tokenizers.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
if (indexSettings.getIndexVersionCreated().onOrAfter(org.opensearch.Version.V_7_6_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_6_0)) {
deprecationLogger.deprecate("edgeNGram_tokenizer_deprecation",
"The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. "
+ "Please change the tokenizer name to [edge_ngram] instead.");
@ -425,7 +425,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
List<PreConfiguredCharFilter> filters = new ArrayList<>();
filters.add(PreConfiguredCharFilter.singleton("html_strip", false, HTMLStripCharFilter::new));
filters.add(PreConfiguredCharFilter.openSearchVersion("htmlStrip", false, (reader, version) -> {
if (version.onOrAfter(org.opensearch.Version.V_6_3_0)) {
if (version.onOrAfter(LegacyESVersion.V_6_3_0)) {
deprecationLogger.deprecate("htmlStrip_deprecation",
"The [htmpStrip] char filter name is deprecated and will be removed in a future version. "
+ "Please change the filter name to [html_strip] instead.");
@ -452,11 +452,11 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
filters.add(PreConfiguredTokenFilter.singleton("czech_stem", false, CzechStemFilter::new));
filters.add(PreConfiguredTokenFilter.singleton("decimal_digit", true, DecimalDigitFilter::new));
filters.add(PreConfiguredTokenFilter.openSearchVersion("delimited_payload_filter", false, (input, version) -> {
if (version.onOrAfter(Version.V_7_0_0)) {
if (version.onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException(
"[delimited_payload_filter] is not supported for new indices, use [delimited_payload] instead");
}
if (version.onOrAfter(Version.V_6_2_0)) {
if (version.onOrAfter(LegacyESVersion.V_6_2_0)) {
deprecationLogger.deprecate("analysis_delimited_payload_filter",
"Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]");
}
@ -472,7 +472,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
filters.add(PreConfiguredTokenFilter.singleton("edge_ngram", false, false, input ->
new EdgeNGramTokenFilter(input, 1)));
filters.add(PreConfiguredTokenFilter.openSearchVersion("edgeNGram", false, false, (reader, version) -> {
if (version.onOrAfter(org.opensearch.Version.V_7_0_0)) {
if (version.onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException(
"The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
+ "Please change the filter name to [edge_ngram] instead.");
@ -500,7 +500,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
LimitTokenCountFilterFactory.DEFAULT_CONSUME_ALL_TOKENS)));
filters.add(PreConfiguredTokenFilter.singleton("ngram", false, false, reader -> new NGramTokenFilter(reader, 1, 2, false)));
filters.add(PreConfiguredTokenFilter.openSearchVersion("nGram", false, false, (reader, version) -> {
if (version.onOrAfter(org.opensearch.Version.V_7_0_0)) {
if (version.onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException("The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
+ "Please change the filter name to [ngram] instead.");
} else {
@ -546,7 +546,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
| WordDelimiterFilter.SPLIT_ON_NUMERICS
| WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE, null)));
filters.add(PreConfiguredTokenFilter.openSearchVersion("word_delimiter_graph", false, false, (input, version) -> {
boolean adjustOffsets = version.onOrAfter(Version.V_7_3_0);
boolean adjustOffsets = version.onOrAfter(LegacyESVersion.V_7_3_0);
return new WordDelimiterGraphFilter(input, adjustOffsets, WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE,
WordDelimiterGraphFilter.GENERATE_WORD_PARTS
| WordDelimiterGraphFilter.GENERATE_NUMBER_PARTS
@ -568,7 +568,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
tokenizers.add(PreConfiguredTokenizer.singleton("whitespace", WhitespaceTokenizer::new));
tokenizers.add(PreConfiguredTokenizer.singleton("ngram", NGramTokenizer::new));
tokenizers.add(PreConfiguredTokenizer.openSearchVersion("edge_ngram", (version) -> {
if (version.onOrAfter(Version.V_7_3_0)) {
if (version.onOrAfter(LegacyESVersion.V_7_3_0)) {
return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
}
return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE);
@ -581,7 +581,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
// Temporary shim for aliases. TODO deprecate after they are moved
tokenizers.add(PreConfiguredTokenizer.openSearchVersion("nGram", (version) -> {
if (version.onOrAfter(org.opensearch.Version.V_7_6_0)) {
if (version.onOrAfter(LegacyESVersion.V_7_6_0)) {
deprecationLogger.deprecate("nGram_tokenizer_deprecation",
"The [nGram] tokenizer name is deprecated and will be removed in a future version. "
+ "Please change the tokenizer name to [ngram] instead.");
@ -589,12 +589,12 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
return new NGramTokenizer();
}));
tokenizers.add(PreConfiguredTokenizer.openSearchVersion("edgeNGram", (version) -> {
if (version.onOrAfter(org.opensearch.Version.V_7_6_0)) {
if (version.onOrAfter(LegacyESVersion.V_7_6_0)) {
deprecationLogger.deprecate("edgeNGram_tokenizer_deprecation",
"The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. "
+ "Please change the tokenizer name to [edge_ngram] instead.");
}
if (version.onOrAfter(Version.V_7_3_0)) {
if (version.onOrAfter(LegacyESVersion.V_7_3_0)) {
return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
}
return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE);

View File

@ -36,7 +36,7 @@ import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.commongrams.CommonGramsFilter;
import org.apache.lucene.analysis.commongrams.CommonGramsQueryFilter;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
@ -79,7 +79,7 @@ public class CommonGramsTokenFilterFactory extends AbstractTokenFilterFactory {
@Override
public TokenFilterFactory getSynonymFilter() {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
} else {
DEPRECATION_LOGGER.deprecate("synonym_tokenfilters", "Token filter [" + name()

View File

@ -35,7 +35,7 @@ package org.opensearch.analysis.common;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter;
import org.apache.lucene.analysis.reverse.ReverseStringFilter;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
@ -100,7 +100,7 @@ public class EdgeNGramTokenFilterFactory extends AbstractTokenFilterFactory {
@Override
public TokenFilterFactory getSynonymFilter() {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
}
else {

View File

@ -34,7 +34,7 @@ package org.opensearch.analysis.common;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.FingerprintFilter;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
@ -67,7 +67,7 @@ public class FingerprintTokenFilterFactory extends AbstractTokenFilterFactory {
@Override
public TokenFilterFactory getSynonymFilter() {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
}
else {

View File

@ -32,7 +32,7 @@
package org.opensearch.analysis.common;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
@ -45,11 +45,11 @@ public class LegacyDelimitedPayloadTokenFilterFactory extends DelimitedPayloadTo
LegacyDelimitedPayloadTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, env, name, settings);
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException(
"[delimited_payload_filter] is not supported for new indices, use [delimited_payload] instead");
}
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_2_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_6_2_0)) {
deprecationLogger.deprecate("analysis_legacy_delimited_payload_filter",
"Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]");
}

View File

@ -37,7 +37,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.ConditionalTokenFilter;
import org.apache.lucene.analysis.miscellaneous.RemoveDuplicatesTokenFilter;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.Strings;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
@ -74,7 +74,7 @@ public class MultiplexerTokenFilterFactory extends AbstractTokenFilterFactory {
@Override
public TokenFilterFactory getSynonymFilter() {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
}
else {
@ -137,7 +137,7 @@ public class MultiplexerTokenFilterFactory extends AbstractTokenFilterFactory {
@Override
public TokenFilterFactory getSynonymFilter() {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
}
else {

View File

@ -34,7 +34,7 @@ package org.opensearch.analysis.common;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ngram.NGramTokenFilter;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
@ -59,7 +59,7 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory {
this.maxGram = settings.getAsInt("max_gram", 2);
int ngramDiff = maxGram - minGram;
if (ngramDiff > maxAllowedNgramDiff) {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException(
"The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: ["
+ maxAllowedNgramDiff + "] but was [" + ngramDiff + "]. This limit can be set by changing the ["
@ -80,7 +80,7 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory {
@Override
public TokenFilterFactory getSynonymFilter() {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
}
else {

View File

@ -34,7 +34,7 @@ package org.opensearch.analysis.common;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.ngram.NGramTokenizer;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
import org.opensearch.index.IndexSettings;
@ -125,7 +125,7 @@ public class NGramTokenizerFactory extends AbstractTokenizerFactory {
this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
int ngramDiff = maxGram - minGram;
if (ngramDiff > maxAllowedNgramDiff) {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException(
"The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: ["
+ maxAllowedNgramDiff + "] but was [" + ngramDiff + "]. This limit can be set by changing the ["

View File

@ -33,7 +33,7 @@
package org.opensearch.analysis.common;
import org.apache.lucene.analysis.CharArraySet;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
@ -58,7 +58,7 @@ public class StandardHtmlStripAnalyzerProvider extends AbstractIndexAnalyzerProv
CharArraySet stopWords = Analysis.parseStopWords(env, settings, defaultStopwords);
analyzer = new StandardHtmlStripAnalyzer(stopWords);
analyzer.setVersion(version);
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException("[standard_html_strip] analyzer is not supported for new indices, " +
"use a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter");
} else {

View File

@ -36,7 +36,7 @@ import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterGraphFilter;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
@ -121,7 +121,7 @@ public class WordDelimiterGraphTokenFilterFactory extends AbstractTokenFilterFac
@Override
public TokenFilterFactory getSynonymFilter() {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
}
else {

View File

@ -36,7 +36,7 @@ import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
@ -124,7 +124,7 @@ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory
@Override
public TokenFilterFactory getSynonymFilter() {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
}
else {

View File

@ -34,6 +34,7 @@ package org.opensearch.analysis.common;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Tokenizer;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
@ -57,7 +58,8 @@ public class CommonAnalysisPluginTests extends OpenSearchTestCase {
public void testNGramDeprecationWarning() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)))
VersionUtils.randomVersionBetween(
random(), LegacyESVersion.V_6_0_0, VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)))
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
.putList("index.analysis.analyzer.custom_analyzer.filter", "nGram")
@ -76,7 +78,7 @@ public class CommonAnalysisPluginTests extends OpenSearchTestCase {
*/
public void testNGramDeprecationError() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, null))
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, null))
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
.putList("index.analysis.analyzer.custom_analyzer.filter", "nGram")
@ -96,7 +98,8 @@ public class CommonAnalysisPluginTests extends OpenSearchTestCase {
public void testEdgeNGramDeprecationWarning() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_6_4_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)))
VersionUtils.randomVersionBetween(
random(), LegacyESVersion.V_6_4_0, VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)))
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
.putList("index.analysis.analyzer.custom_analyzer.filter", "edgeNGram")
@ -114,7 +117,7 @@ public class CommonAnalysisPluginTests extends OpenSearchTestCase {
*/
public void testEdgeNGramDeprecationError() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, null))
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, null))
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
.putList("index.analysis.analyzer.custom_analyzer.filter", "edgeNGram")
@ -134,7 +137,7 @@ public class CommonAnalysisPluginTests extends OpenSearchTestCase {
public void testStandardHtmlStripAnalyzerDeprecationError() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT))
.put("index.analysis.analyzer.custom_analyzer.type", "standard_html_strip")
.putList("index.analysis.analyzer.custom_analyzer.stopwords", "a", "b")
.build();
@ -153,8 +156,8 @@ public class CommonAnalysisPluginTests extends OpenSearchTestCase {
public void testStandardHtmlStripAnalyzerDeprecationWarning() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
VersionUtils.getPreviousVersion(Version.V_7_0_0)))
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0,
VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)))
.put("index.analysis.analyzer.custom_analyzer.type", "standard_html_strip")
.putList("index.analysis.analyzer.custom_analyzer.stopwords", "a", "b")
.build();
@ -176,7 +179,7 @@ public class CommonAnalysisPluginTests extends OpenSearchTestCase {
public void testnGramFilterInCustomAnalyzerDeprecationError() throws IOException {
final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT))
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
.putList("index.analysis.analyzer.custom_analyzer.filter", "my_ngram")
@ -196,7 +199,7 @@ public class CommonAnalysisPluginTests extends OpenSearchTestCase {
public void testEdgeNGramFilterInCustomAnalyzerDeprecationError() throws IOException {
final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT))
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
.putList("index.analysis.analyzer.custom_analyzer.filter", "my_ngram")
@ -216,19 +219,19 @@ public class CommonAnalysisPluginTests extends OpenSearchTestCase {
public void testNGramTokenizerDeprecation() throws IOException {
// tests for prebuilt tokenizer
doTestPrebuiltTokenizerDeprecation("nGram", "ngram",
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.V_7_5_2), false);
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_5_2), false);
doTestPrebuiltTokenizerDeprecation("edgeNGram", "edge_ngram",
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.V_7_5_2), false);
doTestPrebuiltTokenizerDeprecation("nGram", "ngram", Version.V_7_6_0, true);
doTestPrebuiltTokenizerDeprecation("edgeNGram", "edge_ngram", Version.V_7_6_0, true);
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_5_2), false);
doTestPrebuiltTokenizerDeprecation("nGram", "ngram", LegacyESVersion.V_7_6_0, true);
doTestPrebuiltTokenizerDeprecation("edgeNGram", "edge_ngram", LegacyESVersion.V_7_6_0, true);
// same batch of tests for custom tokenizer definition in the settings
doTestCustomTokenizerDeprecation("nGram", "ngram",
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.V_7_5_2), false);
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_5_2), false);
doTestCustomTokenizerDeprecation("edgeNGram", "edge_ngram",
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.V_7_5_2), false);
doTestCustomTokenizerDeprecation("nGram", "ngram", Version.V_7_6_0, true);
doTestCustomTokenizerDeprecation("edgeNGram", "edge_ngram", Version.V_7_6_0, true);
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_5_2), false);
doTestCustomTokenizerDeprecation("nGram", "ngram", LegacyESVersion.V_7_6_0, true);
doTestCustomTokenizerDeprecation("edgeNGram", "edge_ngram", LegacyESVersion.V_7_6_0, true);
}
public void doTestPrebuiltTokenizerDeprecation(String deprecatedName, String replacement, Version version, boolean expectWarning)

View File

@ -33,6 +33,7 @@
package org.opensearch.analysis.common;
import org.apache.lucene.analysis.Tokenizer;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
@ -70,8 +71,8 @@ public class EdgeNGramTokenizerTests extends OpenSearchTokenStreamTestCase {
// Before 7.3 we return ngrams of length 1 only
{
Version version = VersionUtils.randomVersionBetween(random(), Version.V_7_0_0,
VersionUtils.getPreviousVersion(Version.V_7_3_0));
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.fromString("7.0.0"),
VersionUtils.getPreviousVersion(LegacyESVersion.fromString("7.3.0")));
try (IndexAnalyzers indexAnalyzers = buildAnalyzers(version, "edge_ngram")) {
NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer");
assertNotNull(analyzer);
@ -81,8 +82,8 @@ public class EdgeNGramTokenizerTests extends OpenSearchTokenStreamTestCase {
// Check deprecated name as well
{
Version version = VersionUtils.randomVersionBetween(random(), Version.V_7_0_0,
VersionUtils.getPreviousVersion(Version.V_7_3_0));
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.fromString("7.0.0"),
VersionUtils.getPreviousVersion(LegacyESVersion.fromString("7.3.0")));
try (IndexAnalyzers indexAnalyzers = buildAnalyzers(version, "edgeNGram")) {
NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer");
assertNotNull(analyzer);
@ -102,7 +103,7 @@ public class EdgeNGramTokenizerTests extends OpenSearchTokenStreamTestCase {
// Check deprecated name as well, needs version before 8.0 because throws IAE after that
{
try (IndexAnalyzers indexAnalyzers = buildAnalyzers(
VersionUtils.randomVersionBetween(random(), Version.V_7_3_0, Version.CURRENT),
VersionUtils.randomVersionBetween(random(), LegacyESVersion.fromString("7.3.0"), Version.CURRENT),
"edgeNGram")) {
NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer");
assertNotNull(analyzer);

View File

@ -32,6 +32,7 @@
package org.opensearch.analysis.common;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
@ -54,7 +55,8 @@ public class HtmlStripCharFilterFactoryTests extends OpenSearchTestCase {
*/
public void testDeprecationWarning() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_6_3_0, Version.CURRENT))
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_3_0, Version.CURRENT))
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
@ -73,7 +75,7 @@ public class HtmlStripCharFilterFactoryTests extends OpenSearchTestCase {
public void testNoDeprecationWarningPre6_3() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_2_4))
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, LegacyESVersion.V_6_2_4))
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);

View File

@ -37,6 +37,7 @@ import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.KeywordTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
@ -219,7 +220,7 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase {
Settings settings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.synonyms.type", "synonym")
.putList("index.analysis.filter.synonyms.synonyms", "programmer, developer")
@ -271,7 +272,7 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase {
Settings settings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
@ -295,7 +296,8 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase {
Settings settings2 = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)))
VersionUtils.randomVersionBetween(
random(), LegacyESVersion.V_6_0_0, VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)))
.put("path.home", createTempDir().toString())
.putList("common_words", "a", "b")
.put("output_unigrams", "true")
@ -319,7 +321,7 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase {
Settings settings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.putList("common_words", "a", "b")
.put("output_unigrams", "true")
@ -348,7 +350,8 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase {
settings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)))
VersionUtils.randomVersionBetween(
random(), LegacyESVersion.V_6_0_0, VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)))
.put("path.home", createTempDir().toString())
.putList("common_words", "a", "b")
.put("output_unigrams", "true")
@ -370,7 +373,8 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase {
settings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)))
VersionUtils.randomVersionBetween(
random(), LegacyESVersion.V_6_0_0, VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)))
.put("path.home", createTempDir().toString())
.put("preserve_original", "false")
.build();

View File

@ -33,6 +33,7 @@ package org.opensearch.analysis.common;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
@ -171,7 +172,8 @@ public class WordDelimiterGraphTokenFilterFactoryTests
.build();
Settings indexSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, VersionUtils.getPreviousVersion(Version.V_7_3_0)))
VersionUtils.randomVersionBetween(
random(), LegacyESVersion.V_7_0_0, VersionUtils.getPreviousVersion(LegacyESVersion.V_7_3_0)))
.put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
.putList("index.analysis.analyzer.my_analyzer.filter", "word_delimiter_graph")
.build();

View File

@ -31,7 +31,7 @@
package org.opensearch.ingest.common;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionListener;
import org.opensearch.action.ActionRequest;
import org.opensearch.action.ActionRequestValidationException;
@ -79,7 +79,7 @@ public class GrokProcessorGetAction extends ActionType<GrokProcessorGetAction.Re
Request(StreamInput in) throws IOException {
super(in);
this.sorted = in.getVersion().onOrAfter(Version.V_7_10_0) ? in.readBoolean() : false;
this.sorted = in.getVersion().onOrAfter(LegacyESVersion.V_7_10_0) ? in.readBoolean() : false;
}
@Override
@ -90,7 +90,7 @@ public class GrokProcessorGetAction extends ActionType<GrokProcessorGetAction.Re
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (out.getVersion().onOrAfter(Version.V_7_10_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_10_0)) {
out.writeBoolean(sorted);
}
}

View File

@ -32,8 +32,8 @@
package org.opensearch.script.mustache;
import org.opensearch.LegacyESVersion;
import org.opensearch.OpenSearchException;
import org.opensearch.Version;
import org.opensearch.action.ActionResponse;
import org.opensearch.action.search.MultiSearchResponse;
import org.opensearch.common.Nullable;
@ -125,7 +125,7 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera
MultiSearchTemplateResponse(StreamInput in) throws IOException {
super(in);
items = in.readArray(Item::new, Item[]::new);
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_0_0)) {
tookInMillis = in.readVLong();
} else {
tookInMillis = -1L;
@ -159,7 +159,7 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeArray(items);
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_0_0)) {
out.writeVLong(tookInMillis);
}
}

View File

@ -42,7 +42,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.opensearch.action.ActionRequestValidationException;
@ -297,7 +297,7 @@ public class PainlessExecuteAction extends ActionType<PainlessExecuteAction.Resp
Request(StreamInput in) throws IOException {
super(in);
script = new Script(in);
if (in.getVersion().before(Version.V_6_4_0)) {
if (in.getVersion().before(LegacyESVersion.V_6_4_0)) {
byte scriptContextId = in.readByte();
assert scriptContextId == 0;
context = null;
@ -341,7 +341,7 @@ public class PainlessExecuteAction extends ActionType<PainlessExecuteAction.Resp
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
script.writeTo(out);
if (out.getVersion().before(Version.V_6_4_0)) {
if (out.getVersion().before(LegacyESVersion.V_6_4_0)) {
out.writeByte((byte) 0);
} else {
out.writeString(context.name);

View File

@ -1,8 +1,8 @@
---
setup:
- skip:
version: " - 7.6.99"
reason: "implemented in 7.7.0"
version: " - 7.5.99"
reason: "The bug was corrected from 7.6"
- do:
indices.create:
@ -59,49 +59,9 @@ teardown:
---
"Test two sub-queries with only one having inner_hits":
- skip:
version: " - 7.59.99"
version: " - 7.5.99"
reason: "The bug was corrected from 7.6"
- do:
indices.create:
index: test
body:
mappings:
properties:
entity_type: { "type": "keyword" }
join_field: { "type": "join", "relations": { "question": "answer", "person" : "address" } }
settings:
number_of_shards: 1
- do:
index:
index: test
id: 1
body: { "join_field": { "name": "question" }, "entity_type": "question" }
- do:
index:
index: test
id: 2
routing: 1
body: { "join_field": { "name": "answer", "parent": 1} , "entity_type": "answer" }
- do:
index:
index: test
id: 3
body: { "join_field": { "name": "person" }, "entity_type": "person" }
- do:
index:
index: test
routing: 3
id: 4
body: { "join_field": { "name": "address", "parent": 3 }, "entity_type": "address" }
- do:
indices.refresh: {}
- do:
search:
index: test

View File

@ -55,6 +55,7 @@ import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.SetOnce;
import org.opensearch.LegacyESVersion;
import org.opensearch.OpenSearchException;
import org.opensearch.ResourceNotFoundException;
import org.opensearch.Version;
@ -264,10 +265,10 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
PercolateQueryBuilder(StreamInput in) throws IOException {
super(in);
field = in.readString();
if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
name = in.readOptionalString();
}
if (in.getVersion().before(Version.V_6_0_0_beta1)) {
if (in.getVersion().before(LegacyESVersion.V_6_0_0_beta1)) {
documentType = in.readString();
} else {
documentType = in.readOptionalString();
@ -282,7 +283,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
} else {
indexedDocumentVersion = null;
}
if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
documents = in.readList(StreamInput::readBytesReference);
} else {
BytesReference document = in.readOptionalBytesReference();
@ -311,10 +312,10 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
throw new IllegalStateException("supplier must be null, can't serialize suppliers, missing a rewriteAndFetch?");
}
out.writeString(field);
if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
out.writeOptionalString(name);
}
if (out.getVersion().before(Version.V_6_0_0_beta1)) {
if (out.getVersion().before(LegacyESVersion.V_6_0_0_beta1)) {
out.writeString(documentType);
} else {
out.writeOptionalString(documentType);
@ -330,7 +331,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
} else {
out.writeBoolean(false);
}
if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
out.writeVInt(documents.size());
for (BytesReference document : documents) {
out.writeBytesReference(document);
@ -657,7 +658,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
if (binaryDocValues == null) {
return docId -> null;
}
if (indexVersion.onOrAfter(Version.V_6_0_0_beta2)) {
if (indexVersion.onOrAfter(LegacyESVersion.V_6_0_0_beta2)) {
return docId -> {
if (binaryDocValues.advanceExact(docId)) {
BytesRef qbSource = binaryDocValues.binaryValue();

View File

@ -55,6 +55,7 @@ import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.support.PlainActionFuture;
import org.opensearch.common.ParsingException;
@ -286,7 +287,7 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper {
}
BooleanQuery.Builder candidateQuery = new BooleanQuery.Builder();
if (canUseMinimumShouldMatchField && indexVersion.onOrAfter(Version.V_6_1_0)) {
if (canUseMinimumShouldMatchField && indexVersion.onOrAfter(LegacyESVersion.V_6_1_0)) {
LongValuesSource valuesSource = LongValuesSource.fromIntField(minimumShouldMatchField.name());
for (BytesRef extractedTerm : extractedTerms) {
subQueries.add(new TermQuery(new Term(queryTermsField.name(), extractedTerm)));
@ -393,7 +394,7 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper {
static void createQueryBuilderField(Version indexVersion, BinaryFieldMapper qbField,
QueryBuilder queryBuilder, ParseContext context) throws IOException {
if (indexVersion.onOrAfter(Version.V_6_0_0_beta2)) {
if (indexVersion.onOrAfter(LegacyESVersion.V_6_0_0_beta2)) {
try (ByteArrayOutputStream stream = new ByteArrayOutputStream()) {
try (OutputStreamStreamOutput out = new OutputStreamStreamOutput(stream)) {
out.setVersion(indexVersion);
@ -457,7 +458,7 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper {
}
createFieldNamesField(context);
if (indexVersionCreated.onOrAfter(Version.V_6_1_0)) {
if (indexVersionCreated.onOrAfter(LegacyESVersion.V_6_1_0)) {
doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch));
}
}

View File

@ -53,6 +53,7 @@ import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.automaton.ByteRunAutomaton;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.common.lucene.search.function.FunctionScoreQuery;
import org.opensearch.index.query.DateRangeIncludingNowQuery;
@ -222,7 +223,7 @@ final class QueryAnalyzer {
boolean verified = isVerified(query);
Set<QueryExtraction> qe = Arrays.stream(terms).map(QueryExtraction::new).collect(Collectors.toSet());
if (qe.size() > 0) {
if (version.before(Version.V_6_1_0) && conjunction) {
if (version.before(LegacyESVersion.V_6_1_0) && conjunction) {
Optional<QueryExtraction> longest = qe.stream()
.filter(q -> q.term != null)
.max(Comparator.comparingInt(q -> q.term.bytes().length));
@ -290,7 +291,7 @@ final class QueryAnalyzer {
if (conjunctionsWithUnknowns.size() == 1) {
return conjunctionsWithUnknowns.get(0);
}
if (version.onOrAfter(Version.V_6_1_0)) {
if (version.onOrAfter(LegacyESVersion.V_6_1_0)) {
for (Result subResult : conjunctions) {
if (subResult.isMatchNoDocs()) {
return subResult;
@ -382,7 +383,7 @@ final class QueryAnalyzer {
// Keep track of the msm for each clause:
List<Integer> clauses = new ArrayList<>(disjunctions.size());
boolean verified;
if (version.before(Version.V_6_1_0)) {
if (version.before(LegacyESVersion.V_6_1_0)) {
verified = requiredShouldClauses <= 1;
} else {
verified = true;
@ -433,7 +434,7 @@ final class QueryAnalyzer {
boolean matchAllDocs = numMatchAllClauses > 0 && numMatchAllClauses >= requiredShouldClauses;
int msm = 0;
if (version.onOrAfter(Version.V_6_1_0) &&
if (version.onOrAfter(LegacyESVersion.V_6_1_0) &&
// Having ranges would mean we need to juggle with the msm and that complicates this logic a lot,
// so for now lets not do it.
hasRangeExtractions == false) {

View File

@ -90,6 +90,7 @@ import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.CheckedFunction;
@ -612,7 +613,7 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase {
IndexSearcher shardSearcher = newSearcher(directoryReader);
shardSearcher.setQueryCache(null);
Version v = Version.V_6_1_0;
Version v = LegacyESVersion.V_6_1_0;
MemoryIndex memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new IntPoint("int_field", 3)), new WhitespaceAnalyzer());
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
Query query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")),

View File

@ -53,6 +53,7 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.util.BytesRef;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.support.PlainActionFuture;
import org.opensearch.cluster.metadata.IndexMetadata;
@ -425,7 +426,7 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
assertThat(t.v1().clauses().get(0).getQuery(), instanceOf(CoveringQuery.class));
assertThat(t.v1().clauses().get(1).getQuery(), instanceOf(TermQuery.class));
t = fieldType.createCandidateQuery(indexReader, Version.V_6_0_0);
t = fieldType.createCandidateQuery(indexReader, LegacyESVersion.V_6_0_0);
assertTrue(t.v2());
assertEquals(2, t.v1().clauses().size());
assertThat(t.v1().clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class));

View File

@ -69,6 +69,7 @@ import org.apache.lucene.search.spans.SpanNotQuery;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.util.BytesRef;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.common.lucene.search.function.CombineFunction;
import org.opensearch.common.lucene.search.function.FunctionScoreQuery;
@ -171,7 +172,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
.add(new Term[] {new Term("_field", "_long_term"), new Term("_field", "_very_long_term")})
.add(new Term[] {new Term("_field", "_very_long_term")})
.build();
Result result = analyze(multiPhraseQuery, Version.V_6_0_0);
Result result = analyze(multiPhraseQuery, LegacyESVersion.V_6_0_0);
assertThat(result.verified, is(false));
assertThat(result.minimumShouldMatch, equalTo(1));
List<QueryExtraction> terms = new ArrayList<>(result.extractions);
@ -242,7 +243,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
builder.add(subBuilder.build(), BooleanClause.Occur.SHOULD);
BooleanQuery booleanQuery = builder.build();
Result result = analyze(booleanQuery, Version.V_6_0_0);
Result result = analyze(booleanQuery, LegacyESVersion.V_6_0_0);
assertThat("Should clause with phrase query isn't verified, so entire query can't be verified", result.verified, is(false));
assertThat(result.minimumShouldMatch, equalTo(1));
List<QueryExtraction> terms = new ArrayList<>(result.extractions);
@ -353,7 +354,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
builder.add(termQuery3, BooleanClause.Occur.SHOULD);
BooleanQuery booleanQuery = builder.build();
Result result = analyze(booleanQuery, Version.V_6_0_0);
Result result = analyze(booleanQuery, LegacyESVersion.V_6_0_0);
assertThat(result.verified, is(false));
assertThat(result.minimumShouldMatch, equalTo(1));
List<QueryExtraction> extractions = new ArrayList<>(result.extractions);
@ -418,7 +419,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
assertThat(result.minimumShouldMatch, equalTo(0));
assertTermsEqual(result.extractions);
result = analyze(booleanQuery, Version.V_6_0_0);
result = analyze(booleanQuery, LegacyESVersion.V_6_0_0);
assertThat(result.matchAllDocs, is(true));
assertThat(result.verified, is(false));
assertThat(result.minimumShouldMatch, equalTo(0));
@ -679,7 +680,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("_field", true)
.addClause(spanTermQuery1).addClause(spanTermQuery2).build();
Result result = analyze(spanNearQuery, Version.V_6_0_0);
Result result = analyze(spanNearQuery, LegacyESVersion.V_6_0_0);
assertThat(result.verified, is(false));
assertThat(result.minimumShouldMatch, equalTo(1));
assertTermsEqual(result.extractions, spanTermQuery2.getTerm());
@ -1229,7 +1230,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
BooleanQuery.Builder boolQuery = new BooleanQuery.Builder();
boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
boolQuery.add(LongPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
Result result = analyze(boolQuery.build(), Version.V_6_0_0);
Result result = analyze(boolQuery.build(), LegacyESVersion.V_6_0_0);
assertFalse(result.verified);
assertThat(result.minimumShouldMatch, equalTo(1));
assertEquals(1, result.extractions.size());
@ -1238,7 +1239,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
boolQuery = new BooleanQuery.Builder();
boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
boolQuery.add(IntPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
result = analyze(boolQuery.build(), Version.V_6_0_0);
result = analyze(boolQuery.build(), LegacyESVersion.V_6_0_0);
assertFalse(result.verified);
assertThat(result.minimumShouldMatch, equalTo(1));
assertEquals(1, result.extractions.size());
@ -1247,7 +1248,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
boolQuery = new BooleanQuery.Builder();
boolQuery.add(DoublePoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
boolQuery.add(DoublePoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
result = analyze(boolQuery.build(), Version.V_6_0_0);
result = analyze(boolQuery.build(), LegacyESVersion.V_6_0_0);
assertFalse(result.verified);
assertThat(result.minimumShouldMatch, equalTo(1));
assertEquals(1, result.extractions.size());
@ -1256,7 +1257,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
boolQuery = new BooleanQuery.Builder();
boolQuery.add(DoublePoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
boolQuery.add(FloatPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
result = analyze(boolQuery.build(), Version.V_6_0_0);
result = analyze(boolQuery.build(), LegacyESVersion.V_6_0_0);
assertFalse(result.verified);
assertThat(result.minimumShouldMatch, equalTo(1));
assertEquals(1, result.extractions.size());
@ -1265,7 +1266,7 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
boolQuery = new BooleanQuery.Builder();
boolQuery.add(HalfFloatPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
boolQuery.add(HalfFloatPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
result = analyze(boolQuery.build(), Version.V_6_0_0);
result = analyze(boolQuery.build(), LegacyESVersion.V_6_0_0);
assertFalse(result.verified);
assertThat(result.minimumShouldMatch, equalTo(1));
assertEquals(1, result.extractions.size());

View File

@ -41,6 +41,7 @@ import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.CheckedFunction;
@ -89,7 +90,7 @@ public class QueryBuilderStoreTests extends OpenSearchTestCase {
BinaryFieldMapper fieldMapper = PercolatorFieldMapper.Builder.createQueryBuilderFieldBuilder(
new Mapper.BuilderContext(settings, new ContentPath(0)));
Version version = Version.V_6_0_0_beta2;
Version version = LegacyESVersion.V_6_0_0_beta2;
try (IndexWriter indexWriter = new IndexWriter(directory, config)) {
for (int i = 0; i < queryBuilders.length; i++) {
queryBuilders[i] = new TermQueryBuilder(randomAlphaOfLength(4), randomAlphaOfLength(8));

View File

@ -32,7 +32,7 @@
package org.opensearch.index.rankeval;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionRequest;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.IndicesRequest;
@ -69,7 +69,7 @@ public class RankEvalRequest extends ActionRequest implements IndicesRequest.Rep
rankingEvaluationSpec = new RankEvalSpec(in);
indices = in.readStringArray();
indicesOptions = IndicesOptions.readIndicesOptions(in);
if (in.getVersion().onOrAfter(Version.V_7_6_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) {
searchType = SearchType.fromId(in.readByte());
}
}
@ -151,7 +151,7 @@ public class RankEvalRequest extends ActionRequest implements IndicesRequest.Rep
rankingEvaluationSpec.writeTo(out);
out.writeStringArray(indices);
indicesOptions.writeIndicesOptions(out);
if (out.getVersion().onOrAfter(Version.V_7_6_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) {
out.writeByte(searchType.id());
}
}

View File

@ -33,7 +33,7 @@
package org.opensearch.index.reindex;
import org.apache.logging.log4j.Logger;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionListener;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.action.support.ActionFilters;
@ -102,11 +102,11 @@ public class TransportUpdateByQueryAction extends HandledTransportAction<UpdateB
ClusterState clusterState, ActionListener<BulkByScrollResponse> listener) {
super(task,
// not all nodes support sequence number powered optimistic concurrency control, we fall back to version
clusterState.nodes().getMinNodeVersion().onOrAfter(Version.V_6_7_0) == false,
clusterState.nodes().getMinNodeVersion().onOrAfter(LegacyESVersion.V_6_7_0) == false,
// all nodes support sequence number powered optimistic concurrency control and we can use it
clusterState.nodes().getMinNodeVersion().onOrAfter(Version.V_6_7_0),
clusterState.nodes().getMinNodeVersion().onOrAfter(LegacyESVersion.V_6_7_0),
logger, client, threadPool, request, listener, scriptService, null);
useSeqNoForCAS = clusterState.nodes().getMinNodeVersion().onOrAfter(Version.V_6_7_0);
useSeqNoForCAS = clusterState.nodes().getMinNodeVersion().onOrAfter(LegacyESVersion.V_6_7_0);
}
@Override

View File

@ -32,6 +32,7 @@
package org.opensearch.index.reindex;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.bytes.BytesReference;
@ -83,13 +84,13 @@ public class RoundTripTests extends OpenSearchTestCase {
// Try slices=auto with a version that doesn't support it, which should fail
reindex.setSlices(AbstractBulkByScrollRequest.AUTO_SLICES);
Exception e = expectThrows(IllegalArgumentException.class, () -> toInputByteStream(Version.V_6_0_0_alpha1, reindex));
Exception e = expectThrows(IllegalArgumentException.class, () -> toInputByteStream(LegacyESVersion.V_6_0_0_alpha1, reindex));
assertEquals("Slices set as \"auto\" are not supported before version [6.1.0]. Found version [6.0.0-alpha1]", e.getMessage());
// Try regular slices with a version that doesn't support slices=auto, which should succeed
reindex.setSlices(between(1, Integer.MAX_VALUE));
tripped = new ReindexRequest(toInputByteStream(reindex));
assertRequestEquals(Version.V_6_0_0_alpha1, reindex, tripped);
assertRequestEquals(LegacyESVersion.V_6_0_0_alpha1, reindex, tripped);
}
public void testUpdateByQueryRequest() throws IOException {
@ -104,7 +105,7 @@ public class RoundTripTests extends OpenSearchTestCase {
// Try slices=auto with a version that doesn't support it, which should fail
update.setSlices(AbstractBulkByScrollRequest.AUTO_SLICES);
Exception e = expectThrows(IllegalArgumentException.class, () -> toInputByteStream(Version.V_6_0_0_alpha1, update));
Exception e = expectThrows(IllegalArgumentException.class, () -> toInputByteStream(LegacyESVersion.V_6_0_0_alpha1, update));
assertEquals("Slices set as \"auto\" are not supported before version [6.1.0]. Found version [6.0.0-alpha1]", e.getMessage());
// Try regular slices with a version that doesn't support slices=auto, which should succeed
@ -122,7 +123,7 @@ public class RoundTripTests extends OpenSearchTestCase {
// Try slices=auto with a version that doesn't support it, which should fail
delete.setSlices(AbstractBulkByScrollRequest.AUTO_SLICES);
Exception e = expectThrows(IllegalArgumentException.class, () -> toInputByteStream(Version.V_6_0_0_alpha1, delete));
Exception e = expectThrows(IllegalArgumentException.class, () -> toInputByteStream(LegacyESVersion.V_6_0_0_alpha1, delete));
assertEquals("Slices set as \"auto\" are not supported before version [6.1.0]. Found version [6.0.0-alpha1]", e.getMessage());
// Try regular slices with a version that doesn't support slices=auto, which should succeed

View File

@ -36,7 +36,7 @@ import com.ibm.icu.text.FilteredNormalizer2;
import com.ibm.icu.text.Normalizer2;
import com.ibm.icu.text.UnicodeSet;
import org.apache.lucene.analysis.TokenStream;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
@ -71,7 +71,7 @@ public class IcuNormalizerTokenFilterFactory extends AbstractTokenFilterFactory
final Normalizer2 normalizer,
final Settings settings) {
String unicodeSetFilter = settings.get("unicodeSetFilter");
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
if (unicodeSetFilter != null) {
deprecationLogger.deprecate("icu_normalizer_unicode_set_filter",
"[unicodeSetFilter] has been deprecated in favor of [unicode_set_filter]");

View File

@ -48,7 +48,7 @@ import org.apache.lucene.analysis.phonetic.BeiderMorseFilter;
import org.apache.lucene.analysis.phonetic.DaitchMokotoffSoundexFilter;
import org.apache.lucene.analysis.phonetic.DoubleMetaphoneFilter;
import org.apache.lucene.analysis.phonetic.PhoneticFilter;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
@ -159,7 +159,7 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory {
@Override
public TokenFilterFactory getSynonymFilter() {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
}
else {

View File

@ -32,6 +32,7 @@
package org.opensearch.index.analysis;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
@ -64,7 +65,8 @@ public class AnalysisPhoneticFactoryTests extends AnalysisFactoryTestCase {
AnalysisPhoneticPlugin plugin = new AnalysisPhoneticPlugin();
Settings settings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
@ -76,7 +78,7 @@ public class AnalysisPhoneticFactoryTests extends AnalysisFactoryTestCase {
settings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(),
Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)))
LegacyESVersion.V_6_0_0, VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)))
.put("path.home", createTempDir().toString())
.build();
idxSettings = IndexSettingsModule.newIndexSettings("index", settings);

View File

@ -42,6 +42,7 @@ import com.google.cloud.storage.spi.v1.HttpStorageRpc;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.opensearch.LegacyESVersion;
import org.opensearch.SpecialPermission;
import org.opensearch.common.SuppressForbidden;
import org.opensearch.core.internal.io.IOUtils;
@ -60,7 +61,7 @@ import java.util.stream.Stream;
/**
* Wrapper around reads from GCS that will retry blob downloads that fail part-way through, resuming from where the failure occurred.
* This should be handled by the SDK but it isn't today. This should be revisited in the future (e.g. before removing
* the {@link org.opensearch.Version#V_7_0_0} version constant) and removed if the SDK handles retries itself in the future.
* the {@link LegacyESVersion#V_7_0_0} version constant) and removed if the SDK handles retries itself in the future.
*/
class GoogleCloudStorageRetryingInputStream extends InputStream {

View File

@ -35,6 +35,7 @@ package org.opensearch.repositories.s3;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.ActionListener;
import org.opensearch.action.ActionRunnable;
@ -177,12 +178,12 @@ class S3Repository extends MeteredBlobStoreRepository {
/**
* Artificial delay to introduce after a snapshot finalization or delete has finished so long as the repository is still using the
* backwards compatible snapshot format from before
* {@link org.opensearch.snapshots.SnapshotsService#SHARD_GEN_IN_REPO_DATA_VERSION} ({@link org.opensearch.Version#V_7_6_0}).
* {@link org.opensearch.snapshots.SnapshotsService#SHARD_GEN_IN_REPO_DATA_VERSION} ({@link LegacyESVersion#V_7_6_0}).
* This delay is necessary so that the eventually consistent nature of AWS S3 does not randomly result in repository corruption when
* doing repository operations in rapid succession on a repository in the old metadata format.
* This setting should not be adjusted in production when working with an AWS S3 backed repository. Doing so risks the repository
* becoming silently corrupted. To get rid of this waiting period, either create a new S3 repository or remove all snapshots older than
* {@link org.opensearch.Version#V_7_6_0} from the repository which will trigger an upgrade of the repository metadata to the new
* {@link LegacyESVersion#V_7_6_0} from the repository which will trigger an upgrade of the repository metadata to the new
* format and disable the cooldown period.
*/
static final Setting<TimeValue> COOLDOWN_PERIOD = Setting.timeSetting(

View File

@ -40,7 +40,7 @@ import com.amazonaws.services.s3.model.S3ObjectInputStream;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.core.internal.io.IOUtils;
import java.io.IOException;
@ -52,7 +52,7 @@ import java.util.List;
/**
* Wrapper around an S3 object that will retry the {@link GetObjectRequest} if the download fails part-way through, resuming from where
* the failure occurred. This should be handled by the SDK but it isn't today. This should be revisited in the future (e.g. before removing
* the {@link Version#V_7_0_0} version constant) and removed when the SDK handles retries itself.
* the {@link LegacyESVersion#V_7_0_0} version constant) and removed when the SDK handles retries itself.
*
* See https://github.com/aws/aws-sdk-java/issues/856 for the related SDK issue
*/

View File

@ -33,6 +33,7 @@
package org.opensearch.upgrades;
import org.apache.http.util.EntityUtils;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.client.Request;
import org.opensearch.client.Response;
@ -99,7 +100,7 @@ import static org.hamcrest.Matchers.startsWith;
* with {@code tests.is_old_cluster} set to {@code false}.
*/
public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
private final boolean supportsLenientBooleans = getOldClusterVersion().before(Version.V_6_0_0_alpha1);
private final boolean supportsLenientBooleans = getOldClusterVersion().before(LegacyESVersion.V_6_0_0_alpha1);
private String index;
private String type;
@ -111,7 +112,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
@Before
public void setType() {
type = getOldClusterVersion().before(Version.V_6_7_0) ? "doc" : "_doc";
type = getOldClusterVersion().before(LegacyESVersion.V_6_7_0) ? "doc" : "_doc";
}
public void testSearch() throws Exception {
@ -352,7 +353,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
client().performRequest(updateSettingsRequest);
Request shrinkIndexRequest = new Request("PUT", "/" + index + "/_shrink/" + shrunkenIndex);
if (getOldClusterVersion().onOrAfter(Version.V_6_4_0) && getOldClusterVersion().before(Version.V_7_0_0)) {
if (getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_4_0) && getOldClusterVersion().before(LegacyESVersion.V_7_0_0)) {
shrinkIndexRequest.addParameter("copy_settings", "true");
}
shrinkIndexRequest.setJsonEntity("{\"settings\": {\"index.number_of_shards\": 1}}");
@ -635,7 +636,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
client().performRequest(updateRequest);
Request getRequest = new Request("GET", "/" + index + "/" + typeName + "/" + docId);
if (getOldClusterVersion().before(Version.V_6_7_0)) {
if (getOldClusterVersion().before(LegacyESVersion.V_6_7_0)) {
getRequest.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
}
Map<String, Object> getRsp = entityAsMap(client().performRequest(getRequest));
@ -684,7 +685,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
Request request = new Request("GET", docLocation);
if (getOldClusterVersion().before(Version.V_6_7_0)) {
if (getOldClusterVersion().before(LegacyESVersion.V_6_7_0)) {
request.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
}
assertThat(toStr(client().performRequest(request)), containsString(doc));
@ -706,7 +707,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
// before timing out
.put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms")
.put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0"); // fail faster
if (getOldClusterVersion().onOrAfter(Version.V_6_5_0)) {
if (getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_5_0)) {
settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
}
if (randomBoolean()) {
@ -978,7 +979,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
mappingsAndSettings.startObject("settings");
mappingsAndSettings.field("number_of_shards", 1);
mappingsAndSettings.field("number_of_replicas", 1);
if (getOldClusterVersion().onOrAfter(Version.V_6_5_0) && randomBoolean()) {
if (getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_5_0) && randomBoolean()) {
mappingsAndSettings.field("soft_deletes.enabled", true);
}
mappingsAndSettings.endObject();
@ -1051,7 +1052,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
closeIndex(index);
}
if (getOldClusterVersion().onOrAfter(Version.V_7_2_0)) {
if (getOldClusterVersion().onOrAfter(LegacyESVersion.V_7_2_0)) {
ensureGreenLongWait(index);
assertClosedIndex(index, true);
} else {
@ -1131,7 +1132,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
* we will hit a warning exception because we put some deprecated settings in that test.
*/
if (isRunningAgainstOldCluster() == false
&& getOldClusterVersion().onOrAfter(Version.V_6_1_0) && getOldClusterVersion().before(Version.V_6_5_0)) {
&& getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_1_0) && getOldClusterVersion().before(LegacyESVersion.V_6_5_0)) {
for (String warning : e.getResponse().getWarnings()) {
assertThat(warning, containsString(
"setting was deprecated and will be removed in a future release! "
@ -1199,7 +1200,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
Map<String, Object> getTemplateResponse = entityAsMap(client().performRequest(getTemplateRequest));
Map<String, Object> expectedTemplate = new HashMap<>();
if (isRunningAgainstOldCluster() && getOldClusterVersion().before(Version.V_6_0_0_beta1)) {
if (isRunningAgainstOldCluster() && getOldClusterVersion().before(LegacyESVersion.V_6_0_0_beta1)) {
expectedTemplate.put("template", "evil_*");
} else {
expectedTemplate.put("index_patterns", singletonList("evil_*"));
@ -1291,7 +1292,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
private String loadInfoDocument(String type) throws IOException {
Request request = new Request("GET", "/info/" + this.type + "/" + index + "_" + type);
request.addParameter("filter_path", "_source");
if (getOldClusterVersion().before(Version.V_6_7_0)) {
if (getOldClusterVersion().before(LegacyESVersion.V_6_7_0)) {
request.setOptions(expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
}
String doc = toStr(client().performRequest(request));
@ -1340,7 +1341,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
}
public void testPeerRecoveryRetentionLeases() throws Exception {
assumeTrue(getOldClusterVersion() + " does not support soft deletes", getOldClusterVersion().onOrAfter(Version.V_6_5_0));
assumeTrue(getOldClusterVersion() + " does not support soft deletes", getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_5_0));
if (isRunningAgainstOldCluster()) {
XContentBuilder settings = jsonBuilder();
settings.startObject();
@ -1348,7 +1349,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
settings.startObject("settings");
settings.field("number_of_shards", between(1, 5));
settings.field("number_of_replicas", between(0, 1));
if (randomBoolean() || getOldClusterVersion().before(Version.V_7_0_0)) {
if (randomBoolean() || getOldClusterVersion().before(LegacyESVersion.V_7_0_0)) {
// this is the default after v7.0.0, but is required before that
settings.field("soft_deletes.enabled", true);
}
@ -1375,7 +1376,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
final Settings.Builder settings = Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1);
if (getOldClusterVersion().onOrAfter(Version.V_6_7_0)) {
if (getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_7_0)) {
settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
}
createIndex(index, settings.build());
@ -1406,7 +1407,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
* Verifies that once all shard copies on the new version, we should turn off the translog retention for indices with soft-deletes.
*/
public void testTurnOffTranslogRetentionAfterUpgraded() throws Exception {
assumeTrue("requires soft-deletes and retention leases", getOldClusterVersion().onOrAfter(Version.V_6_7_0));
assumeTrue("requires soft-deletes and retention leases", getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_7_0));
if (isRunningAgainstOldCluster()) {
createIndex(index, Settings.builder()
.put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1)
@ -1433,7 +1434,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
final Settings.Builder settings = Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1);
if (getOldClusterVersion().onOrAfter(Version.V_6_5_0)) {
if (getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_5_0)) {
settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
}
if (randomBoolean()) {
@ -1509,7 +1510,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
// make sure .tasks index exists
Request getTasksIndex = new Request("GET", "/.tasks");
getTasksIndex.addParameter("allow_no_indices", "false");
if (getOldClusterVersion().onOrAfter(Version.V_6_7_0) && getOldClusterVersion().before(Version.V_7_0_0)) {
if (getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_7_0) && getOldClusterVersion().before(LegacyESVersion.V_7_0_0)) {
getTasksIndex.addParameter("include_type_name", "false");
}
@ -1577,7 +1578,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
final Settings.Builder settings = Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1);
if (getOldClusterVersion().onOrAfter(Version.V_6_5_0)) {
if (getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_5_0)) {
settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
}
createIndex(index, settings.build());
@ -1628,7 +1629,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
}
public void testForbidDisableSoftDeletesOnRestore() throws Exception {
assumeTrue("soft deletes is introduced in 6.5", getOldClusterVersion().onOrAfter(Version.V_6_5_0));
assumeTrue("soft deletes is introduced in 6.5", getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_5_0));
final String snapshot = "snapshot-" + index;
if (isRunningAgainstOldCluster()) {
final Settings.Builder settings = Settings.builder()

View File

@ -32,7 +32,7 @@
package org.opensearch.upgrades;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.admin.cluster.settings.ClusterGetSettingsResponse;
import org.opensearch.client.Request;
import org.opensearch.client.Response;
@ -56,8 +56,8 @@ import static org.hamcrest.Matchers.equalTo;
public class FullClusterRestartSettingsUpgradeIT extends AbstractFullClusterRestartTestCase {
public void testRemoteClusterSettingsUpgraded() throws IOException {
assumeTrue("skip_unavailable did not exist until 6.1.0", getOldClusterVersion().onOrAfter(Version.V_6_1_0));
assumeTrue("settings automatically upgraded since 6.5.0", getOldClusterVersion().before(Version.V_6_5_0));
assumeTrue("skip_unavailable did not exist until 6.1.0", getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_1_0));
assumeTrue("settings automatically upgraded since 6.5.0", getOldClusterVersion().before(LegacyESVersion.V_6_5_0));
if (isRunningAgainstOldCluster()) {
final Request putSettingsRequest = new Request("PUT", "/_cluster/settings");
try (XContentBuilder builder = jsonBuilder()) {

View File

@ -33,7 +33,7 @@
package org.opensearch.upgrades;
import org.apache.http.util.EntityUtils;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.client.Request;
import org.opensearch.client.Response;
import org.opensearch.common.Strings;
@ -157,7 +157,7 @@ public class QueryBuilderBWCIT extends AbstractFullClusterRestartTestCase {
}
public void testQueryBuilderBWC() throws Exception {
final String type = getOldClusterVersion().before(Version.V_7_0_0) ? "doc" : "_doc";
final String type = getOldClusterVersion().before(LegacyESVersion.V_7_0_0) ? "doc" : "_doc";
String index = "queries";
if (isRunningAgainstOldCluster()) {
XContentBuilder mappingsAndSettings = jsonBuilder();

View File

@ -32,6 +32,7 @@
package org.opensearch.upgrades;
import org.apache.http.util.EntityUtils;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.client.Request;
import org.opensearch.client.Response;
@ -91,8 +92,8 @@ public class IndexingIT extends AbstractRollingTestCase {
{
Version minimumIndexCompatibilityVersion = Version.CURRENT.minimumIndexCompatibilityVersion();
assertThat("this branch is not needed if we aren't compatible with 6.0",
minimumIndexCompatibilityVersion.onOrBefore(Version.V_6_0_0), equalTo(true));
if (minimumIndexCompatibilityVersion.before(Version.V_7_0_0)) {
minimumIndexCompatibilityVersion.onOrBefore(LegacyESVersion.V_6_0_0), equalTo(true));
if (minimumIndexCompatibilityVersion.before(LegacyESVersion.V_7_0_0)) {
XContentBuilder template = jsonBuilder();
template.startObject();
{
@ -203,7 +204,7 @@ public class IndexingIT extends AbstractRollingTestCase {
}
}
if (minNodeVersion.before(Version.V_7_5_0)) {
if (minNodeVersion.before(LegacyESVersion.V_7_5_0)) {
ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(bulk));
assertEquals(400, e.getResponse().getStatusLine().getStatusCode());
assertThat(e.getMessage(),

View File

@ -33,7 +33,7 @@ package org.opensearch.upgrades;
import org.apache.http.HttpStatus;
import org.apache.http.util.EntityUtils;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.client.Node;
import org.opensearch.client.Request;
import org.opensearch.client.RequestOptions;
@ -74,7 +74,7 @@ public class JodaCompatibilityIT extends AbstractRollingTestCase {
@BeforeClass
public static void init(){
assumeTrue("upgrading from 7.0-7.6 will fail parsing joda formats",
UPGRADE_FROM_VERSION.before(Version.V_7_0_0));
UPGRADE_FROM_VERSION.before(LegacyESVersion.V_7_0_0));
}
public void testJodaBackedDocValueAndDateFields() throws Exception {

View File

@ -31,7 +31,7 @@
package org.opensearch.upgrades;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.client.Request;
import org.opensearch.client.Response;
import org.opensearch.common.xcontent.support.XContentMapValues;
@ -42,7 +42,7 @@ public class MappingIT extends AbstractRollingTestCase {
* and check that it can be upgraded to 7x.
*/
public void testAllFieldDisable6x() throws Exception {
assumeTrue("_all", UPGRADE_FROM_VERSION.before(Version.V_7_0_0));
assumeTrue("_all", UPGRADE_FROM_VERSION.before(LegacyESVersion.V_7_0_0));
switch (CLUSTER_TYPE) {
case OLD:
Request createTestIndex = new Request("PUT", "all-index");

View File

@ -32,6 +32,7 @@
package org.opensearch.upgrades;
import org.apache.http.util.EntityUtils;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.support.PlainActionFuture;
import org.opensearch.client.Request;
@ -351,7 +352,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
if (randomBoolean()) {
indexDocs(index, i, 1); // update
} else if (randomBoolean()) {
if (getNodeId(v -> v.onOrAfter(Version.V_7_0_0)) == null) {
if (getNodeId(v -> v.onOrAfter(LegacyESVersion.V_7_0_0)) == null) {
client().performRequest(new Request("DELETE", index + "/test/" + i));
} else {
client().performRequest(new Request("DELETE", index + "/_doc/" + i));
@ -455,7 +456,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
}
final Version indexVersionCreated = indexVersionCreated(indexName);
if (indexVersionCreated.onOrAfter(Version.V_7_2_0)) {
if (indexVersionCreated.onOrAfter(LegacyESVersion.V_7_2_0)) {
// index was created on a version that supports the replication of closed indices,
// so we expect the index to be closed and replicated
ensureGreen(indexName);
@ -488,7 +489,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
closeIndex(indexName);
}
if (minimumNodeVersion.onOrAfter(Version.V_7_2_0)) {
if (minimumNodeVersion.onOrAfter(LegacyESVersion.V_7_2_0)) {
// index is created on a version that supports the replication of closed indices,
// so we expect the index to be closed and replicated
ensureGreen(indexName);
@ -523,12 +524,12 @@ public class RecoveryIT extends AbstractRollingTestCase {
}
final Version indexVersionCreated = indexVersionCreated(indexName);
if (indexVersionCreated.onOrAfter(Version.V_7_2_0)) {
if (indexVersionCreated.onOrAfter(LegacyESVersion.V_7_2_0)) {
// index was created on a version that supports the replication of closed indices,
// so we expect the index to be closed and replicated
ensureGreen(indexName);
assertClosedIndex(indexName, true);
if (minimumNodeVersion().onOrAfter(Version.V_7_2_0)) {
if (minimumNodeVersion().onOrAfter(LegacyESVersion.V_7_2_0)) {
switch (CLUSTER_TYPE) {
case OLD: break;
case MIXED:
@ -777,7 +778,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
final int numberOfReplicas = Integer.parseInt(
getIndexSettingsAsMap(indexName).get(IndexMetadata.SETTING_NUMBER_OF_REPLICAS).toString());
if (minimumNodeVersion.onOrAfter(Version.V_7_6_0)) {
if (minimumNodeVersion.onOrAfter(LegacyESVersion.V_7_6_0)) {
assertEquals(nodes.size() - 2, numberOfReplicas);
ensureGreen(indexName);
} else {

View File

@ -32,6 +32,7 @@
package org.opensearch.upgrades;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.client.Request;
import org.opensearch.client.ResponseException;
@ -58,7 +59,7 @@ public class SystemIndicesUpgradeIT extends AbstractRollingTestCase {
Request bulk = new Request("POST", "/_bulk");
bulk.addParameter("refresh", "true");
if (UPGRADE_FROM_VERSION.before(Version.V_7_0_0)) {
if (UPGRADE_FROM_VERSION.before(LegacyESVersion.V_7_0_0)) {
bulk.setJsonEntity("{\"index\": {\"_index\": \"test_index_old\", \"_type\" : \"_doc\"}}\n" +
"{\"f1\": \"v1\", \"f2\": \"v2\"}\n");
} else {
@ -90,7 +91,7 @@ public class SystemIndicesUpgradeIT extends AbstractRollingTestCase {
// make sure .tasks index exists
Request getTasksIndex = new Request("GET", "/.tasks");
getTasksIndex.addParameter("allow_no_indices", "false");
if (UPGRADE_FROM_VERSION.before(Version.V_7_0_0)) {
if (UPGRADE_FROM_VERSION.before(LegacyESVersion.V_7_0_0)) {
getTasksIndex.addParameter("include_type_name", "false");
}

View File

@ -32,7 +32,7 @@
package org.opensearch.upgrades;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.client.Request;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.Strings;
@ -95,7 +95,7 @@ public class TranslogPolicyIT extends AbstractFullClusterRestartTestCase {
@Before
public void setType() {
type = getOldClusterVersion().before(Version.V_6_7_0) ? "doc" : "_doc";
type = getOldClusterVersion().before(LegacyESVersion.V_6_7_0) ? "doc" : "_doc";
}
public void testEmptyIndex() throws Exception {
@ -103,7 +103,7 @@ public class TranslogPolicyIT extends AbstractFullClusterRestartTestCase {
final Settings.Builder settings = Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, between(0, 1));
if (getOldClusterVersion().onOrAfter(Version.V_6_5_0)) {
if (getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_5_0)) {
settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
}
if (randomBoolean()) {
@ -121,7 +121,7 @@ public class TranslogPolicyIT extends AbstractFullClusterRestartTestCase {
final Settings.Builder settings = Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1);
if (getOldClusterVersion().onOrAfter(Version.V_6_5_0)) {
if (getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_5_0)) {
settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
}
if (randomBoolean()) {

View File

@ -39,6 +39,7 @@ import org.apache.lucene.search.SortedSetSortField;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.util.Constants;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.admin.cluster.state.ClusterStateRequest;
import org.opensearch.action.admin.cluster.state.ClusterStateResponse;
@ -370,7 +371,7 @@ public class SplitIndexIT extends OpenSearchIntegTestCase {
public void testCreateSplitIndex() throws Exception {
internalCluster().ensureAtLeastNumDataNodes(2);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_rc2, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0_rc2, Version.CURRENT);
prepareCreate("source").setSettings(Settings.builder().put(indexSettings())
.put("number_of_shards", 1)
.put("index.version.created", version)

View File

@ -32,6 +32,7 @@
package org.opensearch.gateway;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.admin.indices.flush.SyncedFlushResponse;
import org.opensearch.action.admin.indices.stats.ShardStats;
import org.opensearch.cluster.metadata.IndexMetadata;
@ -374,7 +375,7 @@ public class ReplicaShardAllocatorIT extends OpenSearchIntegTestCase {
}
/**
* If the recovery source is on an old node (before <pre>{@link org.opensearch.Version#V_7_2_0}</pre>) then the recovery target
* If the recovery source is on an old node (before <pre>{@link LegacyESVersion#V_7_2_0}</pre>) then the recovery target
* won't have the safe commit after phase1 because the recovery source does not send the global checkpoint in the clean_files
* step. And if the recovery fails and retries, then the recovery stage might not transition properly. This test simulates
* this behavior by changing the global checkpoint in phase1 to unassigned.

View File

@ -32,7 +32,7 @@
package org.opensearch.get;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.admin.indices.alias.Alias;
import org.opensearch.action.get.GetResponse;
import org.opensearch.cluster.metadata.IndexMetadata;
@ -59,7 +59,8 @@ public class LegacyGetActionIT extends OpenSearchIntegTestCase {
.setSettings(
Settings.builder()
.put("index.refresh_interval", -1)
.put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), Version.V_6_0_0))); // multi-types in 6.0.0
// multi-types in 6.0.0
.put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), LegacyESVersion.V_6_0_0)));
try (XContentBuilder source = jsonBuilder().startObject().field("field1", "value").endObject()) {
client()

View File

@ -31,6 +31,7 @@
package org.opensearch.index.seqno;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.UUIDs;
@ -73,7 +74,7 @@ public class PeerRecoveryRetentionLeaseCreationIT extends OpenSearchIntegTestCas
.put(IndexMetadata.SETTING_VERSION_CREATED,
// simulate a version which supports soft deletes (v6.5.0-and-later) with which this node is compatible
VersionUtils.randomVersionBetween(random(),
Version.max(Version.CURRENT.minimumIndexCompatibilityVersion(), Version.V_6_5_0), Version.CURRENT))));
Version.max(Version.CURRENT.minimumIndexCompatibilityVersion(), LegacyESVersion.V_6_5_0), Version.CURRENT))));
ensureGreen("index");
// Change the node ID so that the persisted retention lease no longer applies.

View File

@ -32,6 +32,7 @@
package org.opensearch.indices;
import org.opensearch.LegacyESVersion;
import org.opensearch.OpenSearchException;
import org.opensearch.Version;
import org.opensearch.action.admin.cluster.state.ClusterStateResponse;
@ -227,7 +228,7 @@ public class IndicesLifecycleListenerIT extends OpenSearchIntegTestCase {
assertThat(stateChangeListenerNode1.afterCloseSettings.getAsInt(SETTING_NUMBER_OF_SHARDS, -1), equalTo(6));
assertThat(stateChangeListenerNode1.afterCloseSettings.getAsInt(SETTING_NUMBER_OF_REPLICAS, -1), equalTo(1));
if (Version.CURRENT.onOrAfter(Version.V_7_2_0)) {
if (Version.CURRENT.onOrAfter(LegacyESVersion.V_7_2_0)) {
assertShardStatesMatch(stateChangeListenerNode1, 6, CLOSED, CREATED, RECOVERING, POST_RECOVERY, STARTED);
assertShardStatesMatch(stateChangeListenerNode2, 6, CLOSED, CREATED, RECOVERING, POST_RECOVERY, STARTED);
} else {

View File

@ -32,7 +32,7 @@
package org.opensearch.indices.mapping;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.cluster.metadata.IndexMetadata;
@ -74,7 +74,7 @@ public class LegacyUpdateMappingIntegrationIT extends OpenSearchIntegTestCase {
.admin()
.indices()
.prepareCreate("test")
.setSettings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.V_6_3_0).build())
.setSettings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, LegacyESVersion.V_6_3_0).build())
.addMapping(MapperService.DEFAULT_MAPPING, defaultMapping)
.get();
}

View File

@ -32,7 +32,7 @@
package org.opensearch.indices.stats;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.admin.indices.stats.IndicesStatsRequestBuilder;
import org.opensearch.action.admin.indices.stats.IndicesStatsResponse;
import org.opensearch.cluster.metadata.IndexMetadata;
@ -59,7 +59,7 @@ public class LegacyIndexStatsIT extends OpenSearchIntegTestCase {
.admin()
.indices()
.prepareCreate("test1")
.setSettings(Settings.builder().put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), Version.V_6_0_0))
.setSettings(Settings.builder().put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), LegacyESVersion.V_6_0_0))
.addMapping("_doc", "bar", "type=text,fielddata=true", "baz", "type=text,fielddata=true")
.get());

View File

@ -31,6 +31,7 @@
package org.opensearch.search.aggregations.bucket;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.action.search.SearchPhaseExecutionException;
@ -77,7 +78,7 @@ public class GeoDistanceIT extends OpenSearchIntegTestCase {
return false;
}
private Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT);
private Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
private IndexRequestBuilder indexCity(String idx, String name, String... latLons) throws Exception {
XContentBuilder source = jsonBuilder().startObject().field("city", name);

View File

@ -34,6 +34,7 @@ package org.opensearch.search.aggregations.bucket;
import com.carrotsearch.hppc.ObjectIntHashMap;
import com.carrotsearch.hppc.ObjectIntMap;
import com.carrotsearch.hppc.cursors.ObjectIntCursor;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.action.search.SearchResponse;
@ -74,7 +75,7 @@ public class GeoHashGridIT extends OpenSearchIntegTestCase {
return false;
}
private Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
private Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0,
Version.CURRENT);
static ObjectIntMap<String> expectedDocCountsForGeoHash = null;

View File

@ -32,6 +32,7 @@
package org.opensearch.search.functionscore;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.ActionFuture;
import org.opensearch.action.index.IndexRequestBuilder;
@ -623,7 +624,7 @@ public class DecayFunctionScoreIT extends OpenSearchIntegTestCase {
}
public void testManyDocsLin() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("test").field("type", "text").endObject().startObject("date").field("type", "date")

View File

@ -32,6 +32,7 @@
package org.opensearch.search.geo;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.cluster.metadata.IndexMetadata;
@ -60,7 +61,7 @@ public class GeoBoundingBoxQueryIT extends OpenSearchIntegTestCase {
}
public void testSimpleBoundingBoxTest() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
@ -132,7 +133,7 @@ public class GeoBoundingBoxQueryIT extends OpenSearchIntegTestCase {
}
public void testLimit2BoundingBox() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
@ -187,7 +188,7 @@ public class GeoBoundingBoxQueryIT extends OpenSearchIntegTestCase {
}
public void testCompleteLonRange() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")

View File

@ -32,6 +32,7 @@
package org.opensearch.search.geo;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.search.SearchRequestBuilder;
import org.opensearch.action.search.SearchResponse;
@ -117,7 +118,7 @@ public class GeoDistanceIT extends OpenSearchIntegTestCase {
@Before
public void setupTestIndex() throws IOException {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")

View File

@ -40,6 +40,7 @@ import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
import org.apache.lucene.spatial.query.UnsupportedSpatialOperation;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.opensearch.action.bulk.BulkItemResponse;
@ -382,7 +383,7 @@ public class GeoFilterIT extends OpenSearchIntegTestCase {
public void testBulk() throws Exception {
byte[] bulkAction = unZipData("/org/opensearch/search/geo/gzippedmap.gz");
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()

View File

@ -32,6 +32,7 @@
package org.opensearch.search.geo;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.cluster.metadata.IndexMetadata;
@ -62,7 +63,7 @@ public class GeoPolygonIT extends OpenSearchIntegTestCase {
@Override
protected void setupSuiteScopeCluster() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();

View File

@ -32,6 +32,7 @@
package org.opensearch.search.sort;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.cluster.metadata.IndexMetadata;
@ -68,7 +69,7 @@ public class GeoDistanceIT extends OpenSearchIntegTestCase {
}
public void testDistanceSortingMVFields() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
@ -195,7 +196,7 @@ public class GeoDistanceIT extends OpenSearchIntegTestCase {
// Regression bug:
// https://github.com/elastic/elasticsearch/issues/2851
public void testDistanceSortingWithMissingGeoPoint() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
@ -238,7 +239,7 @@ public class GeoDistanceIT extends OpenSearchIntegTestCase {
}
public void testDistanceSortingNestedFields() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("company").startObject("properties")
@ -387,7 +388,7 @@ public class GeoDistanceIT extends OpenSearchIntegTestCase {
* Issue 3073
*/
public void testGeoDistanceFilter() throws IOException {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
double lat = 40.720611;

View File

@ -32,6 +32,7 @@
package org.opensearch.search.sort;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.cluster.metadata.IndexMetadata;
@ -82,7 +83,7 @@ public class GeoDistanceSortBuilderIT extends OpenSearchIntegTestCase {
* 1 2 3 4 5 6 7
*/
Version version = randomBoolean() ? Version.CURRENT
: VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT);
: VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point"));
XContentBuilder d1Builder = jsonBuilder();
@ -156,7 +157,7 @@ public class GeoDistanceSortBuilderIT extends OpenSearchIntegTestCase {
* d2 = (0, 1), (0, 5), (0, 6); so avg. distance is 4, median distance is 5
*/
Version version = randomBoolean() ? Version.CURRENT
: VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT);
: VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point"));
XContentBuilder d1Builder = jsonBuilder();
@ -221,7 +222,7 @@ public class GeoDistanceSortBuilderIT extends OpenSearchIntegTestCase {
* 1 2 3 4 5 6
*/
Version version = randomBoolean() ? Version.CURRENT
: VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT);
: VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point"));
XContentBuilder d1Builder = jsonBuilder();

View File

@ -202,10 +202,10 @@ public class Build {
// TODO - clean this up when OSS flavor is removed in all of the code base
// (Integ test zip still write OSS as distribution)
// See issue: https://github.com/opendistro-for-elasticsearch/search/issues/159
if (in.getVersion().onOrAfter(Version.V_6_3_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_3_0)) {
flavor = in.readString();
}
if (in.getVersion().onOrAfter(Version.V_6_3_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_3_0)) {
// be lenient when reading on the wire, the enumeration values from other versions might be different than what we know
type = Type.fromDisplayName(in.readString(), false);
} else {
@ -216,7 +216,7 @@ public class Build {
boolean snapshot = in.readBoolean();
final String version;
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_0_0)) {
version = in.readString();
} else {
version = in.getVersion().toString();
@ -229,12 +229,12 @@ public class Build {
// TODO - clean up this code when we remove all v6 bwc tests.
// TODO - clean this up when OSS flavor is removed in all of the code base
// See issue: https://github.com/opendistro-for-elasticsearch/search/issues/159
if (out.getVersion().onOrAfter(Version.V_6_3_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_3_0)) {
out.writeString("oss");
}
if (out.getVersion().onOrAfter(Version.V_6_3_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_3_0)) {
final Type buildType;
if (out.getVersion().before(Version.V_6_7_0) && build.type() == Type.DOCKER) {
if (out.getVersion().before(LegacyESVersion.V_6_7_0) && build.type() == Type.DOCKER) {
buildType = Type.TAR;
} else {
buildType = build.type();
@ -244,7 +244,7 @@ public class Build {
out.writeString(build.hash());
out.writeString(build.date());
out.writeBoolean(build.isSnapshot());
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_0_0)) {
out.writeString(build.getQualifiedVersion());
}
}

View File

@ -0,0 +1,159 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
package org.opensearch;
/**
* The Contents of this file were originally moved from {@link Version}.
*
* This class keeps all the supported OpenSearch predecessor versions for
* backward compatibility purpose.
*/
public class LegacyESVersion extends Version {
public static final LegacyESVersion V_6_0_0_alpha1 =
new LegacyESVersion(6000001, org.apache.lucene.util.Version.LUCENE_7_0_0);
public static final LegacyESVersion V_6_0_0_alpha2 =
new LegacyESVersion(6000002, org.apache.lucene.util.Version.LUCENE_7_0_0);
public static final LegacyESVersion V_6_0_0_beta1 =
new LegacyESVersion(6000026, org.apache.lucene.util.Version.LUCENE_7_0_0);
public static final LegacyESVersion V_6_0_0_beta2 =
new LegacyESVersion(6000027, org.apache.lucene.util.Version.LUCENE_7_0_0);
public static final LegacyESVersion V_6_0_0_rc1 =
new LegacyESVersion(6000051, org.apache.lucene.util.Version.LUCENE_7_0_0);
public static final LegacyESVersion V_6_0_0_rc2 =
new LegacyESVersion(6000052, org.apache.lucene.util.Version.LUCENE_7_0_1);
public static final LegacyESVersion V_6_0_0 =
new LegacyESVersion(6000099, org.apache.lucene.util.Version.LUCENE_7_0_1);
public static final LegacyESVersion V_6_0_1 =
new LegacyESVersion(6000199, org.apache.lucene.util.Version.LUCENE_7_0_1);
public static final LegacyESVersion V_6_1_0 = new LegacyESVersion(6010099, org.apache.lucene.util.Version.LUCENE_7_1_0);
public static final LegacyESVersion V_6_1_1 = new LegacyESVersion(6010199, org.apache.lucene.util.Version.LUCENE_7_1_0);
public static final LegacyESVersion V_6_1_2 = new LegacyESVersion(6010299, org.apache.lucene.util.Version.LUCENE_7_1_0);
public static final LegacyESVersion V_6_1_3 = new LegacyESVersion(6010399, org.apache.lucene.util.Version.LUCENE_7_1_0);
public static final LegacyESVersion V_6_1_4 = new LegacyESVersion(6010499, org.apache.lucene.util.Version.LUCENE_7_1_0);
// The below version is missing from the 7.3 JAR
private static final org.apache.lucene.util.Version LUCENE_7_2_1 = org.apache.lucene.util.Version.fromBits(7, 2, 1);
public static final LegacyESVersion V_6_2_0 = new LegacyESVersion(6020099, LUCENE_7_2_1);
public static final LegacyESVersion V_6_2_1 = new LegacyESVersion(6020199, LUCENE_7_2_1);
public static final LegacyESVersion V_6_2_2 = new LegacyESVersion(6020299, LUCENE_7_2_1);
public static final LegacyESVersion V_6_2_3 = new LegacyESVersion(6020399, LUCENE_7_2_1);
public static final LegacyESVersion V_6_2_4 = new LegacyESVersion(6020499, LUCENE_7_2_1);
public static final LegacyESVersion V_6_3_0 = new LegacyESVersion(6030099, org.apache.lucene.util.Version.LUCENE_7_3_1);
public static final LegacyESVersion V_6_3_1 = new LegacyESVersion(6030199, org.apache.lucene.util.Version.LUCENE_7_3_1);
public static final LegacyESVersion V_6_3_2 = new LegacyESVersion(6030299, org.apache.lucene.util.Version.LUCENE_7_3_1);
public static final LegacyESVersion V_6_4_0 = new LegacyESVersion(6040099, org.apache.lucene.util.Version.LUCENE_7_4_0);
public static final LegacyESVersion V_6_4_1 = new LegacyESVersion(6040199, org.apache.lucene.util.Version.LUCENE_7_4_0);
public static final LegacyESVersion V_6_4_2 = new LegacyESVersion(6040299, org.apache.lucene.util.Version.LUCENE_7_4_0);
public static final LegacyESVersion V_6_4_3 = new LegacyESVersion(6040399, org.apache.lucene.util.Version.LUCENE_7_4_0);
public static final LegacyESVersion V_6_5_0 = new LegacyESVersion(6050099, org.apache.lucene.util.Version.LUCENE_7_5_0);
public static final LegacyESVersion V_6_5_1 = new LegacyESVersion(6050199, org.apache.lucene.util.Version.LUCENE_7_5_0);
public static final LegacyESVersion V_6_5_2 = new LegacyESVersion(6050299, org.apache.lucene.util.Version.LUCENE_7_5_0);
public static final LegacyESVersion V_6_5_3 = new LegacyESVersion(6050399, org.apache.lucene.util.Version.LUCENE_7_5_0);
public static final LegacyESVersion V_6_5_4 = new LegacyESVersion(6050499, org.apache.lucene.util.Version.LUCENE_7_5_0);
public static final LegacyESVersion V_6_6_0 = new LegacyESVersion(6060099, org.apache.lucene.util.Version.LUCENE_7_6_0);
public static final LegacyESVersion V_6_6_1 = new LegacyESVersion(6060199, org.apache.lucene.util.Version.LUCENE_7_6_0);
public static final LegacyESVersion V_6_6_2 = new LegacyESVersion(6060299, org.apache.lucene.util.Version.LUCENE_7_6_0);
public static final LegacyESVersion V_6_7_0 = new LegacyESVersion(6070099, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final LegacyESVersion V_6_7_1 = new LegacyESVersion(6070199, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final LegacyESVersion V_6_7_2 = new LegacyESVersion(6070299, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final LegacyESVersion V_6_8_0 = new LegacyESVersion(6080099, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final LegacyESVersion V_6_8_1 = new LegacyESVersion(6080199, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final LegacyESVersion V_6_8_2 = new LegacyESVersion(6080299, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final LegacyESVersion V_6_8_3 = new LegacyESVersion(6080399, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final LegacyESVersion V_6_8_4 = new LegacyESVersion(6080499, org.apache.lucene.util.Version.LUCENE_7_7_2);
public static final LegacyESVersion V_6_8_5 = new LegacyESVersion(6080599, org.apache.lucene.util.Version.LUCENE_7_7_2);
public static final LegacyESVersion V_6_8_6 = new LegacyESVersion(6080699, org.apache.lucene.util.Version.LUCENE_7_7_2);
public static final LegacyESVersion V_6_8_7 = new LegacyESVersion(6080799, org.apache.lucene.util.Version.LUCENE_7_7_2);
public static final LegacyESVersion V_6_8_8 = new LegacyESVersion(6080899, org.apache.lucene.util.Version.LUCENE_7_7_2);
// Version constant for Lucene 7.7.3 release with index corruption bug fix
private static final org.apache.lucene.util.Version LUCENE_7_7_3 = org.apache.lucene.util.Version.fromBits(7, 7, 3);
public static final LegacyESVersion V_6_8_9 = new LegacyESVersion(6080999, LUCENE_7_7_3);
public static final LegacyESVersion V_6_8_10 = new LegacyESVersion(6081099, LUCENE_7_7_3);
public static final LegacyESVersion V_6_8_11 = new LegacyESVersion(6081199, LUCENE_7_7_3);
public static final LegacyESVersion V_6_8_12 = new LegacyESVersion(6081299, LUCENE_7_7_3);
public static final LegacyESVersion V_6_8_13 = new LegacyESVersion(6081399, LUCENE_7_7_3);
public static final LegacyESVersion V_6_8_14 = new LegacyESVersion(6081499, LUCENE_7_7_3);
public static final LegacyESVersion V_6_8_15 = new LegacyESVersion(6081599, org.apache.lucene.util.Version.LUCENE_7_7_3);
public static final LegacyESVersion V_7_0_0 = new LegacyESVersion(7000099, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final LegacyESVersion V_7_0_1 = new LegacyESVersion(7000199, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final LegacyESVersion V_7_1_0 = new LegacyESVersion(7010099, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final LegacyESVersion V_7_1_1 = new LegacyESVersion(7010199, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final LegacyESVersion V_7_2_0 = new LegacyESVersion(7020099, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final LegacyESVersion V_7_2_1 = new LegacyESVersion(7020199, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final LegacyESVersion V_7_3_0 = new LegacyESVersion(7030099, org.apache.lucene.util.Version.LUCENE_8_1_0);
public static final LegacyESVersion V_7_3_1 = new LegacyESVersion(7030199, org.apache.lucene.util.Version.LUCENE_8_1_0);
public static final LegacyESVersion V_7_3_2 = new LegacyESVersion(7030299, org.apache.lucene.util.Version.LUCENE_8_1_0);
public static final LegacyESVersion V_7_4_0 = new LegacyESVersion(7040099, org.apache.lucene.util.Version.LUCENE_8_2_0);
public static final LegacyESVersion V_7_4_1 = new LegacyESVersion(7040199, org.apache.lucene.util.Version.LUCENE_8_2_0);
public static final LegacyESVersion V_7_4_2 = new LegacyESVersion(7040299, org.apache.lucene.util.Version.LUCENE_8_2_0);
public static final LegacyESVersion V_7_5_0 = new LegacyESVersion(7050099, org.apache.lucene.util.Version.LUCENE_8_3_0);
public static final LegacyESVersion V_7_5_1 = new LegacyESVersion(7050199, org.apache.lucene.util.Version.LUCENE_8_3_0);
public static final LegacyESVersion V_7_5_2 = new LegacyESVersion(7050299, org.apache.lucene.util.Version.LUCENE_8_3_0);
public static final LegacyESVersion V_7_6_0 = new LegacyESVersion(7060099, org.apache.lucene.util.Version.LUCENE_8_4_0);
public static final LegacyESVersion V_7_6_1 = new LegacyESVersion(7060199, org.apache.lucene.util.Version.LUCENE_8_4_0);
public static final LegacyESVersion V_7_6_2 = new LegacyESVersion(7060299, org.apache.lucene.util.Version.LUCENE_8_4_0);
public static final LegacyESVersion V_7_7_0 = new LegacyESVersion(7070099, org.apache.lucene.util.Version.LUCENE_8_5_1);
public static final LegacyESVersion V_7_7_1 = new LegacyESVersion(7070199, org.apache.lucene.util.Version.LUCENE_8_5_1);
public static final LegacyESVersion V_7_8_0 = new LegacyESVersion(7080099, org.apache.lucene.util.Version.LUCENE_8_5_1);
public static final LegacyESVersion V_7_8_1 = new LegacyESVersion(7080199, org.apache.lucene.util.Version.LUCENE_8_5_1);
public static final LegacyESVersion V_7_9_0 = new LegacyESVersion(7090099, org.apache.lucene.util.Version.LUCENE_8_6_0);
public static final LegacyESVersion V_7_9_1 = new LegacyESVersion(7090199, org.apache.lucene.util.Version.LUCENE_8_6_2);
public static final LegacyESVersion V_7_9_2 = new LegacyESVersion(7090299, org.apache.lucene.util.Version.LUCENE_8_6_2);
public static final LegacyESVersion V_7_9_3 = new LegacyESVersion(7090399, org.apache.lucene.util.Version.LUCENE_8_6_2);
public static final LegacyESVersion V_7_10_0 = new LegacyESVersion(7100099, org.apache.lucene.util.Version.LUCENE_8_7_0);
public static final LegacyESVersion V_7_10_1 = new LegacyESVersion(7100199, org.apache.lucene.util.Version.LUCENE_8_7_0);
public static final LegacyESVersion V_7_10_2 = new LegacyESVersion(7100299, org.apache.lucene.util.Version.LUCENE_8_7_0);
public static final LegacyESVersion V_7_10_3 = new LegacyESVersion(7100399, org.apache.lucene.util.Version.LUCENE_8_7_0);
/**
 * Constructs a legacy (pre-fork) version constant.
 *
 * @param id            legacy decimal-packed version id (e.g. {@code 7100399} for 7.10.3)
 * @param luceneVersion the Lucene release this version shipped with
 */
protected LegacyESVersion(int id, org.apache.lucene.util.Version luceneVersion) {
    // flip the 28th bit of the version id
    // this will be flipped back in the parent class to correctly identify as a legacy version;
    // giving backwards compatibility with legacy systems
    super(id ^ 0x08000000, luceneVersion);
}
/**
 * Returns true iff this version is a beta build.
 * For majors before 5 the beta range is builds 0-49; from major 5 onward
 * builds 0-24 are alphas, so only 25-49 count as betas.
 */
@Override
public boolean isBeta() {
    if (major < 5) {
        return build < 50;
    }
    return build >= 25 && build < 50;
}
/**
 * Returns true iff this version is an alpha version
 * Note: This has been introduced in version 5 of the OpenSearch predecessor. Previous versions will never
 * have an alpha version.
 */
@Override
public boolean isAlpha() {
    // Builds 0-24 denote alphas, but only from major 5 onward; earlier majors
    // never published alpha builds (their 0-49 build range was all betas).
    // (Replaces the boolean-literal ternary `major < 5 ? false : build < 25`.)
    return major >= 5 && build < 25;
}
}

View File

@ -299,7 +299,7 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
public static OpenSearchException readException(StreamInput input, int id) throws IOException {
CheckedFunction<StreamInput, ? extends OpenSearchException, IOException> opensearchException = ID_TO_SUPPLIER.get(id);
if (opensearchException == null) {
if (id == 127 && input.getVersion().before(Version.V_7_5_0)) {
if (id == 127 && input.getVersion().before(LegacyESVersion.V_7_5_0)) {
// was SearchContextException
return new SearchException(input);
}
@ -1016,53 +1016,53 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
org.opensearch.env.ShardLockObtainFailedException::new, 147, UNKNOWN_VERSION_ADDED),
// 148 was UnknownNamedObjectException
TOO_MANY_BUCKETS_EXCEPTION(MultiBucketConsumerService.TooManyBucketsException.class,
MultiBucketConsumerService.TooManyBucketsException::new, 149, Version.V_6_2_0),
MultiBucketConsumerService.TooManyBucketsException::new, 149, LegacyESVersion.V_6_2_0),
COORDINATION_STATE_REJECTED_EXCEPTION(org.opensearch.cluster.coordination.CoordinationStateRejectedException.class,
org.opensearch.cluster.coordination.CoordinationStateRejectedException::new, 150, Version.V_7_0_0),
org.opensearch.cluster.coordination.CoordinationStateRejectedException::new, 150, LegacyESVersion.V_7_0_0),
SNAPSHOT_IN_PROGRESS_EXCEPTION(org.opensearch.snapshots.SnapshotInProgressException.class,
org.opensearch.snapshots.SnapshotInProgressException::new, 151, Version.V_6_7_0),
org.opensearch.snapshots.SnapshotInProgressException::new, 151, LegacyESVersion.V_6_7_0),
NO_SUCH_REMOTE_CLUSTER_EXCEPTION(org.opensearch.transport.NoSuchRemoteClusterException.class,
org.opensearch.transport.NoSuchRemoteClusterException::new, 152, Version.V_6_7_0),
org.opensearch.transport.NoSuchRemoteClusterException::new, 152, LegacyESVersion.V_6_7_0),
RETENTION_LEASE_ALREADY_EXISTS_EXCEPTION(
org.opensearch.index.seqno.RetentionLeaseAlreadyExistsException.class,
org.opensearch.index.seqno.RetentionLeaseAlreadyExistsException::new,
153,
Version.V_6_7_0),
LegacyESVersion.V_6_7_0),
RETENTION_LEASE_NOT_FOUND_EXCEPTION(
org.opensearch.index.seqno.RetentionLeaseNotFoundException.class,
org.opensearch.index.seqno.RetentionLeaseNotFoundException::new,
154,
Version.V_6_7_0),
LegacyESVersion.V_6_7_0),
SHARD_NOT_IN_PRIMARY_MODE_EXCEPTION(
org.opensearch.index.shard.ShardNotInPrimaryModeException.class,
org.opensearch.index.shard.ShardNotInPrimaryModeException::new,
155,
Version.V_6_8_1),
LegacyESVersion.V_6_8_1),
RETENTION_LEASE_INVALID_RETAINING_SEQUENCE_NUMBER_EXCEPTION(
org.opensearch.index.seqno.RetentionLeaseInvalidRetainingSeqNoException.class,
org.opensearch.index.seqno.RetentionLeaseInvalidRetainingSeqNoException::new,
156,
Version.V_7_5_0),
LegacyESVersion.V_7_5_0),
INGEST_PROCESSOR_EXCEPTION(
org.opensearch.ingest.IngestProcessorException.class,
org.opensearch.ingest.IngestProcessorException::new,
157,
Version.V_7_5_0),
LegacyESVersion.V_7_5_0),
PEER_RECOVERY_NOT_FOUND_EXCEPTION(
org.opensearch.indices.recovery.PeerRecoveryNotFound.class,
org.opensearch.indices.recovery.PeerRecoveryNotFound::new,
158,
Version.V_7_9_0),
LegacyESVersion.V_7_9_0),
NODE_HEALTH_CHECK_FAILURE_EXCEPTION(
org.opensearch.cluster.coordination.NodeHealthCheckFailureException.class,
org.opensearch.cluster.coordination.NodeHealthCheckFailureException::new,
159,
Version.V_7_9_0),
LegacyESVersion.V_7_9_0),
NO_SEED_NODE_LEFT_EXCEPTION(
org.opensearch.transport.NoSeedNodeLeftException.class,
org.opensearch.transport.NoSeedNodeLeftException::new,
160,
Version.V_7_10_0);
LegacyESVersion.V_7_10_0);
final Class<? extends OpenSearchException> exceptionClass;
final CheckedFunction<StreamInput, ? extends OpenSearchException, IOException> constructor;

View File

@ -72,101 +72,8 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final int V_EMPTY_ID = 0;
public static final Version V_EMPTY = new Version(V_EMPTY_ID, org.apache.lucene.util.Version.LATEST);
public static final Version V_6_0_0_alpha1 =
new Version(6000001, org.apache.lucene.util.Version.LUCENE_7_0_0);
public static final Version V_6_0_0_alpha2 =
new Version(6000002, org.apache.lucene.util.Version.LUCENE_7_0_0);
public static final Version V_6_0_0_beta1 =
new Version(6000026, org.apache.lucene.util.Version.LUCENE_7_0_0);
public static final Version V_6_0_0_beta2 =
new Version(6000027, org.apache.lucene.util.Version.LUCENE_7_0_0);
public static final Version V_6_0_0_rc1 =
new Version(6000051, org.apache.lucene.util.Version.LUCENE_7_0_0);
public static final Version V_6_0_0_rc2 =
new Version(6000052, org.apache.lucene.util.Version.LUCENE_7_0_1);
public static final Version V_6_0_0 =
new Version(6000099, org.apache.lucene.util.Version.LUCENE_7_0_1);
public static final Version V_6_0_1 =
new Version(6000199, org.apache.lucene.util.Version.LUCENE_7_0_1);
public static final Version V_6_1_0 = new Version(6010099, org.apache.lucene.util.Version.LUCENE_7_1_0);
public static final Version V_6_1_1 = new Version(6010199, org.apache.lucene.util.Version.LUCENE_7_1_0);
public static final Version V_6_1_2 = new Version(6010299, org.apache.lucene.util.Version.LUCENE_7_1_0);
public static final Version V_6_1_3 = new Version(6010399, org.apache.lucene.util.Version.LUCENE_7_1_0);
public static final Version V_6_1_4 = new Version(6010499, org.apache.lucene.util.Version.LUCENE_7_1_0);
// The below version is missing from the 7.3 JAR
private static final org.apache.lucene.util.Version LUCENE_7_2_1 = org.apache.lucene.util.Version.fromBits(7, 2, 1);
// Version constant for Lucene 7.7.3 release with index corruption bug fix
private static final org.apache.lucene.util.Version LUCENE_7_7_3 = org.apache.lucene.util.Version.fromBits(7, 7, 3);
public static final Version V_6_2_0 = new Version(6020099, LUCENE_7_2_1);
public static final Version V_6_2_1 = new Version(6020199, LUCENE_7_2_1);
public static final Version V_6_2_2 = new Version(6020299, LUCENE_7_2_1);
public static final Version V_6_2_3 = new Version(6020399, LUCENE_7_2_1);
public static final Version V_6_2_4 = new Version(6020499, LUCENE_7_2_1);
public static final Version V_6_3_0 = new Version(6030099, org.apache.lucene.util.Version.LUCENE_7_3_1);
public static final Version V_6_3_1 = new Version(6030199, org.apache.lucene.util.Version.LUCENE_7_3_1);
public static final Version V_6_3_2 = new Version(6030299, org.apache.lucene.util.Version.LUCENE_7_3_1);
public static final Version V_6_4_0 = new Version(6040099, org.apache.lucene.util.Version.LUCENE_7_4_0);
public static final Version V_6_4_1 = new Version(6040199, org.apache.lucene.util.Version.LUCENE_7_4_0);
public static final Version V_6_4_2 = new Version(6040299, org.apache.lucene.util.Version.LUCENE_7_4_0);
public static final Version V_6_4_3 = new Version(6040399, org.apache.lucene.util.Version.LUCENE_7_4_0);
public static final Version V_6_5_0 = new Version(6050099, org.apache.lucene.util.Version.LUCENE_7_5_0);
public static final Version V_6_5_1 = new Version(6050199, org.apache.lucene.util.Version.LUCENE_7_5_0);
public static final Version V_6_5_2 = new Version(6050299, org.apache.lucene.util.Version.LUCENE_7_5_0);
public static final Version V_6_5_3 = new Version(6050399, org.apache.lucene.util.Version.LUCENE_7_5_0);
public static final Version V_6_5_4 = new Version(6050499, org.apache.lucene.util.Version.LUCENE_7_5_0);
public static final Version V_6_6_0 = new Version(6060099, org.apache.lucene.util.Version.LUCENE_7_6_0);
public static final Version V_6_6_1 = new Version(6060199, org.apache.lucene.util.Version.LUCENE_7_6_0);
public static final Version V_6_6_2 = new Version(6060299, org.apache.lucene.util.Version.LUCENE_7_6_0);
public static final Version V_6_7_0 = new Version(6070099, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final Version V_6_7_1 = new Version(6070199, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final Version V_6_7_2 = new Version(6070299, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final Version V_6_8_0 = new Version(6080099, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final Version V_6_8_1 = new Version(6080199, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final Version V_6_8_2 = new Version(6080299, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final Version V_6_8_3 = new Version(6080399, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final Version V_6_8_4 = new Version(6080499, org.apache.lucene.util.Version.LUCENE_7_7_2);
public static final Version V_6_8_5 = new Version(6080599, org.apache.lucene.util.Version.LUCENE_7_7_2);
public static final Version V_6_8_6 = new Version(6080699, org.apache.lucene.util.Version.LUCENE_7_7_2);
public static final Version V_6_8_7 = new Version(6080799, org.apache.lucene.util.Version.LUCENE_7_7_2);
public static final Version V_6_8_8 = new Version(6080899, org.apache.lucene.util.Version.LUCENE_7_7_2);
public static final Version V_6_8_9 = new Version(6080999, LUCENE_7_7_3);
public static final Version V_6_8_10 = new Version(6081099, LUCENE_7_7_3);
public static final Version V_6_8_11 = new Version(6081199, LUCENE_7_7_3);
public static final Version V_6_8_12 = new Version(6081299, LUCENE_7_7_3);
public static final Version V_6_8_13 = new Version(6081399, LUCENE_7_7_3);
public static final Version V_6_8_14 = new Version(6081499, LUCENE_7_7_3);
public static final Version V_6_8_15 = new Version(6081599, org.apache.lucene.util.Version.LUCENE_7_7_3);
public static final Version V_7_0_0 = new Version(7000099, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final Version V_7_0_1 = new Version(7000199, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final Version V_7_1_0 = new Version(7010099, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final Version V_7_1_1 = new Version(7010199, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final Version V_7_2_0 = new Version(7020099, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final Version V_7_2_1 = new Version(7020199, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final Version V_7_3_0 = new Version(7030099, org.apache.lucene.util.Version.LUCENE_8_1_0);
public static final Version V_7_3_1 = new Version(7030199, org.apache.lucene.util.Version.LUCENE_8_1_0);
public static final Version V_7_3_2 = new Version(7030299, org.apache.lucene.util.Version.LUCENE_8_1_0);
public static final Version V_7_4_0 = new Version(7040099, org.apache.lucene.util.Version.LUCENE_8_2_0);
public static final Version V_7_4_1 = new Version(7040199, org.apache.lucene.util.Version.LUCENE_8_2_0);
public static final Version V_7_4_2 = new Version(7040299, org.apache.lucene.util.Version.LUCENE_8_2_0);
public static final Version V_7_5_0 = new Version(7050099, org.apache.lucene.util.Version.LUCENE_8_3_0);
public static final Version V_7_5_1 = new Version(7050199, org.apache.lucene.util.Version.LUCENE_8_3_0);
public static final Version V_7_5_2 = new Version(7050299, org.apache.lucene.util.Version.LUCENE_8_3_0);
public static final Version V_7_6_0 = new Version(7060099, org.apache.lucene.util.Version.LUCENE_8_4_0);
public static final Version V_7_6_1 = new Version(7060199, org.apache.lucene.util.Version.LUCENE_8_4_0);
public static final Version V_7_6_2 = new Version(7060299, org.apache.lucene.util.Version.LUCENE_8_4_0);
public static final Version V_7_7_0 = new Version(7070099, org.apache.lucene.util.Version.LUCENE_8_5_1);
public static final Version V_7_7_1 = new Version(7070199, org.apache.lucene.util.Version.LUCENE_8_5_1);
public static final Version V_7_8_0 = new Version(7080099, org.apache.lucene.util.Version.LUCENE_8_5_1);
public static final Version V_7_8_1 = new Version(7080199, org.apache.lucene.util.Version.LUCENE_8_5_1);
public static final Version V_7_9_0 = new Version(7090099, org.apache.lucene.util.Version.LUCENE_8_6_0);
public static final Version V_7_9_1 = new Version(7090199, org.apache.lucene.util.Version.LUCENE_8_6_2);
public static final Version V_7_9_2 = new Version(7090299, org.apache.lucene.util.Version.LUCENE_8_6_2);
public static final Version V_7_9_3 = new Version(7090399, org.apache.lucene.util.Version.LUCENE_8_6_2);
public static final Version V_7_10_0 = new Version(7100099, org.apache.lucene.util.Version.LUCENE_8_7_0);
public static final Version V_7_10_1 = new Version(7100199, org.apache.lucene.util.Version.LUCENE_8_7_0);
public static final Version V_7_10_2 = new Version(7100299, org.apache.lucene.util.Version.LUCENE_8_7_0);
public static final Version V_7_10_3 = new Version(7100399, org.apache.lucene.util.Version.LUCENE_8_7_0);
public static final Version CURRENT = V_7_10_3;
public static final Version V_1_0_0 = new Version(1000099, org.apache.lucene.util.Version.LUCENE_8_7_0);
public static final Version CURRENT = V_1_0_0;
private static final ImmutableOpenIntMap<Version> idToVersion;
private static final ImmutableOpenMap<String, Version> stringToVersion;
@ -175,8 +82,8 @@ public class Version implements Comparable<Version>, ToXContentFragment {
final ImmutableOpenIntMap.Builder<Version> builder = ImmutableOpenIntMap.builder();
final ImmutableOpenMap.Builder<String, Version> builderByString = ImmutableOpenMap.builder();
for (final Field declaredField : Version.class.getFields()) {
if (declaredField.getType().equals(Version.class)) {
for (final Field declaredField : LegacyESVersion.class.getFields()) {
if (declaredField.getType().equals(Version.class) || declaredField.getType().equals(LegacyESVersion.class)) {
final String fieldName = declaredField.getName();
if (fieldName.equals("CURRENT") || fieldName.equals("V_EMPTY")) {
continue;
@ -188,13 +95,18 @@ public class Version implements Comparable<Version>, ToXContentFragment {
if (Assertions.ENABLED) {
final String[] fields = fieldName.split("_");
if (fields.length == 5) {
assert fields[1].equals("6") && fields[2].equals("0") :
assert (fields[1].equals("1") || fields[1].equals("6")) && fields[2].equals("0") :
"field " + fieldName + " should not have a build qualifier";
} else {
final int major = Integer.valueOf(fields[1]) * 1000000;
final int minor = Integer.valueOf(fields[2]) * 10000;
final int revision = Integer.valueOf(fields[3]) * 100;
final int expectedId = major + minor + revision + 99;
final int expectedId;
if (fields[1].equals("1")) {
expectedId = 0x08000000 ^ (major + minor + revision + 99);
} else {
expectedId = (major + minor + revision + 99);
}
assert version.id == expectedId :
"expected version [" + fieldName + "] to have id [" + expectedId + "] but was [" + version.id + "]";
}
@ -236,7 +148,8 @@ public class Version implements Comparable<Version>, ToXContentFragment {
// least correct for patch versions of known minors since we never
// update the Lucene dependency for patch versions.
List<Version> versions = DeclaredVersionsHolder.DECLARED_VERSIONS;
Version tmp = new Version(id, org.apache.lucene.util.Version.LATEST);
Version tmp = id < MASK ? new LegacyESVersion(id, org.apache.lucene.util.Version.LATEST) :
new Version(id ^ MASK, org.apache.lucene.util.Version.LATEST);
int index = Collections.binarySearch(versions, tmp);
if (index < 0) {
index = -2 - index;
@ -253,7 +166,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
} else {
luceneVersion = versions.get(index).luceneVersion;
}
return new Version(id, luceneVersion);
return id < MASK ? new LegacyESVersion(id, luceneVersion) : new Version(id ^ MASK, luceneVersion);
}
/**
@ -264,7 +177,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
*/
public static Version indexCreated(Settings indexSettings) {
final Version indexVersion = IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(indexSettings);
if (indexVersion == V_EMPTY) {
if (indexVersion.equals(V_EMPTY)) {
final String message = String.format(
Locale.ROOT,
"[%s] is not present in the index settings for index with UUID [%s]",
@ -279,6 +192,14 @@ public class Version implements Comparable<Version>, ToXContentFragment {
out.writeVInt(version.id);
}
/**
 * Encodes a legacy (pre-fork) version id using decimal place-value packing:
 * {@code major * 1_000_000 + minor * 10_000 + revision * 100 + build}.
 *
 * @param major    major version component
 * @param minor    minor version component (two decimal digits)
 * @param revision revision/patch component (two decimal digits)
 * @param build    build qualifier (two decimal digits)
 * @return the packed legacy version id
 */
public static int computeLegacyID(int major, int minor, int revision, int build) {
    // Accumulate from the least-significant component upward; two decimal
    // digits are reserved for each of build, revision, and minor.
    int id = build;
    id += revision * 100;
    id += minor * 10_000;
    id += major * 1_000_000;
    return id;
}
/**
 * Encodes an OpenSearch version id: the legacy decimal packing with the
 * 28th bit flipped (via {@code MASK}) to distinguish it from legacy ids.
 *
 * @param major    major version component
 * @param minor    minor version component (two decimal digits)
 * @param revision revision/patch component (two decimal digits)
 * @param build    build qualifier (two decimal digits)
 * @return the masked OpenSearch version id
 */
public static int computeID(int major, int minor, int revision, int build) {
    final int legacyId = computeLegacyID(major, minor, revision, build);
    return legacyId ^ MASK;
}
/**
* Returns the minimum version between the 2.
*/
@ -355,6 +276,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
}
}
public static final int MASK = 0x08000000;
public final int id;
public final byte major;
public final byte minor;
@ -363,7 +285,14 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public final org.apache.lucene.util.Version luceneVersion;
Version(int id, org.apache.lucene.util.Version luceneVersion) {
this.id = id;
// flip the 28th bit of the ID; identify as an opensearch vs legacy system:
// we start from version 1 for opensearch, so ignore the 0 (empty) version
if(id != 0) {
this.id = id ^ MASK;
id &= 0xF7FFFFFF;
} else {
this.id = id;
}
this.major = (byte) ((id / 1000000) % 100);
this.minor = (byte) ((id / 10000) % 100);
this.revision = (byte) ((id / 100) % 100);
@ -402,8 +331,9 @@ public class Version implements Comparable<Version>, ToXContentFragment {
* is not cheap. Since computing the minimum compatibility version can occur often, we use this holder to compute the declared versions
* lazily once.
*/
private static class DeclaredVersionsHolder {
static final List<Version> DECLARED_VERSIONS = Collections.unmodifiableList(getDeclaredVersions(Version.class));
static class DeclaredVersionsHolder {
// use LegacyESVersion.class since it inherits Version fields
protected static final List<Version> DECLARED_VERSIONS = Collections.unmodifiableList(getDeclaredVersions(LegacyESVersion.class));
}
// lazy initialized because we don't yet have the declared versions ready when instantiating the cached Version
@ -431,7 +361,9 @@ public class Version implements Comparable<Version>, ToXContentFragment {
}
private Version computeMinCompatVersion() {
if (major == 6) {
if (major == 1) {
return Version.fromId(6080099);
} else if (major == 6) {
// force the minimum compatibility for version 6 to 5.6 since we don't reference version 5 anymore
return Version.fromId(5060099);
} else if (major >= 7) {
@ -471,13 +403,13 @@ public class Version implements Comparable<Version>, ToXContentFragment {
final int bwcMajor;
if (major == 5) {
bwcMajor = 2; // we jumped from 2 to 5
} else if (major == 7) {
return V_6_0_0_beta1;
} else if (major == 7 || major == 1) {
return LegacyESVersion.V_6_0_0_beta1;
} else {
bwcMajor = major - 1;
}
final int bwcMinor = 0;
return Version.min(this, fromId(bwcMajor * 1000000 + bwcMinor * 10000 + 99));
return Version.min(this, fromId((bwcMajor * 1000000 + bwcMinor * 10000 + 99) ));
}
/**
@ -487,7 +419,11 @@ public class Version implements Comparable<Version>, ToXContentFragment {
boolean compatible = onOrAfter(version.minimumCompatibilityVersion())
&& version.onOrAfter(minimumCompatibilityVersion());
assert compatible == false || Math.max(major, version.major) - Math.min(major, version.major) <= 1;
// OpenSearch version 1 is the functional equivalent of predecessor version 7
int a = major == 1 ? 7 : major;
int b = version.major == 1 ? 7 : version.major;
assert compatible == false || Math.max(a, b) - Math.min(a, b) <= 1;
return compatible;
}
@ -553,7 +489,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
}
public boolean isBeta() {
return major < 5 ? build < 50 : build >= 25 && build < 50;
return build >= 25 && build < 50;
}
/**
@ -562,7 +498,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
* have an alpha version.
*/
public boolean isAlpha() {
return major < 5 ? false : build < 25;
return build < 25;
}
public boolean isRC() {
@ -586,7 +522,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
if (false == Modifier.isStatic(mod) && Modifier.isFinal(mod) && Modifier.isPublic(mod)) {
continue;
}
if (field.getType() != Version.class) {
if (field.getType() != Version.class && field.getType() != LegacyESVersion.class) {
continue;
}
switch (field.getName()) {

View File

@ -31,7 +31,7 @@
package org.opensearch.action;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.WriteRequest;
import org.opensearch.action.support.WriteRequest.RefreshPolicy;
import org.opensearch.action.support.WriteResponse;
@ -168,7 +168,7 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr
type = in.readString();
id = in.readString();
version = in.readZLong();
if (in.getVersion().onOrAfter(Version.V_6_0_0_alpha1)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
seqNo = in.readZLong();
primaryTerm = in.readVLong();
} else {
@ -317,7 +317,7 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr
out.writeString(type);
out.writeString(id);
out.writeZLong(version);
if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha1)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
out.writeZLong(seqNo);
out.writeVLong(primaryTerm);
}

View File

@ -31,7 +31,7 @@
package org.opensearch.action.admin.cluster.configuration;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.support.master.MasterNodeRequest;
import org.opensearch.cluster.ClusterState;
@ -107,7 +107,7 @@ public class AddVotingConfigExclusionsRequest extends MasterNodeRequest<AddVotin
public AddVotingConfigExclusionsRequest(StreamInput in) throws IOException {
super(in);
nodeDescriptions = in.readStringArray();
if (in.getVersion().onOrAfter(Version.V_7_8_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_8_0)) {
nodeIds = in.readStringArray();
nodeNames = in.readStringArray();
} else {
@ -229,7 +229,7 @@ public class AddVotingConfigExclusionsRequest extends MasterNodeRequest<AddVotin
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArray(nodeDescriptions);
if (out.getVersion().onOrAfter(Version.V_7_8_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_8_0)) {
out.writeStringArray(nodeIds);
out.writeStringArray(nodeNames);
}

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.health;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.IndicesRequest;
import org.opensearch.action.support.ActiveShardCount;
@ -85,10 +85,10 @@ public class ClusterHealthRequest extends MasterNodeReadRequest<ClusterHealthReq
if (in.readBoolean()) {
waitForEvents = Priority.readFrom(in);
}
if (in.getVersion().onOrAfter(Version.V_6_2_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_2_0)) {
waitForNoInitializingShards = in.readBoolean();
}
if (in.getVersion().onOrAfter(Version.V_7_2_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_2_0)) {
indicesOptions = IndicesOptions.readIndicesOptions(in);
} else {
indicesOptions = IndicesOptions.lenientExpandOpen();
@ -119,10 +119,10 @@ public class ClusterHealthRequest extends MasterNodeReadRequest<ClusterHealthReq
out.writeBoolean(true);
Priority.writeTo(waitForEvents, out);
}
if (out.getVersion().onOrAfter(Version.V_6_2_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_2_0)) {
out.writeBoolean(waitForNoInitializingShards);
}
if (out.getVersion().onOrAfter(Version.V_7_2_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_2_0)) {
indicesOptions.writeIndicesOptions(out);
}
}

View File

@ -33,6 +33,7 @@
package org.opensearch.action.admin.cluster.node.info;
import org.opensearch.Build;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.support.nodes.BaseNodeResponse;
import org.opensearch.cluster.node.DiscoveryNode;
@ -96,7 +97,7 @@ public class NodeInfo extends BaseNodeResponse {
addInfoIfNonNull(HttpInfo.class, in.readOptionalWriteable(HttpInfo::new));
addInfoIfNonNull(PluginsAndModules.class, in.readOptionalWriteable(PluginsAndModules::new));
addInfoIfNonNull(IngestInfo.class, in.readOptionalWriteable(IngestInfo::new));
if (in.getVersion().onOrAfter(Version.V_7_10_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_10_0)) {
addInfoIfNonNull(AggregationInfo.class, in.readOptionalWriteable(AggregationInfo::new));
}
}
@ -205,7 +206,7 @@ public class NodeInfo extends BaseNodeResponse {
out.writeOptionalWriteable(getInfo(HttpInfo.class));
out.writeOptionalWriteable(getInfo(PluginsAndModules.class));
out.writeOptionalWriteable(getInfo(IngestInfo.class));
if (out.getVersion().onOrAfter(Version.V_7_10_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_10_0)) {
out.writeOptionalWriteable(getInfo(AggregationInfo.class));
}
}

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.node.info;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.nodes.BaseNodesRequest;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
@ -61,7 +61,7 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
public NodesInfoRequest(StreamInput in) throws IOException {
super(in);
requestedMetrics.clear();
if (in.getVersion().before(Version.V_7_7_0)){
if (in.getVersion().before(LegacyESVersion.V_7_7_0)){
// prior to version 8.x, a NodesInfoRequest was serialized as a list
// of booleans in a fixed order
optionallyAddMetric(in.readBoolean(), Metric.SETTINGS.metricName());
@ -163,7 +163,7 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (out.getVersion().before(Version.V_7_7_0)){
if (out.getVersion().before(LegacyESVersion.V_7_7_0)){
// prior to version 8.x, a NodesInfoRequest was serialized as a list
// of booleans in a fixed order
out.writeBoolean(Metric.SETTINGS.containedIn(requestedMetrics));

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.node.reload;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.nodes.BaseNodesRequest;
import org.opensearch.common.io.stream.StreamInput;
@ -66,7 +66,7 @@ public class NodesReloadSecureSettingsRequest extends BaseNodesRequest<NodesRelo
public NodesReloadSecureSettingsRequest(StreamInput in) throws IOException {
super(in);
if (in.getVersion().onOrAfter(Version.V_7_7_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_7_0)) {
final BytesReference bytesRef = in.readOptionalBytesReference();
if (bytesRef != null) {
byte[] bytes = BytesReference.toBytes(bytesRef);
@ -112,7 +112,7 @@ public class NodesReloadSecureSettingsRequest extends BaseNodesRequest<NodesRelo
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (out.getVersion().onOrAfter(Version.V_7_4_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_4_0)) {
if (this.secureSettingsPassword == null) {
out.writeOptionalBytesReference(null);
} else {

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.node.stats;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.nodes.BaseNodeResponse;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.node.DiscoveryNodeRole;
@ -129,20 +129,20 @@ public class NodeStats extends BaseNodeResponse implements ToXContentFragment {
scriptStats = in.readOptionalWriteable(ScriptStats::new);
discoveryStats = in.readOptionalWriteable(DiscoveryStats::new);
ingestStats = in.readOptionalWriteable(IngestStats::new);
if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
adaptiveSelectionStats = in.readOptionalWriteable(AdaptiveSelectionStats::new);
} else {
adaptiveSelectionStats = null;
}
scriptCacheStats = null;
if (in.getVersion().onOrAfter(Version.V_7_8_0)) {
if (in.getVersion().before(Version.V_7_9_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_8_0)) {
if (in.getVersion().before(LegacyESVersion.V_7_9_0)) {
scriptCacheStats = in.readOptionalWriteable(ScriptCacheStats::new);
} else if (scriptStats != null) {
scriptCacheStats = scriptStats.toScriptCacheStats();
}
}
if (in.getVersion().onOrAfter(Version.V_7_9_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_9_0)) {
indexingPressureStats = in.readOptionalWriteable(IndexingPressureStats::new);
} else {
indexingPressureStats = null;
@ -301,12 +301,12 @@ public class NodeStats extends BaseNodeResponse implements ToXContentFragment {
out.writeOptionalWriteable(scriptStats);
out.writeOptionalWriteable(discoveryStats);
out.writeOptionalWriteable(ingestStats);
if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
out.writeOptionalWriteable(adaptiveSelectionStats);
} if (out.getVersion().onOrAfter(Version.V_7_8_0) && out.getVersion().before(Version.V_7_9_0)) {
} if (out.getVersion().onOrAfter(LegacyESVersion.V_7_8_0) && out.getVersion().before(LegacyESVersion.V_7_9_0)) {
out.writeOptionalWriteable(scriptCacheStats);
}
if (out.getVersion().onOrAfter(Version.V_7_9_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_9_0)) {
out.writeOptionalWriteable(indexingPressureStats);
}
}

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.node.stats;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.admin.indices.stats.CommonStatsFlags;
import org.opensearch.action.support.nodes.BaseNodesRequest;
import org.opensearch.common.io.stream.StreamInput;
@ -62,7 +62,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
indices = new CommonStatsFlags(in);
requestedMetrics.clear();
if (in.getVersion().before(Version.V_7_7_0)) {
if (in.getVersion().before(LegacyESVersion.V_7_7_0)) {
optionallyAddMetric(in.readBoolean(), Metric.OS.metricName());
optionallyAddMetric(in.readBoolean(), Metric.PROCESS.metricName());
optionallyAddMetric(in.readBoolean(), Metric.JVM.metricName());
@ -74,7 +74,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
optionallyAddMetric(in.readBoolean(), Metric.SCRIPT.metricName());
optionallyAddMetric(in.readBoolean(), Metric.DISCOVERY.metricName());
optionallyAddMetric(in.readBoolean(), Metric.INGEST.metricName());
if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
optionallyAddMetric(in.readBoolean(), Metric.ADAPTIVE_SELECTION.metricName());
}
} else {
@ -200,7 +200,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
indices.writeTo(out);
if (out.getVersion().before(Version.V_7_7_0)) {
if (out.getVersion().before(LegacyESVersion.V_7_7_0)) {
out.writeBoolean(Metric.OS.containedIn(requestedMetrics));
out.writeBoolean(Metric.PROCESS.containedIn(requestedMetrics));
out.writeBoolean(Metric.JVM.containedIn(requestedMetrics));
@ -212,7 +212,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
out.writeBoolean(Metric.SCRIPT.containedIn(requestedMetrics));
out.writeBoolean(Metric.DISCOVERY.containedIn(requestedMetrics));
out.writeBoolean(Metric.INGEST.containedIn(requestedMetrics));
if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
out.writeBoolean(Metric.ADAPTIVE_SELECTION.containedIn(requestedMetrics));
}
} else {

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.node.tasks.cancel;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.tasks.BaseTasksRequest;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
@ -58,7 +58,7 @@ public class CancelTasksRequest extends BaseTasksRequest<CancelTasksRequest> {
public CancelTasksRequest(StreamInput in) throws IOException {
super(in);
this.reason = in.readString();
if (in.getVersion().onOrAfter(Version.V_7_8_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_8_0)) {
waitForCompletion = in.readBoolean();
}
}
@ -67,7 +67,7 @@ public class CancelTasksRequest extends BaseTasksRequest<CancelTasksRequest> {
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(reason);
if (out.getVersion().onOrAfter(Version.V_7_8_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_8_0)) {
out.writeBoolean(waitForCompletion);
}
}

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.node.usage;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.nodes.BaseNodeResponse;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.common.io.stream.StreamInput;
@ -56,7 +56,7 @@ public class NodeUsage extends BaseNodeResponse implements ToXContentFragment {
timestamp = in.readLong();
sinceTime = in.readLong();
restUsage = (Map<String, Long>) in.readGenericValue();
if (in.getVersion().onOrAfter(Version.V_7_8_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_8_0)) {
aggregationUsage = (Map<String, Object>) in.readGenericValue();
} else {
aggregationUsage = null;
@ -134,7 +134,7 @@ public class NodeUsage extends BaseNodeResponse implements ToXContentFragment {
out.writeLong(timestamp);
out.writeLong(sinceTime);
out.writeGenericValue(restUsage);
if (out.getVersion().onOrAfter(Version.V_7_8_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_8_0)) {
out.writeGenericValue(aggregationUsage);
}
}

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.node.usage;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.nodes.BaseNodesRequest;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
@ -47,7 +47,7 @@ public class NodesUsageRequest extends BaseNodesRequest<NodesUsageRequest> {
public NodesUsageRequest(StreamInput in) throws IOException {
super(in);
this.restActions = in.readBoolean();
if (in.getVersion().onOrAfter(Version.V_7_8_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_8_0)) {
this.aggregations = in.readBoolean();
}
}
@ -112,7 +112,7 @@ public class NodesUsageRequest extends BaseNodesRequest<NodesUsageRequest> {
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(restActions);
if (out.getVersion().onOrAfter(Version.V_7_8_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_8_0)) {
out.writeBoolean(aggregations);
}
}

View File

@ -34,6 +34,7 @@ package org.opensearch.action.admin.cluster.repositories.cleanup;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.ActionListener;
import org.opensearch.action.ActionRunnable;
@ -87,7 +88,7 @@ public final class TransportCleanupRepositoryAction extends TransportMasterNodeA
private static final Logger logger = LogManager.getLogger(TransportCleanupRepositoryAction.class);
private static final Version MIN_VERSION = Version.V_7_4_0;
private static final Version MIN_VERSION = LegacyESVersion.V_7_4_0;
private final RepositoriesService repositoriesService;

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.reroute;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.cluster.ClusterModule;
import org.opensearch.cluster.ClusterState;
@ -54,8 +54,8 @@ public class ClusterRerouteResponse extends AcknowledgedResponse implements ToXC
private final RoutingExplanations explanations;
ClusterRerouteResponse(StreamInput in) throws IOException {
super(in, in.getVersion().onOrAfter(Version.V_6_4_0));
if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
super(in, in.getVersion().onOrAfter(LegacyESVersion.V_6_4_0));
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_4_0)) {
state = ClusterState.readFrom(in, null);
explanations = RoutingExplanations.readFrom(in);
} else {
@ -84,12 +84,12 @@ public class ClusterRerouteResponse extends AcknowledgedResponse implements ToXC
@Override
public void writeTo(StreamOutput out) throws IOException {
if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_4_0)) {
super.writeTo(out);
state.writeTo(out);
RoutingExplanations.writeTo(explanations, out);
} else {
if (out.getVersion().onOrAfter(Version.V_6_3_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_3_0)) {
state.writeTo(out);
} else {
ClusterModule.filterCustomsForPre63Clients(state).writeTo(out);

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.settings;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.common.ParseField;
import org.opensearch.common.io.stream.StreamInput;
@ -69,8 +69,8 @@ public class ClusterUpdateSettingsResponse extends AcknowledgedResponse {
final Settings persistentSettings;
ClusterUpdateSettingsResponse(StreamInput in) throws IOException {
super(in, in.getVersion().onOrAfter(Version.V_6_4_0));
if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
super(in, in.getVersion().onOrAfter(LegacyESVersion.V_6_4_0));
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_4_0)) {
transientSettings = Settings.readSettingsFromStream(in);
persistentSettings = Settings.readSettingsFromStream(in);
} else {
@ -96,7 +96,7 @@ public class ClusterUpdateSettingsResponse extends AcknowledgedResponse {
@Override
public void writeTo(StreamOutput out) throws IOException {
if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_4_0)) {
super.writeTo(out);
Settings.writeSettingsToStream(transientSettings, out);
Settings.writeSettingsToStream(persistentSettings, out);

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.snapshots.restore;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.action.support.master.MasterNodeRequest;
@ -108,12 +108,12 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
includeGlobalState = in.readBoolean();
partial = in.readBoolean();
includeAliases = in.readBoolean();
if (in.getVersion().before(Version.V_7_7_0)) {
if (in.getVersion().before(LegacyESVersion.V_7_7_0)) {
readSettingsFromStream(in); // formerly the unused settings field
}
indexSettings = readSettingsFromStream(in);
ignoreIndexSettings = in.readStringArray();
if (in.getVersion().onOrAfter(Version.V_7_10_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_10_0)) {
snapshotUuid = in.readOptionalString();
}
}
@ -131,12 +131,12 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
out.writeBoolean(includeGlobalState);
out.writeBoolean(partial);
out.writeBoolean(includeAliases);
if (out.getVersion().before(Version.V_7_7_0)) {
if (out.getVersion().before(LegacyESVersion.V_7_7_0)) {
writeSettingsToStream(Settings.EMPTY, out); // formerly the unused settings field
}
writeSettingsToStream(indexSettings, out);
out.writeStringArray(ignoreIndexSettings);
if (out.getVersion().onOrAfter(Version.V_7_10_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_10_0)) {
out.writeOptionalString(snapshotUuid);
} else if (snapshotUuid != null) {
throw new IllegalStateException(

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.snapshots.status;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.Strings;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
@ -70,7 +70,7 @@ public class SnapshotStats implements Writeable, ToXContentObject {
incrementalSize = in.readVLong();
processedSize = in.readVLong();
if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_4_0)) {
totalFileCount = in.readVInt();
totalSize = in.readVLong();
} else {
@ -160,7 +160,7 @@ public class SnapshotStats implements Writeable, ToXContentObject {
out.writeVLong(incrementalSize);
out.writeVLong(processedSize);
if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_4_0)) {
out.writeVInt(totalFileCount);
out.writeVLong(totalSize);
}

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.snapshots.status;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.cluster.SnapshotsInProgress;
import org.opensearch.cluster.SnapshotsInProgress.State;
import org.opensearch.common.Nullable;
@ -92,7 +92,7 @@ public class SnapshotStatus implements ToXContentObject, Writeable {
includeGlobalState = in.readOptionalBoolean();
final long startTime;
final long time;
if (in.getVersion().onOrAfter(Version.V_7_4_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_4_0)) {
startTime = in.readLong();
time = in.readLong();
} else {
@ -193,7 +193,7 @@ public class SnapshotStatus implements ToXContentObject, Writeable {
out.writeByte(state.value());
out.writeList(shards);
out.writeOptionalBoolean(includeGlobalState);
if (out.getVersion().onOrAfter(Version.V_7_4_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_4_0)) {
out.writeLong(stats.getStartTime());
out.writeLong(stats.getTime());
}

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.state;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.IndicesRequest;
import org.opensearch.action.support.IndicesOptions;
@ -70,7 +70,7 @@ public class ClusterStateRequest extends MasterNodeReadRequest<ClusterStateReque
customs = in.readBoolean();
indices = in.readStringArray();
indicesOptions = IndicesOptions.readIndicesOptions(in);
if (in.getVersion().onOrAfter(Version.V_6_6_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_6_0)) {
waitForTimeout = in.readTimeValue();
waitForMetadataVersion = in.readOptionalLong();
}
@ -86,7 +86,7 @@ public class ClusterStateRequest extends MasterNodeReadRequest<ClusterStateReque
out.writeBoolean(customs);
out.writeStringArray(indices);
indicesOptions.writeIndicesOptions(out);
if (out.getVersion().onOrAfter(Version.V_6_6_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_6_0)) {
out.writeTimeValue(waitForTimeout);
out.writeOptionalLong(waitForMetadataVersion);
}

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.state;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionResponse;
import org.opensearch.cluster.ClusterModule;
import org.opensearch.cluster.ClusterName;
@ -57,15 +57,15 @@ public class ClusterStateResponse extends ActionResponse {
public ClusterStateResponse(StreamInput in) throws IOException {
super(in);
clusterName = new ClusterName(in);
if (in.getVersion().onOrAfter(Version.V_6_6_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_6_0)) {
clusterState = in.readOptionalWriteable(innerIn -> ClusterState.readFrom(innerIn, null));
} else {
clusterState = ClusterState.readFrom(in, null);
}
if (in.getVersion().before(Version.V_7_0_0)) {
if (in.getVersion().before(LegacyESVersion.V_7_0_0)) {
new ByteSizeValue(in);
}
if (in.getVersion().onOrAfter(Version.V_6_6_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_6_0)) {
waitForTimedOut = in.readBoolean();
}
}
@ -102,19 +102,19 @@ public class ClusterStateResponse extends ActionResponse {
@Override
public void writeTo(StreamOutput out) throws IOException {
clusterName.writeTo(out);
if (out.getVersion().onOrAfter(Version.V_6_6_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_6_0)) {
out.writeOptionalWriteable(clusterState);
} else {
if (out.getVersion().onOrAfter(Version.V_6_3_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_3_0)) {
clusterState.writeTo(out);
} else {
ClusterModule.filterCustomsForPre63Clients(clusterState).writeTo(out);
}
}
if (out.getVersion().before(Version.V_7_0_0)) {
if (out.getVersion().before(LegacyESVersion.V_7_0_0)) {
ByteSizeValue.ZERO.writeTo(out);
}
if (out.getVersion().onOrAfter(Version.V_6_6_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_6_0)) {
out.writeBoolean(waitForTimedOut);
}
}

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.stats;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.FailedNodeException;
import org.opensearch.action.support.nodes.BaseNodesResponse;
import org.opensearch.cluster.ClusterName;
@ -66,7 +66,7 @@ public class ClusterStatsResponse extends BaseNodesResponse<ClusterStatsNodeResp
String clusterUUID = null;
MappingStats mappingStats = null;
AnalysisStats analysisStats = null;
if (in.getVersion().onOrAfter(Version.V_7_7_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_7_0)) {
clusterUUID = in.readOptionalString();
mappingStats = in.readOptionalWriteable(MappingStats::new);
analysisStats = in.readOptionalWriteable(AnalysisStats::new);
@ -125,7 +125,7 @@ public class ClusterStatsResponse extends BaseNodesResponse<ClusterStatsNodeResp
super.writeTo(out);
out.writeVLong(timestamp);
out.writeOptionalWriteable(status);
if (out.getVersion().onOrAfter(Version.V_7_7_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_7_0)) {
out.writeOptionalString(clusterUUID);
out.writeOptionalWriteable(indicesStats.getMappings());
out.writeOptionalWriteable(indicesStats.getAnalysis());

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.storedscripts;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.support.master.AcknowledgedRequest;
import org.opensearch.common.io.stream.StreamInput;
@ -48,7 +48,7 @@ public class DeleteStoredScriptRequest extends AcknowledgedRequest<DeleteStoredS
public DeleteStoredScriptRequest(StreamInput in) throws IOException {
super(in);
if (in.getVersion().before(Version.V_6_0_0_alpha2)) {
if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha2)) {
in.readString(); // read lang from previous versions
}
@ -92,7 +92,7 @@ public class DeleteStoredScriptRequest extends AcknowledgedRequest<DeleteStoredS
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (out.getVersion().before(Version.V_6_0_0_alpha2)) {
if (out.getVersion().before(LegacyESVersion.V_6_0_0_alpha2)) {
out.writeString(""); // write an empty lang to previous versions
}

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.storedscripts;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.support.master.MasterNodeReadRequest;
import org.opensearch.common.io.stream.StreamInput;
@ -58,7 +58,7 @@ public class GetStoredScriptRequest extends MasterNodeReadRequest<GetStoredScrip
public GetStoredScriptRequest(StreamInput in) throws IOException {
super(in);
if (in.getVersion().before(Version.V_6_0_0_alpha2)) {
if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha2)) {
in.readString(); // read lang from previous versions
}
@ -69,7 +69,7 @@ public class GetStoredScriptRequest extends MasterNodeReadRequest<GetStoredScrip
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (out.getVersion().before(Version.V_6_0_0_alpha2)) {
if (out.getVersion().before(LegacyESVersion.V_6_0_0_alpha2)) {
out.writeString(""); // write an empty lang to previous versions
}

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.storedscripts;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionResponse;
import org.opensearch.common.ParseField;
import org.opensearch.common.io.stream.StreamInput;
@ -88,7 +88,7 @@ public class GetStoredScriptResponse extends ActionResponse implements StatusToX
source = null;
}
if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_4_0)) {
id = in.readString();
}
}
@ -141,7 +141,7 @@ public class GetStoredScriptResponse extends ActionResponse implements StatusToX
out.writeBoolean(true);
source.writeTo(out);
}
if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_4_0)) {
out.writeString(id);
}
}

View File

@ -32,7 +32,7 @@
package org.opensearch.action.admin.cluster.storedscripts;
import org.opensearch.Version;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.support.master.AcknowledgedRequest;
import org.opensearch.common.bytes.BytesReference;
@ -59,13 +59,13 @@ public class PutStoredScriptRequest extends AcknowledgedRequest<PutStoredScriptR
public PutStoredScriptRequest(StreamInput in) throws IOException {
super(in);
if (in.getVersion().before(Version.V_6_0_0_alpha2)) {
if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha2)) {
in.readString(); // read lang from previous versions
}
id = in.readOptionalString();
content = in.readBytesReference();
xContentType = in.readEnum(XContentType.class);
if (in.getVersion().onOrAfter(Version.V_6_0_0_alpha2)) {
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha2)) {
context = in.readOptionalString();
source = new StoredScriptSource(in);
} else {
@ -147,13 +147,13 @@ public class PutStoredScriptRequest extends AcknowledgedRequest<PutStoredScriptR
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (out.getVersion().before(Version.V_6_0_0_alpha2)) {
if (out.getVersion().before(LegacyESVersion.V_6_0_0_alpha2)) {
out.writeString(source == null ? "" : source.getLang());
}
out.writeOptionalString(id);
out.writeBytesReference(content);
out.writeEnum(xContentType);
if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha2)) {
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha2)) {
out.writeOptionalString(context);
source.writeTo(out);
}

Some files were not shown because too many files have changed in this diff Show More