From 821417b9abec4a8d0e3ae4f25ee79c9ff15a110c Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Thu, 9 Dec 2021 00:11:36 -0500 Subject: [PATCH] Remove LegacyESVersion.V_6_1_x constants (#1681) This commit removes LegacyESVersion.V_6_1_x constants including all pre-release versions and bug fixes. Signed-off-by: Nicholas Walter Knize --- .../percolator/PercolateQueryBuilder.java | 30 +--- .../percolator/PercolatorFieldMapper.java | 11 +- .../opensearch/percolator/QueryAnalyzer.java | 141 +++++++----------- .../percolator/CandidateQueryTests.java | 4 +- .../upgrades/FullClusterRestartIT.java | 2 +- .../FullClusterRestartSettingsUpgradeIT.java | 5 +- .../java/org/opensearch/LegacyESVersion.java | 5 - .../admin/cluster/node/stats/NodeStats.java | 10 +- .../cluster/node/stats/NodesStatsRequest.java | 8 +- .../admin/indices/open/OpenIndexRequest.java | 9 +- .../admin/indices/open/OpenIndexResponse.java | 7 +- .../indices/rollover/MaxSizeCondition.java | 7 - .../admin/indices/shrink/ResizeAction.java | 3 - .../admin/indices/shrink/ResizeRequest.java | 14 +- .../indices/shrink/TransportResizeAction.java | 8 +- .../action/search/SearchResponse.java | 10 +- .../decider/ResizeAllocationDecider.java | 4 - .../common/lucene/search/Queries.java | 7 +- .../opensearch/common/settings/Settings.java | 52 ++----- .../org/opensearch/common/unit/Fuzziness.java | 23 ++- .../opensearch/discovery/DiscoveryStats.java | 13 +- .../org/opensearch/index/engine/Segment.java | 15 +- .../index/mapper/FieldNamesFieldMapper.java | 35 ----- .../index/mapper/SeqNoFieldMapper.java | 8 - .../index/query/ExistsQueryBuilder.java | 21 --- .../index/query/MatchQueryBuilder.java | 9 +- .../index/query/MultiMatchQueryBuilder.java | 24 +-- .../index/query/QueryStringQueryBuilder.java | 13 +- .../index/query/SimpleQueryStringBuilder.java | 21 +-- .../reindex/AbstractBulkByScrollRequest.java | 14 +- .../org/opensearch/index/shard/DocsStats.java | 11 +- .../org/opensearch/monitor/os/OsStats.java | 21 +-- .../search/sort/FieldSortBuilder.java | 8 +- .../search/sort/GeoDistanceSortBuilder.java | 8 +- .../search/sort/ScriptSortBuilder.java | 8 +- .../completion/CompletionSuggestion.java | 9 +- .../CompletionSuggestionBuilder.java | 9 +- .../common/lucene/search/QueriesTests.java | 13 +- .../common/settings/SettingsTests.java | 2 +- .../index/query/ExistsQueryBuilderTests.java | 22 +-- .../query/QueryStringQueryBuilderTests.java | 7 +- .../index/query/RangeQueryBuilderTests.java | 18 +-- .../RemoteClusterConnectionTests.java | 33 ---- .../rest/yaml/section/SkipSectionTests.java | 8 +- 44 files changed, 174 insertions(+), 536 deletions(-) diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java index 733987b1521..5e245f7082a 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java @@ -55,7 +55,6 @@ import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.SetOnce; -import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchException; import org.opensearch.ResourceNotFoundException; import org.opensearch.Version; @@ -286,9 +285,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder 1) { - throw new IllegalArgumentException("Nodes prior to 6.1.0 cannot accept multiple 
documents"); - } - BytesReference doc = documents.isEmpty() ? null : documents.iterator().next(); - out.writeOptionalBytesReference(doc); + out.writeVInt(documents.size()); + for (BytesReference document : documents) { + out.writeBytesReference(document); } if (documents.isEmpty() == false) { out.writeEnum(documentXContentType); diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java index 8cf39ec18ef..72adc5539d6 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java @@ -54,7 +54,6 @@ import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.opensearch.LegacyESVersion; import org.opensearch.Version; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.common.ParsingException; @@ -67,7 +66,6 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentLocation; import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.mapper.BinaryFieldMapper; import org.opensearch.index.mapper.FieldMapper; import org.opensearch.index.mapper.KeywordFieldMapper; @@ -109,8 +107,6 @@ import static org.opensearch.index.query.AbstractQueryBuilder.parseInnerQueryBui public class PercolatorFieldMapper extends ParametrizedFieldMapper { - static final XContentType QUERY_BUILDER_CONTENT_TYPE = XContentType.SMILE; - static final Setting INDEX_MAP_UNMAPPED_FIELDS_AS_TEXT_SETTING = Setting.boolSetting( "index.percolator.map_unmapped_fields_as_text", false, @@ -303,7 +299,7 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper { } BooleanQuery.Builder candidateQuery = new BooleanQuery.Builder(); - if (canUseMinimumShouldMatchField && indexVersion.onOrAfter(LegacyESVersion.V_6_1_0)) { + if (canUseMinimumShouldMatchField) { LongValuesSource valuesSource = LongValuesSource.fromIntField(minimumShouldMatchField.name()); for (BytesRef extractedTerm : extractedTerms) { subQueries.add(new TermQuery(new Term(queryTermsField.name(), extractedTerm))); @@ -458,7 +454,6 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper { } } - Version indexVersionCreated = context.mapperService().getIndexSettings().getIndexVersionCreated(); if (result.matchAllDocs) { doc.add(new Field(extractionResultField.name(), EXTRACTION_FAILED, INDEXED_KEYWORD)); if (result.verified) { @@ -471,9 +466,7 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper { } createFieldNamesField(context); - if (indexVersionCreated.onOrAfter(LegacyESVersion.V_6_1_0)) { - doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch)); - } + doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch)); } static void configureContext(QueryShardContext context, boolean mapUnmappedFieldsAsString) { diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java index 9cb51670caa..4a8ab8ba7d4 100644 --- 
a/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java @@ -53,7 +53,6 @@ import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.automaton.ByteRunAutomaton; -import org.opensearch.LegacyESVersion; import org.opensearch.Version; import org.opensearch.common.lucene.search.function.FunctionScoreQuery; import org.opensearch.index.query.DateRangeIncludingNowQuery; @@ -61,11 +60,9 @@ import org.opensearch.index.query.DateRangeIncludingNowQuery; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Objects; -import java.util.Optional; import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -229,14 +226,6 @@ final class QueryAnalyzer { boolean verified = isVerified(query); Set qe = Arrays.stream(terms).map(QueryExtraction::new).collect(Collectors.toSet()); if (qe.size() > 0) { - if (version.before(LegacyESVersion.V_6_1_0) && conjunction) { - Optional longest = qe.stream() - .filter(q -> q.term != null) - .max(Comparator.comparingInt(q -> q.term.bytes().length)); - if (longest.isPresent()) { - qe = Collections.singleton(longest.get()); - } - } this.terms.add(new Result(verified, qe, conjunction ? qe.size() : 1)); } } @@ -300,80 +289,71 @@ final class QueryAnalyzer { if (conjunctionsWithUnknowns.size() == 1) { return conjunctionsWithUnknowns.get(0); } - if (version.onOrAfter(LegacyESVersion.V_6_1_0)) { - for (Result subResult : conjunctions) { - if (subResult.isMatchNoDocs()) { - return subResult; - } + for (Result subResult : conjunctions) { + if (subResult.isMatchNoDocs()) { + return subResult; } + } - int msm = 0; - boolean verified = conjunctionsWithUnknowns.size() == conjunctions.size(); - boolean matchAllDocs = true; - Set extractions = new HashSet<>(); - Set seenRangeFields = new HashSet<>(); - for (Result result : conjunctions) { + int msm = 0; + boolean verified = conjunctionsWithUnknowns.size() == conjunctions.size(); + boolean matchAllDocs = true; + Set extractions = new HashSet<>(); + Set seenRangeFields = new HashSet<>(); + for (Result result : conjunctions) { - int resultMsm = result.minimumShouldMatch; - for (QueryExtraction queryExtraction : result.extractions) { - if (queryExtraction.range != null) { - // In case of range queries each extraction does not simply increment the - // minimum_should_match for that percolator query like for a term based extraction, - // so that can lead to more false positives for percolator queries with range queries - // than term based queries. - // This is because the way number fields are extracted from the document to be - // percolated. Per field a single range is extracted and if a percolator query has two or - // more range queries on the same field, then the minimum should match can be higher than clauses - // in the CoveringQuery. Therefore right now the minimum should match is only incremented once per - // number field when processing the percolator query at index time. 
- // For multiple ranges within a single extraction (ie from an existing conjunction or disjunction) - // then this will already have been taken care of, so we only check against fieldnames from - // previously processed extractions, and don't add to the seenRangeFields list until all - // extractions from this result are processed - if (seenRangeFields.contains(queryExtraction.range.fieldName)) { - resultMsm = Math.max(0, resultMsm - 1); - verified = false; - } - } else { - // In case that there are duplicate term query extractions we need to be careful with - // incrementing msm, because that could lead to valid matches not becoming candidate matches: - // query: (field:val1 AND field:val2) AND (field:val2 AND field:val3) - // doc: field: val1 val2 val3 - // So lets be protective and decrease the msm: - if (extractions.contains(queryExtraction)) { - resultMsm = Math.max(0, resultMsm - 1); - verified = false; - } + int resultMsm = result.minimumShouldMatch; + for (QueryExtraction queryExtraction : result.extractions) { + if (queryExtraction.range != null) { + // In case of range queries each extraction does not simply increment the + // minimum_should_match for that percolator query like for a term based extraction, + // so that can lead to more false positives for percolator queries with range queries + // than term based queries. + // This is because the way number fields are extracted from the document to be + // percolated. Per field a single range is extracted and if a percolator query has two or + // more range queries on the same field, then the minimum should match can be higher than clauses + // in the CoveringQuery. Therefore right now the minimum should match is only incremented once per + // number field when processing the percolator query at index time. 
+ // For multiple ranges within a single extraction (ie from an existing conjunction or disjunction) + // then this will already have been taken care of, so we only check against fieldnames from + // previously processed extractions, and don't add to the seenRangeFields list until all + // extractions from this result are processed + if (seenRangeFields.contains(queryExtraction.range.fieldName)) { + resultMsm = Math.max(0, resultMsm - 1); + verified = false; + } + } else { + // In case that there are duplicate term query extractions we need to be careful with + // incrementing msm, because that could lead to valid matches not becoming candidate matches: + // query: (field:val1 AND field:val2) AND (field:val2 AND field:val3) + // doc: field: val1 val2 val3 + // So lets be protective and decrease the msm: + if (extractions.contains(queryExtraction)) { + resultMsm = Math.max(0, resultMsm - 1); + verified = false; } } - msm += resultMsm; - - // add range fields from this Result to the seenRangeFields set so that minimumShouldMatch is correctly - // calculated for subsequent Results - result.extractions.stream().map(e -> e.range).filter(Objects::nonNull).map(e -> e.fieldName).forEach(seenRangeFields::add); - - if (result.verified == false - // If some inner extractions are optional, the result can't be verified - || result.minimumShouldMatch < result.extractions.size()) { - verified = false; - - } - matchAllDocs &= result.matchAllDocs; - extractions.addAll(result.extractions); } + msm += resultMsm; + + // add range fields from this Result to the seenRangeFields set so that minimumShouldMatch is correctly + // calculated for subsequent Results + result.extractions.stream().map(e -> e.range).filter(Objects::nonNull).map(e -> e.fieldName).forEach(seenRangeFields::add); + + if (result.verified == false + // If some inner extractions are optional, the result can't be verified + || result.minimumShouldMatch < result.extractions.size()) { + verified = false; - if (matchAllDocs) { - return new Result(matchAllDocs, verified); - } else { - return new Result(verified, extractions, msm); } + matchAllDocs &= result.matchAllDocs; + extractions.addAll(result.extractions); + } + if (matchAllDocs) { + return new Result(matchAllDocs, verified); } else { - Result bestClause = null; - for (Result result : conjunctions) { - bestClause = selectBestResult(result, bestClause); - } - return bestClause; + return new Result(verified, extractions, msm); } } @@ -386,12 +366,7 @@ final class QueryAnalyzer { } // Keep track of the msm for each clause: List clauses = new ArrayList<>(disjunctions.size()); - boolean verified; - if (version.before(LegacyESVersion.V_6_1_0)) { - verified = requiredShouldClauses <= 1; - } else { - verified = true; - } + boolean verified = true; int numMatchAllClauses = 0; boolean hasRangeExtractions = false; @@ -438,10 +413,10 @@ final class QueryAnalyzer { boolean matchAllDocs = numMatchAllClauses > 0 && numMatchAllClauses >= requiredShouldClauses; int msm = 0; - if (version.onOrAfter(LegacyESVersion.V_6_1_0) && // Having ranges would mean we need to juggle with the msm and that complicates this logic a lot, // so for now lets not do it. 
- hasRangeExtractions == false) { + if (hasRangeExtractions == false) { + // Figure out what the combined msm is for this disjunction: // (sum the lowest required clauses, otherwise we're too strict and queries may not match) clauses = clauses.stream().filter(val -> val > 0).sorted().collect(Collectors.toList()); diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java index 5f44a9a507b..871351ad5b2 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java @@ -90,7 +90,6 @@ import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.opensearch.LegacyESVersion; import org.opensearch.Version; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.CheckedFunction; @@ -114,6 +113,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.test.OpenSearchSingleNodeTestCase; import org.junit.After; import org.junit.Before; +import org.opensearch.test.VersionUtils; import java.io.IOException; import java.util.ArrayList; @@ -673,7 +673,7 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase { IndexSearcher shardSearcher = newSearcher(directoryReader); shardSearcher.setQueryCache(null); - Version v = LegacyESVersion.V_6_1_0; + Version v = VersionUtils.randomIndexCompatibleVersion(random()); MemoryIndex memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new IntPoint("int_field", 3)), new WhitespaceAnalyzer()); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); Query query = fieldType.percolateQuery( diff --git a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java index 2934ae10e15..51fe37e2c9b 100644 --- a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java @@ -1130,7 +1130,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase { * we will hit a warning exception because we put some deprecated settings in that test. */ if (isRunningAgainstOldCluster() == false - && getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_1_0) && getOldClusterVersion().before(LegacyESVersion.V_6_5_0)) { + && getOldClusterVersion().before(LegacyESVersion.V_6_5_0)) { for (String warning : e.getResponse().getWarnings()) { assertThat(warning, containsString( "setting was deprecated and will be removed in a future release! 
" diff --git a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartSettingsUpgradeIT.java b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartSettingsUpgradeIT.java index 53b21fda8f8..2d28a712ee0 100644 --- a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartSettingsUpgradeIT.java +++ b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartSettingsUpgradeIT.java @@ -58,7 +58,6 @@ import static org.hamcrest.Matchers.equalTo; public class FullClusterRestartSettingsUpgradeIT extends AbstractFullClusterRestartTestCase { public void testRemoteClusterSettingsUpgraded() throws IOException { - assumeTrue("skip_unavailable did not exist until 6.1.0", getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_1_0)); assumeTrue("settings automatically upgraded since 6.5.0", getOldClusterVersion().before(LegacyESVersion.V_6_5_0)); if (isRunningAgainstOldCluster()) { final Request putSettingsRequest = new Request("PUT", "/_cluster/settings"); @@ -91,7 +90,7 @@ public class FullClusterRestartSettingsUpgradeIT extends AbstractFullClusterRest SEARCH_REMOTE_CLUSTERS_SEEDS.getConcreteSettingForNamespace("foo").get(settings), equalTo(Collections.singletonList("localhost:9200"))); assertTrue(SEARCH_REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace("foo").exists(settings)); - assertEquals(String.valueOf(SEARCH_REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace("foo").get(settings)), + assertEquals(String.valueOf(SEARCH_REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace("foo").get(settings)), "localhost:9200"); } @@ -118,7 +117,7 @@ public class FullClusterRestartSettingsUpgradeIT extends AbstractFullClusterRest equalTo(Collections.singletonList("localhost:9200"))); assertFalse(SEARCH_REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace("foo").exists(settings)); assertTrue(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace("foo").exists(settings)); - assertEquals(String.valueOf(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace("foo").get(settings)), + assertEquals(String.valueOf(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace("foo").get(settings)), "localhost:9200"); } } diff --git a/server/src/main/java/org/opensearch/LegacyESVersion.java b/server/src/main/java/org/opensearch/LegacyESVersion.java index 9cccebea9e3..e4a5441d2dc 100644 --- a/server/src/main/java/org/opensearch/LegacyESVersion.java +++ b/server/src/main/java/org/opensearch/LegacyESVersion.java @@ -46,11 +46,6 @@ import java.lang.reflect.Field; */ public class LegacyESVersion extends Version { - public static final LegacyESVersion V_6_1_0 = new LegacyESVersion(6010099, org.apache.lucene.util.Version.LUCENE_7_1_0); - public static final LegacyESVersion V_6_1_1 = new LegacyESVersion(6010199, org.apache.lucene.util.Version.LUCENE_7_1_0); - public static final LegacyESVersion V_6_1_2 = new LegacyESVersion(6010299, org.apache.lucene.util.Version.LUCENE_7_1_0); - public static final LegacyESVersion V_6_1_3 = new LegacyESVersion(6010399, org.apache.lucene.util.Version.LUCENE_7_1_0); - public static final LegacyESVersion V_6_1_4 = new LegacyESVersion(6010499, org.apache.lucene.util.Version.LUCENE_7_1_0); // The below version is missing from the 7.3 JAR private static final org.apache.lucene.util.Version LUCENE_7_2_1 = org.apache.lucene.util.Version.fromBits(7, 2, 1); public static final LegacyESVersion V_6_2_0 = new LegacyESVersion(6020099, LUCENE_7_2_1); diff --git 
a/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java index 03f32596ee3..dd2c649e07c 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java @@ -134,11 +134,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContentFragment { scriptStats = in.readOptionalWriteable(ScriptStats::new); discoveryStats = in.readOptionalWriteable(DiscoveryStats::new); ingestStats = in.readOptionalWriteable(IngestStats::new); - if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - adaptiveSelectionStats = in.readOptionalWriteable(AdaptiveSelectionStats::new); - } else { - adaptiveSelectionStats = null; - } + adaptiveSelectionStats = in.readOptionalWriteable(AdaptiveSelectionStats::new); scriptCacheStats = null; if (in.getVersion().onOrAfter(LegacyESVersion.V_7_8_0)) { if (in.getVersion().before(LegacyESVersion.V_7_9_0)) { @@ -328,9 +324,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContentFragment { out.writeOptionalWriteable(scriptStats); out.writeOptionalWriteable(discoveryStats); out.writeOptionalWriteable(ingestStats); - if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - out.writeOptionalWriteable(adaptiveSelectionStats); - } + out.writeOptionalWriteable(adaptiveSelectionStats); if (out.getVersion().onOrAfter(LegacyESVersion.V_7_8_0) && out.getVersion().before(LegacyESVersion.V_7_9_0)) { out.writeOptionalWriteable(scriptCacheStats); } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodesStatsRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodesStatsRequest.java index ce481f4886a..2ebf2ca424c 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodesStatsRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodesStatsRequest.java @@ -74,9 +74,7 @@ public class NodesStatsRequest extends BaseNodesRequest { optionallyAddMetric(in.readBoolean(), Metric.SCRIPT.metricName()); optionallyAddMetric(in.readBoolean(), Metric.DISCOVERY.metricName()); optionallyAddMetric(in.readBoolean(), Metric.INGEST.metricName()); - if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - optionallyAddMetric(in.readBoolean(), Metric.ADAPTIVE_SELECTION.metricName()); - } + optionallyAddMetric(in.readBoolean(), Metric.ADAPTIVE_SELECTION.metricName()); } else { requestedMetrics.addAll(in.readStringList()); } @@ -212,9 +210,7 @@ public class NodesStatsRequest extends BaseNodesRequest { out.writeBoolean(Metric.SCRIPT.containedIn(requestedMetrics)); out.writeBoolean(Metric.DISCOVERY.containedIn(requestedMetrics)); out.writeBoolean(Metric.INGEST.containedIn(requestedMetrics)); - if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - out.writeBoolean(Metric.ADAPTIVE_SELECTION.containedIn(requestedMetrics)); - } + out.writeBoolean(Metric.ADAPTIVE_SELECTION.containedIn(requestedMetrics)); } else { out.writeStringArray(requestedMetrics.toArray(new String[0])); } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexRequest.java index 3d677a6a6d2..be0e0254edf 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexRequest.java +++ 
b/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexRequest.java @@ -32,7 +32,6 @@ package org.opensearch.action.admin.indices.open; -import org.opensearch.LegacyESVersion; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.IndicesRequest; import org.opensearch.action.support.ActiveShardCount; @@ -59,9 +58,7 @@ public class OpenIndexRequest extends AcknowledgedRequest impl super(in); indices = in.readStringArray(); indicesOptions = IndicesOptions.readIndicesOptions(in); - if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - waitForActiveShards = ActiveShardCount.readFrom(in); - } + waitForActiveShards = ActiveShardCount.readFrom(in); } public OpenIndexRequest() {} @@ -167,8 +164,6 @@ public class OpenIndexRequest extends AcknowledgedRequest impl super.writeTo(out); out.writeStringArray(indices); indicesOptions.writeIndicesOptions(out); - if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - waitForActiveShards.writeTo(out); - } + waitForActiveShards.writeTo(out); } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexResponse.java index a2323a6b1ae..af2754bf210 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexResponse.java @@ -32,7 +32,6 @@ package org.opensearch.action.admin.indices.open; -import org.opensearch.LegacyESVersion; import org.opensearch.action.support.master.ShardsAcknowledgedResponse; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -57,7 +56,7 @@ public class OpenIndexResponse extends ShardsAcknowledgedResponse { } public OpenIndexResponse(StreamInput in) throws IOException { - super(in, in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0), true); + super(in, true, true); } public OpenIndexResponse(boolean acknowledged, boolean shardsAcknowledged) { @@ -67,9 +66,7 @@ public class OpenIndexResponse extends ShardsAcknowledgedResponse { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - writeShardsAcknowledged(out); - } + writeShardsAcknowledged(out); } public static OpenIndexResponse fromXContent(XContentParser parser) { diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/MaxSizeCondition.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/MaxSizeCondition.java index 01c8cff49bd..147d81a5296 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/MaxSizeCondition.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/MaxSizeCondition.java @@ -32,8 +32,6 @@ package org.opensearch.action.admin.indices.rollover; -import org.opensearch.LegacyESVersion; -import org.opensearch.Version; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.unit.ByteSizeUnit; @@ -65,11 +63,6 @@ public class MaxSizeCondition extends Condition { return new Result(this, stats.indexSize.getBytes() >= value.getBytes()); } - @Override - boolean includedInVersion(Version version) { - return version.onOrAfter(LegacyESVersion.V_6_1_0); - } - @Override public String getWriteableName() { return NAME; diff --git 
a/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeAction.java b/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeAction.java index e4d6a079d14..5ecdd622061 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeAction.java @@ -32,15 +32,12 @@ package org.opensearch.action.admin.indices.shrink; -import org.opensearch.LegacyESVersion; -import org.opensearch.Version; import org.opensearch.action.ActionType; public class ResizeAction extends ActionType { public static final ResizeAction INSTANCE = new ResizeAction(); public static final String NAME = "indices:admin/resize"; - public static final Version COMPATIBILITY_VERSION = LegacyESVersion.V_6_1_0; // TODO remove this once it's backported private ResizeAction() { super(NAME, ResizeResponse::new); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequest.java index 1dd94f8dce9..5b4878a21aa 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequest.java @@ -81,11 +81,7 @@ public class ResizeRequest extends AcknowledgedRequest implements super(in); targetIndexRequest = new CreateIndexRequest(in); sourceIndex = in.readString(); - if (in.getVersion().onOrAfter(ResizeAction.COMPATIBILITY_VERSION)) { - type = in.readEnum(ResizeType.class); - } else { - type = ResizeType.SHRINK; // BWC this used to be shrink only - } + type = in.readEnum(ResizeType.class); if (in.getVersion().before(LegacyESVersion.V_6_4_0)) { copySettings = null; } else { @@ -128,12 +124,10 @@ public class ResizeRequest extends AcknowledgedRequest implements super.writeTo(out); targetIndexRequest.writeTo(out); out.writeString(sourceIndex); - if (out.getVersion().onOrAfter(ResizeAction.COMPATIBILITY_VERSION)) { - if (type == ResizeType.CLONE && out.getVersion().before(LegacyESVersion.V_7_4_0)) { - throw new IllegalArgumentException("can't send clone request to a node that's older than " + LegacyESVersion.V_7_4_0); - } - out.writeEnum(type); + if (type == ResizeType.CLONE && out.getVersion().before(LegacyESVersion.V_7_4_0)) { + throw new IllegalArgumentException("can't send clone request to a node that's older than " + LegacyESVersion.V_7_4_0); } + out.writeEnum(type); // noinspection StatementWithEmptyBody if (out.getVersion().before(LegacyESVersion.V_6_4_0)) { diff --git a/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java b/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java index 86cfcbe770a..b35febe60af 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java @@ -250,12 +250,6 @@ public class TransportResizeAction extends TransportMasterNodeAction) value); - } else { - builder.put(key, value.toString()); - } - } - } else { - for (int i = 0; i < numberOfSettings; i++) { - String key = in.readString(); - String value = in.readOptionalString(); - builder.put(key, value); + for (int i = 0; i < numberOfSettings; i++) { + String key = in.readString(); + Object value = in.readGenericValue(); + if (value == null) { + builder.putNull(key); + } else if (value instanceof List) { 
+ builder.putList(key, (List) value); + } else { + builder.put(key, value.toString()); } } return builder.build(); @@ -581,27 +572,10 @@ public final class Settings implements ToXContentFragment { public static void writeSettingsToStream(Settings settings, StreamOutput out) throws IOException { // pull settings to exclude secure settings in size() Set> entries = settings.settings.entrySet(); - if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - out.writeVInt(entries.size()); - for (Map.Entry entry : entries) { - out.writeString(entry.getKey()); - out.writeGenericValue(entry.getValue()); - } - } else { - int size = entries.stream().mapToInt(e -> e.getValue() instanceof List ? ((List) e.getValue()).size() : 1).sum(); - out.writeVInt(size); - for (Map.Entry entry : entries) { - if (entry.getValue() instanceof List) { - int idx = 0; - for (String value : (List) entry.getValue()) { - out.writeString(entry.getKey() + "." + idx++); - out.writeOptionalString(value); - } - } else { - out.writeString(entry.getKey()); - out.writeOptionalString(toString(entry.getValue())); - } - } + out.writeVInt(entries.size()); + for (Map.Entry entry : entries) { + out.writeString(entry.getKey()); + out.writeGenericValue(entry.getValue()); } } diff --git a/server/src/main/java/org/opensearch/common/unit/Fuzziness.java b/server/src/main/java/org/opensearch/common/unit/Fuzziness.java index 3d62af7ebd6..8b10d882351 100644 --- a/server/src/main/java/org/opensearch/common/unit/Fuzziness.java +++ b/server/src/main/java/org/opensearch/common/unit/Fuzziness.java @@ -31,7 +31,6 @@ package org.opensearch.common.unit; -import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchParseException; import org.opensearch.common.ParseField; import org.opensearch.common.io.stream.StreamInput; @@ -95,7 +94,7 @@ public final class Fuzziness implements ToXContentFragment, Writeable { */ public Fuzziness(StreamInput in) throws IOException { fuzziness = in.readString(); - if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0) && in.readBoolean()) { + if (in.readBoolean()) { lowDistance = in.readVInt(); highDistance = in.readVInt(); } @@ -104,17 +103,15 @@ public final class Fuzziness implements ToXContentFragment, Writeable { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(fuzziness); - if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - // we cannot serialize the low/high bounds since the other node does not know about them. - // This is a best-effort to not fail queries in case the cluster is being upgraded and users - // start using features that are not available on all nodes. - if (isAutoWithCustomValues()) { - out.writeBoolean(true); - out.writeVInt(lowDistance); - out.writeVInt(highDistance); - } else { - out.writeBoolean(false); - } + // we cannot serialize the low/high bounds since the other node does not know about them. + // This is a best-effort to not fail queries in case the cluster is being upgraded and users + // start using features that are not available on all nodes. 
+ if (isAutoWithCustomValues()) { + out.writeBoolean(true); + out.writeVInt(lowDistance); + out.writeVInt(highDistance); + } else { + out.writeBoolean(false); } } diff --git a/server/src/main/java/org/opensearch/discovery/DiscoveryStats.java b/server/src/main/java/org/opensearch/discovery/DiscoveryStats.java index 159e997bdf7..e480799c9f7 100644 --- a/server/src/main/java/org/opensearch/discovery/DiscoveryStats.java +++ b/server/src/main/java/org/opensearch/discovery/DiscoveryStats.java @@ -32,7 +32,6 @@ package org.opensearch.discovery; -import org.opensearch.LegacyESVersion; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; @@ -55,21 +54,13 @@ public class DiscoveryStats implements Writeable, ToXContentFragment { public DiscoveryStats(StreamInput in) throws IOException { queueStats = in.readOptionalWriteable(PendingClusterStateStats::new); - - if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - publishStats = in.readOptionalWriteable(PublishClusterStateStats::new); - } else { - publishStats = null; - } + publishStats = in.readOptionalWriteable(PublishClusterStateStats::new); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(queueStats); - - if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - out.writeOptionalWriteable(publishStats); - } + out.writeOptionalWriteable(publishStats); } @Override diff --git a/server/src/main/java/org/opensearch/index/engine/Segment.java b/server/src/main/java/org/opensearch/index/engine/Segment.java index fead08ed6c7..1ef3a2f94a8 100644 --- a/server/src/main/java/org/opensearch/index/engine/Segment.java +++ b/server/src/main/java/org/opensearch/index/engine/Segment.java @@ -40,7 +40,6 @@ import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedNumericSelector; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; -import org.opensearch.LegacyESVersion; import org.opensearch.common.Nullable; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -89,10 +88,8 @@ public class Segment implements Writeable { ramTree = readRamTree(in); } segmentSort = readSegmentSort(in); - if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0) && in.readBoolean()) { + if (in.readBoolean()) { attributes = in.readMap(StreamInput::readString, StreamInput::readString); - } else { - attributes = null; } } @@ -204,12 +201,10 @@ public class Segment implements Writeable { writeRamTree(out, ramTree); } writeSegmentSort(out, segmentSort); - if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - boolean hasAttributes = attributes != null; - out.writeBoolean(hasAttributes); - if (hasAttributes) { - out.writeMap(attributes, StreamOutput::writeString, StreamOutput::writeString); - } + boolean hasAttributes = attributes != null; + out.writeBoolean(hasAttributes); + if (hasAttributes) { + out.writeMap(attributes, StreamOutput::writeString, StreamOutput::writeString); } } diff --git a/server/src/main/java/org/opensearch/index/mapper/FieldNamesFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/FieldNamesFieldMapper.java index eedecb35873..b038de69b2e 100644 --- a/server/src/main/java/org/opensearch/index/mapper/FieldNamesFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/FieldNamesFieldMapper.java @@ -32,20 +32,15 @@ package org.opensearch.index.mapper; -import 
org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; -import org.opensearch.LegacyESVersion; import org.opensearch.Version; import org.opensearch.common.Explicit; import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.index.query.QueryShardContext; import org.opensearch.search.lookup.SearchLookup; -import java.io.IOException; -import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -180,36 +175,6 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper { return (FieldNamesFieldType) super.fieldType(); } - @Override - public void postParse(ParseContext context) throws IOException { - if (context.indexSettings().getIndexVersionCreated().before(LegacyESVersion.V_6_1_0)) { - if (fieldType().isEnabled() == false) { - return; - } - for (ParseContext.Document document : context) { - final List paths = new ArrayList<>(document.getFields().size()); - String previousPath = ""; // used as a sentinel - field names can't be empty - for (IndexableField field : document.getFields()) { - final String path = field.name(); - if (path.equals(previousPath)) { - // Sometimes mappers create multiple Lucene fields, eg. one for indexing, - // one for doc values and one for storing. Deduplicating is not required - // for correctness but this simple check helps save utf-8 conversions and - // gives Lucene fewer values to deal with. - continue; - } - paths.add(path); - previousPath = path; - } - for (String path : paths) { - for (String fieldName : extractFieldNames(path)) { - document.add(new Field(fieldType().name(), fieldName, Defaults.FIELD_TYPE)); - } - } - } - } - } - static Iterable extractFieldNames(final String fullPath) { return new Iterable() { @Override diff --git a/server/src/main/java/org/opensearch/index/mapper/SeqNoFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/SeqNoFieldMapper.java index 816d5f6731f..9c3f56cfc2b 100644 --- a/server/src/main/java/org/opensearch/index/mapper/SeqNoFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/SeqNoFieldMapper.java @@ -38,8 +38,6 @@ import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.opensearch.LegacyESVersion; -import org.opensearch.Version; import org.opensearch.common.Nullable; import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.fielddata.IndexNumericFieldData.NumericType; @@ -215,15 +213,9 @@ public class SeqNoFieldMapper extends MetadataFieldMapper { // we share the parent docs fields to ensure good compression SequenceIDFields seqID = context.seqID(); assert seqID != null; - final Version versionCreated = context.mapperService().getIndexSettings().getIndexVersionCreated(); - final boolean includePrimaryTerm = versionCreated.before(LegacyESVersion.V_6_1_0); for (Document doc : context.nonRootDocuments()) { doc.add(seqID.seqNo); doc.add(seqID.seqNoDocValue); - if (includePrimaryTerm) { - // primary terms are used to distinguish between parent and nested docs since 6.1.0 - doc.add(seqID.primaryTerm); - } } } diff --git a/server/src/main/java/org/opensearch/index/query/ExistsQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/ExistsQueryBuilder.java index 6239e79f883..75c54093a8b 100644 --- 
a/server/src/main/java/org/opensearch/index/query/ExistsQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/ExistsQueryBuilder.java @@ -40,7 +40,6 @@ import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.opensearch.LegacyESVersion; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.Strings; @@ -172,10 +171,6 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder } } - if (context.indexVersionCreated().before(LegacyESVersion.V_6_1_0)) { - return newLegacyExistsQuery(context, fields); - } - if (fields.size() == 1) { String field = fields.iterator().next(); return newFieldExistsQuery(context, field); @@ -188,22 +183,6 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder return new ConstantScoreQuery(boolFilterBuilder.build()); } - private static Query newLegacyExistsQuery(QueryShardContext context, Collection fields) { - // We create TermsQuery directly here rather than using FieldNamesFieldType.termsQuery() - // so we don't end up with deprecation warnings - if (fields.size() == 1) { - Query filter = newLegacyExistsQuery(context, fields.iterator().next()); - return new ConstantScoreQuery(filter); - } - - BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder(); - for (String field : fields) { - Query filter = newLegacyExistsQuery(context, field); - boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD); - } - return new ConstantScoreQuery(boolFilterBuilder.build()); - } - private static Query newLegacyExistsQuery(QueryShardContext context, String field) { MappedFieldType fieldType = context.fieldMapper(field); String fieldName = fieldType != null ? fieldType.name() : field; diff --git a/server/src/main/java/org/opensearch/index/query/MatchQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/MatchQueryBuilder.java index 0cc3436fa60..54c0f79dbac 100644 --- a/server/src/main/java/org/opensearch/index/query/MatchQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/MatchQueryBuilder.java @@ -34,7 +34,6 @@ package org.opensearch.index.query; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.Query; -import org.opensearch.LegacyESVersion; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; @@ -147,9 +146,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder { fuzzyRewrite = in.readOptionalString(); fuzziness = in.readOptionalWriteable(Fuzziness::new); cutoffFrequency = in.readOptionalFloat(); - if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - autoGenerateSynonymsPhraseQuery = in.readBoolean(); - } + autoGenerateSynonymsPhraseQuery = in.readBoolean(); } @Override @@ -168,9 +165,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder { out.writeOptionalString(fuzzyRewrite); out.writeOptionalWriteable(fuzziness); out.writeOptionalFloat(cutoffFrequency); - if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - out.writeBoolean(autoGenerateSynonymsPhraseQuery); - } + out.writeBoolean(autoGenerateSynonymsPhraseQuery); } /** Returns the field name used in this query. 
*/ diff --git a/server/src/main/java/org/opensearch/index/query/MultiMatchQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/MultiMatchQueryBuilder.java index 5f06ba3fea8..3a130f39553 100644 --- a/server/src/main/java/org/opensearch/index/query/MultiMatchQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/MultiMatchQueryBuilder.java @@ -251,17 +251,11 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder { order = in.readOptionalWriteable(SortOrder::readFromStream); sortMode = in.readOptionalWriteable(SortMode::readFromStream); unmappedType = in.readOptionalString(); - if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - nestedSort = in.readOptionalWriteable(NestedSortBuilder::new); - } + nestedSort = in.readOptionalWriteable(NestedSortBuilder::new); if (in.getVersion().onOrAfter(LegacyESVersion.V_7_2_0)) { numericType = in.readOptionalString(); } @@ -174,9 +172,7 @@ public class FieldSortBuilder extends SortBuilder { out.writeOptionalWriteable(order); out.writeOptionalWriteable(sortMode); out.writeOptionalString(unmappedType); - if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - out.writeOptionalWriteable(nestedSort); - } + out.writeOptionalWriteable(nestedSort); if (out.getVersion().onOrAfter(LegacyESVersion.V_7_2_0)) { out.writeOptionalString(numericType); } diff --git a/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java index cfe4bc877d0..9fb90ee1ebc 100644 --- a/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java @@ -187,9 +187,7 @@ public class GeoDistanceSortBuilder extends SortBuilder sortMode = in.readOptionalWriteable(SortMode::readFromStream); nestedFilter = in.readOptionalNamedWriteable(QueryBuilder.class); nestedPath = in.readOptionalString(); - if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - nestedSort = in.readOptionalWriteable(NestedSortBuilder::new); - } + nestedSort = in.readOptionalWriteable(NestedSortBuilder::new); validation = GeoValidationMethod.readFromStream(in); if (in.getVersion().onOrAfter(LegacyESVersion.V_6_4_0)) { ignoreUnmapped = in.readBoolean(); @@ -206,9 +204,7 @@ public class GeoDistanceSortBuilder extends SortBuilder out.writeOptionalWriteable(sortMode); out.writeOptionalNamedWriteable(nestedFilter); out.writeOptionalString(nestedPath); - if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - out.writeOptionalWriteable(nestedSort); - } + out.writeOptionalWriteable(nestedSort); validation.writeTo(out); if (out.getVersion().onOrAfter(LegacyESVersion.V_6_4_0)) { out.writeBoolean(ignoreUnmapped); diff --git a/server/src/main/java/org/opensearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/opensearch/search/sort/ScriptSortBuilder.java index 5e38b3032d4..136d60598e7 100644 --- a/server/src/main/java/org/opensearch/search/sort/ScriptSortBuilder.java +++ b/server/src/main/java/org/opensearch/search/sort/ScriptSortBuilder.java @@ -135,9 +135,7 @@ public class ScriptSortBuilder extends SortBuilder { sortMode = in.readOptionalWriteable(SortMode::readFromStream); nestedPath = in.readOptionalString(); nestedFilter = in.readOptionalNamedWriteable(QueryBuilder.class); - if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - nestedSort = in.readOptionalWriteable(NestedSortBuilder::new); - } + nestedSort = in.readOptionalWriteable(NestedSortBuilder::new); } 
@Override @@ -148,9 +146,7 @@ public class ScriptSortBuilder extends SortBuilder { out.writeOptionalWriteable(sortMode); out.writeOptionalString(nestedPath); out.writeOptionalNamedWriteable(nestedFilter); - if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) { - out.writeOptionalWriteable(nestedSort); - } + out.writeOptionalWriteable(nestedSort); } /** diff --git a/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggestion.java b/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggestion.java index 1f500f89419..3f6a3138daa 100644 --- a/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggestion.java +++ b/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggestion.java @@ -34,7 +34,6 @@ package org.opensearch.search.suggest.completion; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.util.PriorityQueue; -import org.opensearch.LegacyESVersion; import org.opensearch.common.ParseField; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -98,9 +97,7 @@ public final class CompletionSuggestion extends Suggest.Suggestion remoteAddresses = Arrays.asList("seed:1", "seed:2"); String serverName = "the_server_name"; diff --git a/test/framework/src/test/java/org/opensearch/test/rest/yaml/section/SkipSectionTests.java b/test/framework/src/test/java/org/opensearch/test/rest/yaml/section/SkipSectionTests.java index 45833f64ebc..2735128a458 100644 --- a/test/framework/src/test/java/org/opensearch/test/rest/yaml/section/SkipSectionTests.java +++ b/test/framework/src/test/java/org/opensearch/test/rest/yaml/section/SkipSectionTests.java @@ -87,12 +87,16 @@ public class SkipSectionTests extends AbstractClientYamlTestFragmentParserTestCa } public void testParseSkipSectionVersionNoFeature() throws Exception { - parser = createParser(YamlXContent.yamlXContent, "version: \" - 6.1.1\"\n" + "reason: Delete ignores the parent param"); + Version version = VersionUtils.randomVersion(random()); + parser = createParser( + YamlXContent.yamlXContent, + "version: \" - " + version + "\"\n" + "reason: Delete ignores the parent param" + ); SkipSection skipSection = SkipSection.parse(parser); assertThat(skipSection, notNullValue()); assertThat(skipSection.getLowerVersion(), equalTo(VersionUtils.getFirstVersion())); - assertThat(skipSection.getUpperVersion(), equalTo(LegacyESVersion.V_6_1_1)); + assertThat(skipSection.getUpperVersion(), equalTo(version)); assertThat(skipSection.getFeatures().size(), equalTo(0)); assertThat(skipSection.getReason(), equalTo("Delete ignores the parent param")); }
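
The change repeated across these hunks is the removal of wire-format gates keyed on LegacyESVersion.V_6_1_0: optional fields that were previously read or written only when the peer node was on 6.1.0 or later are now (de)serialized unconditionally, since 6.1.x nodes can no longer participate on the wire. Below is a minimal Java sketch of that before/after pattern, not code from this commit: the class name VersionGateSketch and its generic helper methods are illustrative, it assumes the OpenSearch server classes already imported by the touched files (StreamInput, StreamOutput, Writeable) are on the classpath, and the "before" methods only compile against the pre-patch tree because V_6_1_0 is deleted here.

import org.opensearch.LegacyESVersion;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.common.io.stream.Writeable;

import java.io.IOException;

final class VersionGateSketch {

    // BEFORE: a field introduced in 6.1.0 exists on the wire only when the
    // remote node is at least 6.1.0; older peers neither send nor expect it.
    static <T extends Writeable> T readBefore(StreamInput in, Writeable.Reader<T> reader) throws IOException {
        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
            return in.readOptionalWriteable(reader);
        }
        return null;
    }

    static void writeBefore(StreamOutput out, Writeable value) throws IOException {
        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
            out.writeOptionalWriteable(value);
        }
    }

    // AFTER: with 6.1.x peers outside the supported wire range, the version
    // gate is dropped and the optional field is handled unconditionally, as in
    // the DiscoveryStats, NodeStats, Segment, Fuzziness and sort-builder hunks.
    static <T extends Writeable> T readAfter(StreamInput in, Writeable.Reader<T> reader) throws IOException {
        return in.readOptionalWriteable(reader);
    }

    static void writeAfter(StreamOutput out, Writeable value) throws IOException {
        out.writeOptionalWriteable(value);
    }
}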