Remove LegacyESVersion.V_6_1_x constants (#1681)
This commit removes LegacyESVersion.V_6_1_x constants including all pre-release versions and bug fixes.

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
parent b74d71fb74
commit 821417b9ab
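
The change in every file below is the same mechanical transformation: a wire-format or index-format gate on LegacyESVersion.V_6_1_0 is deleted, and the branch that was taken on or after 6.1.0 is kept unconditionally. A minimal sketch of the pattern, using a hypothetical ExampleStats writeable that is not part of this commit:

    import java.io.IOException;

    import org.opensearch.common.io.stream.StreamInput;
    import org.opensearch.common.io.stream.StreamOutput;
    import org.opensearch.common.io.stream.Writeable;

    // Hypothetical class, for illustration only.
    public class ExampleStats implements Writeable {

        private final String detail;

        public ExampleStats(StreamInput in) throws IOException {
            // Before this commit, a field introduced in 6.1.0 was read conditionally:
            //
            //     if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
            //         detail = in.readOptionalString();
            //     } else {
            //         detail = null;
            //     }
            //
            // After it, the read is unconditional, because a peer older than
            // 6.1.0 can no longer exist in a supported cluster:
            detail = in.readOptionalString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            // The matching write-side gate is dropped the same way.
            out.writeOptionalString(detail);
        }
    }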

@@ -55,7 +55,6 @@ import org.apache.lucene.util.BitDocIdSet;
 import org.apache.lucene.util.BitSet;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.SetOnce;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.OpenSearchException;
 import org.opensearch.ResourceNotFoundException;
 import org.opensearch.Version;
@@ -286,9 +285,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
     PercolateQueryBuilder(StreamInput in) throws IOException {
         super(in);
         field = in.readString();
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            name = in.readOptionalString();
-        }
+        name = in.readOptionalString();
         documentType = in.readOptionalString();
         indexedDocumentIndex = in.readOptionalString();
         indexedDocumentType = in.readOptionalString();
@@ -300,12 +297,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
         } else {
             indexedDocumentVersion = null;
         }
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            documents = in.readList(StreamInput::readBytesReference);
-        } else {
-            BytesReference document = in.readOptionalBytesReference();
-            documents = document != null ? Collections.singletonList(document) : Collections.emptyList();
-        }
+        documents = in.readList(StreamInput::readBytesReference);
         if (documents.isEmpty() == false) {
             documentXContentType = in.readEnum(XContentType.class);
         } else {
@@ -329,9 +321,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
             throw new IllegalStateException("supplier must be null, can't serialize suppliers, missing a rewriteAndFetch?");
         }
         out.writeString(field);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            out.writeOptionalString(name);
-        }
+        out.writeOptionalString(name);
         out.writeOptionalString(documentType);
         out.writeOptionalString(indexedDocumentIndex);
         out.writeOptionalString(indexedDocumentType);
@@ -344,17 +334,9 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
         } else {
             out.writeBoolean(false);
         }
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            out.writeVInt(documents.size());
-            for (BytesReference document : documents) {
-                out.writeBytesReference(document);
-            }
-        } else {
-            if (documents.size() > 1) {
-                throw new IllegalArgumentException("Nodes prior to 6.1.0 cannot accept multiple documents");
-            }
-            BytesReference doc = documents.isEmpty() ? null : documents.iterator().next();
-            out.writeOptionalBytesReference(doc);
-        }
+        out.writeVInt(documents.size());
+        for (BytesReference document : documents) {
+            out.writeBytesReference(document);
+        }
         if (documents.isEmpty() == false) {
             out.writeEnum(documentXContentType);

@@ -54,7 +54,6 @@ import org.apache.lucene.search.TermInSetQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.action.support.PlainActionFuture;
 import org.opensearch.common.ParsingException;
@@ -67,7 +66,6 @@ import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.xcontent.XContentLocation;
 import org.opensearch.common.xcontent.XContentParser;
 import org.opensearch.common.xcontent.XContentType;
 import org.opensearch.index.mapper.BinaryFieldMapper;
 import org.opensearch.index.mapper.FieldMapper;
 import org.opensearch.index.mapper.KeywordFieldMapper;
@@ -109,8 +107,6 @@ import static org.opensearch.index.query.AbstractQueryBuilder.parseInnerQueryBui
 
 public class PercolatorFieldMapper extends ParametrizedFieldMapper {
 
     static final XContentType QUERY_BUILDER_CONTENT_TYPE = XContentType.SMILE;
 
     static final Setting<Boolean> INDEX_MAP_UNMAPPED_FIELDS_AS_TEXT_SETTING = Setting.boolSetting(
         "index.percolator.map_unmapped_fields_as_text",
         false,
@@ -303,7 +299,7 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper {
         }
 
         BooleanQuery.Builder candidateQuery = new BooleanQuery.Builder();
-        if (canUseMinimumShouldMatchField && indexVersion.onOrAfter(LegacyESVersion.V_6_1_0)) {
+        if (canUseMinimumShouldMatchField) {
             LongValuesSource valuesSource = LongValuesSource.fromIntField(minimumShouldMatchField.name());
             for (BytesRef extractedTerm : extractedTerms) {
                 subQueries.add(new TermQuery(new Term(queryTermsField.name(), extractedTerm)));
@@ -458,7 +454,6 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper {
             }
         }
 
-        Version indexVersionCreated = context.mapperService().getIndexSettings().getIndexVersionCreated();
         if (result.matchAllDocs) {
             doc.add(new Field(extractionResultField.name(), EXTRACTION_FAILED, INDEXED_KEYWORD));
             if (result.verified) {
@@ -471,9 +466,7 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper {
         }
 
         createFieldNamesField(context);
-        if (indexVersionCreated.onOrAfter(LegacyESVersion.V_6_1_0)) {
-            doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch));
-        }
+        doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch));
     }
 
     static void configureContext(QueryShardContext context, boolean mapUnmappedFieldsAsString) {

@@ -53,7 +53,6 @@ import org.apache.lucene.search.spans.SpanTermQuery;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util.automaton.ByteRunAutomaton;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.common.lucene.search.function.FunctionScoreQuery;
 import org.opensearch.index.query.DateRangeIncludingNowQuery;
@@ -61,11 +60,9 @@ import org.opensearch.index.query.DateRangeIncludingNowQuery;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
-import java.util.Comparator;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Objects;
-import java.util.Optional;
 import java.util.Set;
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
@@ -229,14 +226,6 @@ final class QueryAnalyzer {
             boolean verified = isVerified(query);
             Set<QueryExtraction> qe = Arrays.stream(terms).map(QueryExtraction::new).collect(Collectors.toSet());
             if (qe.size() > 0) {
-                if (version.before(LegacyESVersion.V_6_1_0) && conjunction) {
-                    Optional<QueryExtraction> longest = qe.stream()
-                        .filter(q -> q.term != null)
-                        .max(Comparator.comparingInt(q -> q.term.bytes().length));
-                    if (longest.isPresent()) {
-                        qe = Collections.singleton(longest.get());
-                    }
-                }
                 this.terms.add(new Result(verified, qe, conjunction ? qe.size() : 1));
             }
         }
@@ -300,80 +289,71 @@ final class QueryAnalyzer {
         if (conjunctionsWithUnknowns.size() == 1) {
             return conjunctionsWithUnknowns.get(0);
         }
-        if (version.onOrAfter(LegacyESVersion.V_6_1_0)) {
-            for (Result subResult : conjunctions) {
-                if (subResult.isMatchNoDocs()) {
-                    return subResult;
-                }
-            }
-
-            int msm = 0;
-            boolean verified = conjunctionsWithUnknowns.size() == conjunctions.size();
-            boolean matchAllDocs = true;
-            Set<QueryExtraction> extractions = new HashSet<>();
-            Set<String> seenRangeFields = new HashSet<>();
-            for (Result result : conjunctions) {
-
-                int resultMsm = result.minimumShouldMatch;
-                for (QueryExtraction queryExtraction : result.extractions) {
-                    if (queryExtraction.range != null) {
-                        // In case of range queries each extraction does not simply increment the
-                        // minimum_should_match for that percolator query like for a term based extraction,
-                        // so that can lead to more false positives for percolator queries with range queries
-                        // than term based queries.
-                        // This is because the way number fields are extracted from the document to be
-                        // percolated. Per field a single range is extracted and if a percolator query has two or
-                        // more range queries on the same field, then the minimum should match can be higher than clauses
-                        // in the CoveringQuery. Therefore right now the minimum should match is only incremented once per
-                        // number field when processing the percolator query at index time.
-                        // For multiple ranges within a single extraction (ie from an existing conjunction or disjunction)
-                        // then this will already have been taken care of, so we only check against fieldnames from
-                        // previously processed extractions, and don't add to the seenRangeFields list until all
-                        // extractions from this result are processed
-                        if (seenRangeFields.contains(queryExtraction.range.fieldName)) {
-                            resultMsm = Math.max(0, resultMsm - 1);
-                            verified = false;
-                        }
-                    } else {
-                        // In case that there are duplicate term query extractions we need to be careful with
-                        // incrementing msm, because that could lead to valid matches not becoming candidate matches:
-                        // query: (field:val1 AND field:val2) AND (field:val2 AND field:val3)
-                        // doc: field: val1 val2 val3
-                        // So lets be protective and decrease the msm:
-                        if (extractions.contains(queryExtraction)) {
-                            resultMsm = Math.max(0, resultMsm - 1);
-                            verified = false;
-                        }
-                    }
-                }
-                msm += resultMsm;
-
-                // add range fields from this Result to the seenRangeFields set so that minimumShouldMatch is correctly
-                // calculated for subsequent Results
-                result.extractions.stream().map(e -> e.range).filter(Objects::nonNull).map(e -> e.fieldName).forEach(seenRangeFields::add);
-
-                if (result.verified == false
-                    // If some inner extractions are optional, the result can't be verified
-                    || result.minimumShouldMatch < result.extractions.size()) {
-                    verified = false;
-                }
-                matchAllDocs &= result.matchAllDocs;
-                extractions.addAll(result.extractions);
-            }
-
-            if (matchAllDocs) {
-                return new Result(matchAllDocs, verified);
-            } else {
-                return new Result(verified, extractions, msm);
-            }
-        } else {
-            Result bestClause = null;
-            for (Result result : conjunctions) {
-                bestClause = selectBestResult(result, bestClause);
-            }
-            return bestClause;
-        }
+        for (Result subResult : conjunctions) {
+            if (subResult.isMatchNoDocs()) {
+                return subResult;
+            }
+        }
+
+        int msm = 0;
+        boolean verified = conjunctionsWithUnknowns.size() == conjunctions.size();
+        boolean matchAllDocs = true;
+        Set<QueryExtraction> extractions = new HashSet<>();
+        Set<String> seenRangeFields = new HashSet<>();
+        for (Result result : conjunctions) {
+
+            int resultMsm = result.minimumShouldMatch;
+            for (QueryExtraction queryExtraction : result.extractions) {
+                if (queryExtraction.range != null) {
+                    // In case of range queries each extraction does not simply increment the
+                    // minimum_should_match for that percolator query like for a term based extraction,
+                    // so that can lead to more false positives for percolator queries with range queries
+                    // than term based queries.
+                    // This is because the way number fields are extracted from the document to be
+                    // percolated. Per field a single range is extracted and if a percolator query has two or
+                    // more range queries on the same field, then the minimum should match can be higher than clauses
+                    // in the CoveringQuery. Therefore right now the minimum should match is only incremented once per
+                    // number field when processing the percolator query at index time.
+                    // For multiple ranges within a single extraction (ie from an existing conjunction or disjunction)
+                    // then this will already have been taken care of, so we only check against fieldnames from
+                    // previously processed extractions, and don't add to the seenRangeFields list until all
+                    // extractions from this result are processed
+                    if (seenRangeFields.contains(queryExtraction.range.fieldName)) {
+                        resultMsm = Math.max(0, resultMsm - 1);
+                        verified = false;
+                    }
+                } else {
+                    // In case that there are duplicate term query extractions we need to be careful with
+                    // incrementing msm, because that could lead to valid matches not becoming candidate matches:
+                    // query: (field:val1 AND field:val2) AND (field:val2 AND field:val3)
+                    // doc: field: val1 val2 val3
+                    // So lets be protective and decrease the msm:
+                    if (extractions.contains(queryExtraction)) {
+                        resultMsm = Math.max(0, resultMsm - 1);
+                        verified = false;
+                    }
+                }
+            }
+            msm += resultMsm;
+
+            // add range fields from this Result to the seenRangeFields set so that minimumShouldMatch is correctly
+            // calculated for subsequent Results
+            result.extractions.stream().map(e -> e.range).filter(Objects::nonNull).map(e -> e.fieldName).forEach(seenRangeFields::add);
+
+            if (result.verified == false
+                // If some inner extractions are optional, the result can't be verified
+                || result.minimumShouldMatch < result.extractions.size()) {
+                verified = false;
+            }
+            matchAllDocs &= result.matchAllDocs;
+            extractions.addAll(result.extractions);
+        }
+
+        if (matchAllDocs) {
+            return new Result(matchAllDocs, verified);
+        } else {
+            return new Result(verified, extractions, msm);
+        }
     }
@@ -386,12 +366,7 @@ final class QueryAnalyzer {
         }
         // Keep track of the msm for each clause:
         List<Integer> clauses = new ArrayList<>(disjunctions.size());
-        boolean verified;
-        if (version.before(LegacyESVersion.V_6_1_0)) {
-            verified = requiredShouldClauses <= 1;
-        } else {
-            verified = true;
-        }
+        boolean verified = true;
         int numMatchAllClauses = 0;
         boolean hasRangeExtractions = false;
 
@@ -438,10 +413,10 @@ final class QueryAnalyzer {
         boolean matchAllDocs = numMatchAllClauses > 0 && numMatchAllClauses >= requiredShouldClauses;
 
         int msm = 0;
-        if (version.onOrAfter(LegacyESVersion.V_6_1_0) &&
-            // Having ranges would mean we need to juggle with the msm and that complicates this logic a lot,
-            // so for now lets not do it.
-            hasRangeExtractions == false) {
+        // Having ranges would mean we need to juggle with the msm and that complicates this logic a lot,
+        // so for now lets not do it.
+        if (hasRangeExtractions == false) {
             // Figure out what the combined msm is for this disjunction:
             // (sum the lowest required clauses, otherwise we're too strict and queries may not match)
             clauses = clauses.stream().filter(val -> val > 0).sorted().collect(Collectors.toList());

@@ -90,7 +90,6 @@ import org.apache.lucene.search.spans.SpanTermQuery;
 import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.common.CheckedFunction;
@@ -114,6 +113,7 @@ import org.opensearch.plugins.Plugin;
 import org.opensearch.test.OpenSearchSingleNodeTestCase;
 import org.junit.After;
 import org.junit.Before;
+import org.opensearch.test.VersionUtils;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -673,7 +673,7 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase {
         IndexSearcher shardSearcher = newSearcher(directoryReader);
         shardSearcher.setQueryCache(null);
 
-        Version v = LegacyESVersion.V_6_1_0;
+        Version v = VersionUtils.randomIndexCompatibleVersion(random());
         MemoryIndex memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new IntPoint("int_field", 3)), new WhitespaceAnalyzer());
         IndexSearcher percolateSearcher = memoryIndex.createSearcher();
         Query query = fieldType.percolateQuery(

@@ -1130,7 +1130,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
          * we will hit a warning exception because we put some deprecated settings in that test.
          */
         if (isRunningAgainstOldCluster() == false
-            && getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_1_0) && getOldClusterVersion().before(LegacyESVersion.V_6_5_0)) {
+            && getOldClusterVersion().before(LegacyESVersion.V_6_5_0)) {
             for (String warning : e.getResponse().getWarnings()) {
                 assertThat(warning, containsString(
                     "setting was deprecated and will be removed in a future release! "

@@ -58,7 +58,6 @@ import static org.hamcrest.Matchers.equalTo;
 public class FullClusterRestartSettingsUpgradeIT extends AbstractFullClusterRestartTestCase {
 
     public void testRemoteClusterSettingsUpgraded() throws IOException {
-        assumeTrue("skip_unavailable did not exist until 6.1.0", getOldClusterVersion().onOrAfter(LegacyESVersion.V_6_1_0));
         assumeTrue("settings automatically upgraded since 6.5.0", getOldClusterVersion().before(LegacyESVersion.V_6_5_0));
         if (isRunningAgainstOldCluster()) {
             final Request putSettingsRequest = new Request("PUT", "/_cluster/settings");
@@ -91,7 +90,7 @@ public class FullClusterRestartSettingsUpgradeIT extends AbstractFullClusterRest
                 SEARCH_REMOTE_CLUSTERS_SEEDS.getConcreteSettingForNamespace("foo").get(settings),
                 equalTo(Collections.singletonList("localhost:9200")));
             assertTrue(SEARCH_REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace("foo").exists(settings));
-            assertEquals(String.valueOf(SEARCH_REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace("foo").get(settings)),
+            assertEquals(String.valueOf(SEARCH_REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace("foo").get(settings)),
                 "localhost:9200");
         }
 
@@ -118,7 +117,7 @@ public class FullClusterRestartSettingsUpgradeIT extends AbstractFullClusterRest
                 equalTo(Collections.singletonList("localhost:9200")));
             assertFalse(SEARCH_REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace("foo").exists(settings));
             assertTrue(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace("foo").exists(settings));
-            assertEquals(String.valueOf(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace("foo").get(settings)),
+            assertEquals(String.valueOf(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace("foo").get(settings)),
                 "localhost:9200");
         }
     }

@@ -46,11 +46,6 @@ import java.lang.reflect.Field;
  */
 public class LegacyESVersion extends Version {
 
-    public static final LegacyESVersion V_6_1_0 = new LegacyESVersion(6010099, org.apache.lucene.util.Version.LUCENE_7_1_0);
-    public static final LegacyESVersion V_6_1_1 = new LegacyESVersion(6010199, org.apache.lucene.util.Version.LUCENE_7_1_0);
-    public static final LegacyESVersion V_6_1_2 = new LegacyESVersion(6010299, org.apache.lucene.util.Version.LUCENE_7_1_0);
-    public static final LegacyESVersion V_6_1_3 = new LegacyESVersion(6010399, org.apache.lucene.util.Version.LUCENE_7_1_0);
-    public static final LegacyESVersion V_6_1_4 = new LegacyESVersion(6010499, org.apache.lucene.util.Version.LUCENE_7_1_0);
     // The below version is missing from the 7.3 JAR
     private static final org.apache.lucene.util.Version LUCENE_7_2_1 = org.apache.lucene.util.Version.fromBits(7, 2, 1);
     public static final LegacyESVersion V_6_2_0 = new LegacyESVersion(6020099, LUCENE_7_2_1);
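
For context on the deleted constants: the internal ids pack the version digits positionally, following the id scheme the Version class uses in this codebase (the major digit, then two digits each for minor, revision, and build, with build 99 marking a GA release), as the constants themselves suggest. A standalone sketch of the decoding:

    // Decodes the internal id of the removed constant V_6_1_3 (6010399).
    public class VersionIdDecode {
        public static void main(String[] args) {
            int id = 6010399;
            int major = id / 1000000;        // 6
            int minor = (id / 10000) % 100;  // 1
            int revision = (id / 100) % 100; // 3
            int build = id % 100;            // 99 -> GA release
            System.out.printf("%d.%d.%d (build %d)%n", major, minor, revision, build);
        }
    }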

@@ -134,11 +134,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContentFragment {
         scriptStats = in.readOptionalWriteable(ScriptStats::new);
         discoveryStats = in.readOptionalWriteable(DiscoveryStats::new);
         ingestStats = in.readOptionalWriteable(IngestStats::new);
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            adaptiveSelectionStats = in.readOptionalWriteable(AdaptiveSelectionStats::new);
-        } else {
-            adaptiveSelectionStats = null;
-        }
+        adaptiveSelectionStats = in.readOptionalWriteable(AdaptiveSelectionStats::new);
         scriptCacheStats = null;
         if (in.getVersion().onOrAfter(LegacyESVersion.V_7_8_0)) {
             if (in.getVersion().before(LegacyESVersion.V_7_9_0)) {
@@ -328,9 +324,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContentFragment {
         out.writeOptionalWriteable(scriptStats);
         out.writeOptionalWriteable(discoveryStats);
         out.writeOptionalWriteable(ingestStats);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            out.writeOptionalWriteable(adaptiveSelectionStats);
-        }
+        out.writeOptionalWriteable(adaptiveSelectionStats);
         if (out.getVersion().onOrAfter(LegacyESVersion.V_7_8_0) && out.getVersion().before(LegacyESVersion.V_7_9_0)) {
             out.writeOptionalWriteable(scriptCacheStats);
         }

@@ -74,9 +74,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
             optionallyAddMetric(in.readBoolean(), Metric.SCRIPT.metricName());
             optionallyAddMetric(in.readBoolean(), Metric.DISCOVERY.metricName());
             optionallyAddMetric(in.readBoolean(), Metric.INGEST.metricName());
-            if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-                optionallyAddMetric(in.readBoolean(), Metric.ADAPTIVE_SELECTION.metricName());
-            }
+            optionallyAddMetric(in.readBoolean(), Metric.ADAPTIVE_SELECTION.metricName());
         } else {
             requestedMetrics.addAll(in.readStringList());
         }
@@ -212,9 +210,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
             out.writeBoolean(Metric.SCRIPT.containedIn(requestedMetrics));
             out.writeBoolean(Metric.DISCOVERY.containedIn(requestedMetrics));
             out.writeBoolean(Metric.INGEST.containedIn(requestedMetrics));
-            if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-                out.writeBoolean(Metric.ADAPTIVE_SELECTION.containedIn(requestedMetrics));
-            }
+            out.writeBoolean(Metric.ADAPTIVE_SELECTION.containedIn(requestedMetrics));
         } else {
             out.writeStringArray(requestedMetrics.toArray(new String[0]));
         }

@@ -32,7 +32,6 @@
 
 package org.opensearch.action.admin.indices.open;
 
-import org.opensearch.LegacyESVersion;
 import org.opensearch.action.ActionRequestValidationException;
 import org.opensearch.action.IndicesRequest;
 import org.opensearch.action.support.ActiveShardCount;
@@ -59,9 +58,7 @@ public class OpenIndexRequest extends AcknowledgedRequest<OpenIndexRequest> impl
         super(in);
         indices = in.readStringArray();
         indicesOptions = IndicesOptions.readIndicesOptions(in);
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            waitForActiveShards = ActiveShardCount.readFrom(in);
-        }
+        waitForActiveShards = ActiveShardCount.readFrom(in);
     }
 
     public OpenIndexRequest() {}
@@ -167,8 +164,6 @@ public class OpenIndexRequest extends AcknowledgedRequest<OpenIndexRequest> impl
         super.writeTo(out);
         out.writeStringArray(indices);
         indicesOptions.writeIndicesOptions(out);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            waitForActiveShards.writeTo(out);
-        }
+        waitForActiveShards.writeTo(out);
     }
 }

@@ -32,7 +32,6 @@
 
 package org.opensearch.action.admin.indices.open;
 
-import org.opensearch.LegacyESVersion;
 import org.opensearch.action.support.master.ShardsAcknowledgedResponse;
 import org.opensearch.common.io.stream.StreamInput;
 import org.opensearch.common.io.stream.StreamOutput;
@@ -57,7 +56,7 @@ public class OpenIndexResponse extends ShardsAcknowledgedResponse {
     }
 
     public OpenIndexResponse(StreamInput in) throws IOException {
-        super(in, in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0), true);
+        super(in, true, true);
     }
 
     public OpenIndexResponse(boolean acknowledged, boolean shardsAcknowledged) {
@@ -67,9 +66,7 @@ public class OpenIndexResponse extends ShardsAcknowledgedResponse {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            writeShardsAcknowledged(out);
-        }
+        writeShardsAcknowledged(out);
     }
 
     public static OpenIndexResponse fromXContent(XContentParser parser) {

@@ -32,8 +32,6 @@
 
 package org.opensearch.action.admin.indices.rollover;
 
-import org.opensearch.LegacyESVersion;
-import org.opensearch.Version;
 import org.opensearch.common.io.stream.StreamInput;
 import org.opensearch.common.io.stream.StreamOutput;
 import org.opensearch.common.unit.ByteSizeUnit;
@@ -65,11 +63,6 @@ public class MaxSizeCondition extends Condition<ByteSizeValue> {
         return new Result(this, stats.indexSize.getBytes() >= value.getBytes());
     }
 
-    @Override
-    boolean includedInVersion(Version version) {
-        return version.onOrAfter(LegacyESVersion.V_6_1_0);
-    }
-
     @Override
     public String getWriteableName() {
         return NAME;

@@ -32,15 +32,12 @@
 
 package org.opensearch.action.admin.indices.shrink;
 
-import org.opensearch.LegacyESVersion;
-import org.opensearch.Version;
 import org.opensearch.action.ActionType;
 
 public class ResizeAction extends ActionType<ResizeResponse> {
 
     public static final ResizeAction INSTANCE = new ResizeAction();
     public static final String NAME = "indices:admin/resize";
-    public static final Version COMPATIBILITY_VERSION = LegacyESVersion.V_6_1_0; // TODO remove this once it's backported
 
     private ResizeAction() {
         super(NAME, ResizeResponse::new);

@@ -81,11 +81,7 @@ public class ResizeRequest extends AcknowledgedRequest<ResizeRequest> implements
         super(in);
         targetIndexRequest = new CreateIndexRequest(in);
         sourceIndex = in.readString();
-        if (in.getVersion().onOrAfter(ResizeAction.COMPATIBILITY_VERSION)) {
-            type = in.readEnum(ResizeType.class);
-        } else {
-            type = ResizeType.SHRINK; // BWC this used to be shrink only
-        }
+        type = in.readEnum(ResizeType.class);
         if (in.getVersion().before(LegacyESVersion.V_6_4_0)) {
             copySettings = null;
         } else {
@@ -128,12 +124,10 @@ public class ResizeRequest extends AcknowledgedRequest<ResizeRequest> implements
         super.writeTo(out);
         targetIndexRequest.writeTo(out);
         out.writeString(sourceIndex);
-        if (out.getVersion().onOrAfter(ResizeAction.COMPATIBILITY_VERSION)) {
-            if (type == ResizeType.CLONE && out.getVersion().before(LegacyESVersion.V_7_4_0)) {
-                throw new IllegalArgumentException("can't send clone request to a node that's older than " + LegacyESVersion.V_7_4_0);
-            }
-            out.writeEnum(type);
-        }
+        if (type == ResizeType.CLONE && out.getVersion().before(LegacyESVersion.V_7_4_0)) {
+            throw new IllegalArgumentException("can't send clone request to a node that's older than " + LegacyESVersion.V_7_4_0);
+        }
+        out.writeEnum(type);
         // noinspection StatementWithEmptyBody
         if (out.getVersion().before(LegacyESVersion.V_6_4_0)) {
 

@@ -250,12 +250,6 @@ public class TransportResizeAction extends TransportMasterNodeAction<ResizeReque
 
     @Override
     protected String getMasterActionName(DiscoveryNode node) {
-        if (node.getVersion().onOrAfter(ResizeAction.COMPATIBILITY_VERSION)) {
-            return super.getMasterActionName(node);
-        } else {
-            // this is for BWC - when we send this to version that doesn't have ResizeAction.NAME registered
-            // we have to send to shrink instead.
-            return ShrinkAction.NAME;
-        }
+        return super.getMasterActionName(node);
     }
 }

@@ -103,11 +103,7 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb
                 shardFailures[i] = ShardSearchFailure.readShardSearchFailure(in);
             }
         }
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            clusters = new Clusters(in);
-        } else {
-            clusters = Clusters.EMPTY;
-        }
+        clusters = new Clusters(in);
         scrollId = in.readOptionalString();
         tookInMillis = in.readVLong();
         skippedShards = in.readVInt();
@@ -460,9 +456,7 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb
         for (ShardSearchFailure shardSearchFailure : shardFailures) {
             shardSearchFailure.writeTo(out);
         }
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            clusters.writeTo(out);
-        }
+        clusters.writeTo(out);
         out.writeOptionalString(scrollId);
         out.writeVLong(tookInMillis);
         out.writeVInt(skippedShards);

@@ -32,7 +32,6 @@
 
 package org.opensearch.cluster.routing.allocation.decider;
 
-import org.opensearch.action.admin.indices.shrink.ResizeAction;
 import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.cluster.routing.RecoverySource;
 import org.opensearch.cluster.routing.RoutingNode;
@@ -79,9 +78,6 @@ public class ResizeAllocationDecider extends AllocationDecider {
             return allocation.decision(Decision.NO, NAME, "source primary shard [%s] is not active", shardId);
         }
         if (node != null) { // we might get called from the 2 param canAllocate method..
-            if (node.node().getVersion().before(ResizeAction.COMPATIBILITY_VERSION)) {
-                return allocation.decision(Decision.NO, NAME, "node [%s] is too old to split a shard", node.nodeId());
-            }
             if (sourceShardRouting.currentNodeId().equals(node.nodeId())) {
                 return allocation.decision(Decision.YES, NAME, "source primary is allocated on this node");
             } else {

@@ -43,7 +43,6 @@ import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.OpenSearchException;
 import org.opensearch.Version;
 import org.opensearch.common.Nullable;
@@ -88,11 +87,7 @@ public class Queries {
      * @param indexVersionCreated the index version created since newer indices can identify a parent field more efficiently
      */
     public static Query newNonNestedFilter(Version indexVersionCreated) {
-        if (indexVersionCreated.onOrAfter(LegacyESVersion.V_6_1_0)) {
-            return new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME);
-        } else {
-            return new BooleanQuery.Builder().add(new MatchAllDocsQuery(), Occur.FILTER).add(newNestedFilter(), Occur.MUST_NOT).build();
-        }
+        return new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME);
     }
 
     public static BooleanQuery filtered(@Nullable Query query, @Nullable Query filter) {

@@ -34,7 +34,6 @@ package org.opensearch.common.settings;
 
 import org.apache.logging.log4j.Level;
 import org.apache.lucene.util.SetOnce;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.OpenSearchGenerationException;
 import org.opensearch.OpenSearchParseException;
 import org.opensearch.Version;
@@ -556,23 +555,15 @@ public final class Settings implements ToXContentFragment {
     public static Settings readSettingsFromStream(StreamInput in) throws IOException {
         Builder builder = new Builder();
         int numberOfSettings = in.readVInt();
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            for (int i = 0; i < numberOfSettings; i++) {
-                String key = in.readString();
-                Object value = in.readGenericValue();
-                if (value == null) {
-                    builder.putNull(key);
-                } else if (value instanceof List) {
-                    builder.putList(key, (List<String>) value);
-                } else {
-                    builder.put(key, value.toString());
-                }
-            }
-        } else {
-            for (int i = 0; i < numberOfSettings; i++) {
-                String key = in.readString();
-                String value = in.readOptionalString();
-                builder.put(key, value);
-            }
-        }
+        for (int i = 0; i < numberOfSettings; i++) {
+            String key = in.readString();
+            Object value = in.readGenericValue();
+            if (value == null) {
+                builder.putNull(key);
+            } else if (value instanceof List) {
+                builder.putList(key, (List<String>) value);
+            } else {
+                builder.put(key, value.toString());
+            }
+        }
         return builder.build();
@@ -581,27 +572,10 @@ public final class Settings implements ToXContentFragment {
     public static void writeSettingsToStream(Settings settings, StreamOutput out) throws IOException {
         // pull settings to exclude secure settings in size()
         Set<Map.Entry<String, Object>> entries = settings.settings.entrySet();
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            out.writeVInt(entries.size());
-            for (Map.Entry<String, Object> entry : entries) {
-                out.writeString(entry.getKey());
-                out.writeGenericValue(entry.getValue());
-            }
-        } else {
-            int size = entries.stream().mapToInt(e -> e.getValue() instanceof List ? ((List) e.getValue()).size() : 1).sum();
-            out.writeVInt(size);
-            for (Map.Entry<String, Object> entry : entries) {
-                if (entry.getValue() instanceof List) {
-                    int idx = 0;
-                    for (String value : (List<String>) entry.getValue()) {
-                        out.writeString(entry.getKey() + "." + idx++);
-                        out.writeOptionalString(value);
-                    }
-                } else {
-                    out.writeString(entry.getKey());
-                    out.writeOptionalString(toString(entry.getValue()));
-                }
-            }
-        }
+        out.writeVInt(entries.size());
+        for (Map.Entry<String, Object> entry : entries) {
+            out.writeString(entry.getKey());
+            out.writeGenericValue(entry.getValue());
+        }
     }
 
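
The deleted legacy branch of writeSettingsToStream flattened list-valued settings into numbered dotted keys (key.0, key.1, ...) for pre-6.1 peers, while the surviving branch writes one generic value per key. A hypothetical, self-contained illustration of the two encodings (not code from this commit):

    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class SettingsEncodingDemo {
        public static void main(String[] args) {
            Map<String, Object> settings = new LinkedHashMap<>();
            settings.put("cluster.name", "demo");
            settings.put("node.roles", Arrays.asList("data", "ingest"));

            // Surviving encoding: one entry per key, the value written generically.
            settings.forEach((key, value) -> System.out.println("generic: " + key + "=" + value));

            // Deleted encoding: lists flattened into numbered dotted keys.
            settings.forEach((key, value) -> {
                if (value instanceof List) {
                    int idx = 0;
                    for (Object v : (List<?>) value) {
                        System.out.println("legacy:  " + key + "." + idx++ + "=" + v);
                    }
                } else {
                    System.out.println("legacy:  " + key + "=" + value);
                }
            });
        }
    }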

@@ -31,7 +31,6 @@
 
 package org.opensearch.common.unit;
 
-import org.opensearch.LegacyESVersion;
 import org.opensearch.OpenSearchParseException;
 import org.opensearch.common.ParseField;
 import org.opensearch.common.io.stream.StreamInput;
@@ -95,7 +94,7 @@ public final class Fuzziness implements ToXContentFragment, Writeable {
      */
     public Fuzziness(StreamInput in) throws IOException {
         fuzziness = in.readString();
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0) && in.readBoolean()) {
+        if (in.readBoolean()) {
             lowDistance = in.readVInt();
             highDistance = in.readVInt();
         }
@@ -104,17 +103,15 @@ public final class Fuzziness implements ToXContentFragment, Writeable {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeString(fuzziness);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            // we cannot serialize the low/high bounds since the other node does not know about them.
-            // This is a best-effort to not fail queries in case the cluster is being upgraded and users
-            // start using features that are not available on all nodes.
-            if (isAutoWithCustomValues()) {
-                out.writeBoolean(true);
-                out.writeVInt(lowDistance);
-                out.writeVInt(highDistance);
-            } else {
-                out.writeBoolean(false);
-            }
-        }
+        // we cannot serialize the low/high bounds since the other node does not know about them.
+        // This is a best-effort to not fail queries in case the cluster is being upgraded and users
+        // start using features that are not available on all nodes.
+        if (isAutoWithCustomValues()) {
+            out.writeBoolean(true);
+            out.writeVInt(lowDistance);
+            out.writeVInt(highDistance);
+        } else {
+            out.writeBoolean(false);
+        }
     }
 

@@ -32,7 +32,6 @@
 
 package org.opensearch.discovery;
 
-import org.opensearch.LegacyESVersion;
 import org.opensearch.common.io.stream.StreamInput;
 import org.opensearch.common.io.stream.StreamOutput;
 import org.opensearch.common.io.stream.Writeable;
@@ -55,21 +54,13 @@ public class DiscoveryStats implements Writeable, ToXContentFragment {
 
     public DiscoveryStats(StreamInput in) throws IOException {
         queueStats = in.readOptionalWriteable(PendingClusterStateStats::new);
-
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            publishStats = in.readOptionalWriteable(PublishClusterStateStats::new);
-        } else {
-            publishStats = null;
-        }
+        publishStats = in.readOptionalWriteable(PublishClusterStateStats::new);
     }
 
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeOptionalWriteable(queueStats);
-
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            out.writeOptionalWriteable(publishStats);
-        }
+        out.writeOptionalWriteable(publishStats);
     }
 
     @Override

@@ -40,7 +40,6 @@ import org.apache.lucene.search.SortedSetSelector;
 import org.apache.lucene.search.SortedNumericSelector;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.Accountables;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.common.Nullable;
 import org.opensearch.common.io.stream.StreamInput;
 import org.opensearch.common.io.stream.StreamOutput;
@@ -89,10 +88,8 @@ public class Segment implements Writeable {
             ramTree = readRamTree(in);
         }
         segmentSort = readSegmentSort(in);
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0) && in.readBoolean()) {
+        if (in.readBoolean()) {
             attributes = in.readMap(StreamInput::readString, StreamInput::readString);
         } else {
             attributes = null;
         }
     }
 
@@ -204,12 +201,10 @@ public class Segment implements Writeable {
             writeRamTree(out, ramTree);
         }
         writeSegmentSort(out, segmentSort);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            boolean hasAttributes = attributes != null;
-            out.writeBoolean(hasAttributes);
-            if (hasAttributes) {
-                out.writeMap(attributes, StreamOutput::writeString, StreamOutput::writeString);
-            }
-        }
+        boolean hasAttributes = attributes != null;
+        out.writeBoolean(hasAttributes);
+        if (hasAttributes) {
+            out.writeMap(attributes, StreamOutput::writeString, StreamOutput::writeString);
+        }
     }
 

@@ -32,20 +32,15 @@
 
 package org.opensearch.index.mapper;
 
-import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.Query;
-import org.opensearch.LegacyESVersion;
-import org.opensearch.Version;
 import org.opensearch.common.Explicit;
 import org.opensearch.common.logging.DeprecationLogger;
 import org.opensearch.index.query.QueryShardContext;
 import org.opensearch.search.lookup.SearchLookup;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
@@ -180,36 +175,6 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
         return (FieldNamesFieldType) super.fieldType();
     }
 
-    @Override
-    public void postParse(ParseContext context) throws IOException {
-        if (context.indexSettings().getIndexVersionCreated().before(LegacyESVersion.V_6_1_0)) {
-            if (fieldType().isEnabled() == false) {
-                return;
-            }
-            for (ParseContext.Document document : context) {
-                final List<String> paths = new ArrayList<>(document.getFields().size());
-                String previousPath = ""; // used as a sentinel - field names can't be empty
-                for (IndexableField field : document.getFields()) {
-                    final String path = field.name();
-                    if (path.equals(previousPath)) {
-                        // Sometimes mappers create multiple Lucene fields, eg. one for indexing,
-                        // one for doc values and one for storing. Deduplicating is not required
-                        // for correctness but this simple check helps save utf-8 conversions and
-                        // gives Lucene fewer values to deal with.
-                        continue;
-                    }
-                    paths.add(path);
-                    previousPath = path;
-                }
-                for (String path : paths) {
-                    for (String fieldName : extractFieldNames(path)) {
-                        document.add(new Field(fieldType().name(), fieldName, Defaults.FIELD_TYPE));
-                    }
-                }
-            }
-        }
-    }
-
     static Iterable<String> extractFieldNames(final String fullPath) {
         return new Iterable<String>() {
             @Override

@@ -38,8 +38,6 @@ import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
-import org.opensearch.LegacyESVersion;
-import org.opensearch.Version;
 import org.opensearch.common.Nullable;
 import org.opensearch.index.fielddata.IndexFieldData;
 import org.opensearch.index.fielddata.IndexNumericFieldData.NumericType;
@@ -215,15 +213,9 @@ public class SeqNoFieldMapper extends MetadataFieldMapper {
         // we share the parent docs fields to ensure good compression
         SequenceIDFields seqID = context.seqID();
         assert seqID != null;
-        final Version versionCreated = context.mapperService().getIndexSettings().getIndexVersionCreated();
-        final boolean includePrimaryTerm = versionCreated.before(LegacyESVersion.V_6_1_0);
        for (Document doc : context.nonRootDocuments()) {
             doc.add(seqID.seqNo);
             doc.add(seqID.seqNoDocValue);
-            if (includePrimaryTerm) {
-                // primary terms are used to distinguish between parent and nested docs since 6.1.0
-                doc.add(seqID.primaryTerm);
-            }
         }
     }
 

@@ -40,7 +40,6 @@ import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.common.ParseField;
 import org.opensearch.common.ParsingException;
 import org.opensearch.common.Strings;
@@ -172,10 +171,6 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
             }
         }
 
-        if (context.indexVersionCreated().before(LegacyESVersion.V_6_1_0)) {
-            return newLegacyExistsQuery(context, fields);
-        }
-
         if (fields.size() == 1) {
             String field = fields.iterator().next();
             return newFieldExistsQuery(context, field);
@@ -188,22 +183,6 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
         return new ConstantScoreQuery(boolFilterBuilder.build());
     }
 
-    private static Query newLegacyExistsQuery(QueryShardContext context, Collection<String> fields) {
-        // We create TermsQuery directly here rather than using FieldNamesFieldType.termsQuery()
-        // so we don't end up with deprecation warnings
-        if (fields.size() == 1) {
-            Query filter = newLegacyExistsQuery(context, fields.iterator().next());
-            return new ConstantScoreQuery(filter);
-        }
-
-        BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder();
-        for (String field : fields) {
-            Query filter = newLegacyExistsQuery(context, field);
-            boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD);
-        }
-        return new ConstantScoreQuery(boolFilterBuilder.build());
-    }
-
     private static Query newLegacyExistsQuery(QueryShardContext context, String field) {
         MappedFieldType fieldType = context.fieldMapper(field);
         String fieldName = fieldType != null ? fieldType.name() : field;

@@ -34,7 +34,6 @@ package org.opensearch.index.query;
 
 import org.apache.lucene.search.FuzzyQuery;
 import org.apache.lucene.search.Query;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.common.ParseField;
 import org.opensearch.common.ParsingException;
 import org.opensearch.common.io.stream.StreamInput;
@@ -147,9 +146,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
         fuzzyRewrite = in.readOptionalString();
         fuzziness = in.readOptionalWriteable(Fuzziness::new);
         cutoffFrequency = in.readOptionalFloat();
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            autoGenerateSynonymsPhraseQuery = in.readBoolean();
-        }
+        autoGenerateSynonymsPhraseQuery = in.readBoolean();
     }
 
     @Override
@@ -168,9 +165,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
         out.writeOptionalString(fuzzyRewrite);
         out.writeOptionalWriteable(fuzziness);
         out.writeOptionalFloat(cutoffFrequency);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            out.writeBoolean(autoGenerateSynonymsPhraseQuery);
-        }
+        out.writeBoolean(autoGenerateSynonymsPhraseQuery);
     }
 
     /** Returns the field name used in this query. */

@@ -251,17 +251,11 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
             in.readOptionalBoolean(); // unused use_dis_max flag
         }
         tieBreaker = in.readOptionalFloat();
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            lenient = in.readOptionalBoolean();
-        } else {
-            lenient = in.readBoolean();
-        }
+        lenient = in.readOptionalBoolean();
         cutoffFrequency = in.readOptionalFloat();
         zeroTermsQuery = MatchQuery.ZeroTermsQuery.readFromStream(in);
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            autoGenerateSynonymsPhraseQuery = in.readBoolean();
-            fuzzyTranspositions = in.readBoolean();
-        }
+        autoGenerateSynonymsPhraseQuery = in.readBoolean();
+        fuzzyTranspositions = in.readBoolean();
     }
 
     @Override
@@ -285,17 +279,11 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
             out.writeOptionalBoolean(null);
         }
         out.writeOptionalFloat(tieBreaker);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            out.writeOptionalBoolean(lenient);
-        } else {
-            out.writeBoolean(lenient == null ? MatchQuery.DEFAULT_LENIENCY : lenient);
-        }
+        out.writeOptionalBoolean(lenient);
         out.writeOptionalFloat(cutoffFrequency);
         zeroTermsQuery.writeTo(out);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            out.writeBoolean(autoGenerateSynonymsPhraseQuery);
-            out.writeBoolean(fuzzyTranspositions);
-        }
+        out.writeBoolean(autoGenerateSynonymsPhraseQuery);
+        out.writeBoolean(fuzzyTranspositions);
     }
 
     public Object value() {

@@ -36,7 +36,6 @@ import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.FuzzyQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.automaton.Operations;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.common.ParseField;
 import org.opensearch.common.ParsingException;
 import org.opensearch.common.io.stream.StreamInput;
@@ -208,10 +207,8 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
         timeZone = in.readOptionalZoneId();
         escape = in.readBoolean();
         maxDeterminizedStates = in.readVInt();
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            autoGenerateSynonymsPhraseQuery = in.readBoolean();
-            fuzzyTranspositions = in.readBoolean();
-        }
+        autoGenerateSynonymsPhraseQuery = in.readBoolean();
+        fuzzyTranspositions = in.readBoolean();
     }
 
     @Override
@@ -243,10 +240,8 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
         out.writeOptionalZoneId(timeZone);
         out.writeBoolean(this.escape);
         out.writeVInt(this.maxDeterminizedStates);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            out.writeBoolean(autoGenerateSynonymsPhraseQuery);
-            out.writeBoolean(fuzzyTranspositions);
-        }
+        out.writeBoolean(autoGenerateSynonymsPhraseQuery);
+        out.writeBoolean(fuzzyTranspositions);
     }
 
     public String queryString() {

@@ -35,7 +35,6 @@ package org.opensearch.index.query;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.search.FuzzyQuery;
 import org.apache.lucene.search.Query;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.common.ParseField;
 import org.opensearch.common.ParsingException;
 import org.opensearch.common.Strings;
@@ -181,12 +180,10 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
         settings.analyzeWildcard(in.readBoolean());
         minimumShouldMatch = in.readOptionalString();
         settings.quoteFieldSuffix(in.readOptionalString());
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            settings.autoGenerateSynonymsPhraseQuery(in.readBoolean());
-            settings.fuzzyPrefixLength(in.readVInt());
-            settings.fuzzyMaxExpansions(in.readVInt());
-            settings.fuzzyTranspositions(in.readBoolean());
-        }
+        settings.autoGenerateSynonymsPhraseQuery(in.readBoolean());
+        settings.fuzzyPrefixLength(in.readVInt());
+        settings.fuzzyMaxExpansions(in.readVInt());
+        settings.fuzzyTranspositions(in.readBoolean());
     }
 
     @Override
@@ -205,12 +202,10 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
         out.writeBoolean(settings.analyzeWildcard());
         out.writeOptionalString(minimumShouldMatch);
         out.writeOptionalString(settings.quoteFieldSuffix());
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            out.writeBoolean(settings.autoGenerateSynonymsPhraseQuery());
-            out.writeVInt(settings.fuzzyPrefixLength());
-            out.writeVInt(settings.fuzzyMaxExpansions());
-            out.writeBoolean(settings.fuzzyTranspositions());
-        }
+        out.writeBoolean(settings.autoGenerateSynonymsPhraseQuery());
+        out.writeVInt(settings.fuzzyPrefixLength());
+        out.writeVInt(settings.fuzzyMaxExpansions());
+        out.writeBoolean(settings.fuzzyTranspositions());
     }
 
     /** Returns the text to parse the query from. */

@@ -32,7 +32,6 @@
 
 package org.opensearch.index.reindex;
 
-import org.opensearch.LegacyESVersion;
 import org.opensearch.action.ActionRequest;
 import org.opensearch.action.ActionRequestValidationException;
 import org.opensearch.action.search.SearchRequest;
@@ -491,18 +490,7 @@ public abstract class AbstractBulkByScrollRequest<Self extends AbstractBulkByScr
         out.writeTimeValue(retryBackoffInitialTime);
         out.writeVInt(maxRetries);
         out.writeFloat(requestsPerSecond);
-        if (out.getVersion().before(LegacyESVersion.V_6_1_0) && slices == AUTO_SLICES) {
-            throw new IllegalArgumentException(
-                "Slices set as \"auto\" are not supported before version ["
-                    + LegacyESVersion.V_6_1_0
-                    + "]. "
-                    + "Found version ["
-                    + out.getVersion()
-                    + "]"
-            );
-        } else {
-            out.writeVInt(slices);
-        }
+        out.writeVInt(slices);
     }
 
     /**

@@ -32,7 +32,6 @@
 
 package org.opensearch.index.shard;
 
-import org.opensearch.LegacyESVersion;
 import org.opensearch.common.io.stream.StreamInput;
 import org.opensearch.common.io.stream.StreamOutput;
 import org.opensearch.common.io.stream.Writeable;
@@ -55,11 +54,7 @@ public class DocsStats implements Writeable, ToXContentFragment {
     public DocsStats(StreamInput in) throws IOException {
         count = in.readVLong();
         deleted = in.readVLong();
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            totalSizeInBytes = in.readVLong();
-        } else {
-            totalSizeInBytes = -1;
-        }
+        totalSizeInBytes = in.readVLong();
     }
 
     public DocsStats(long count, long deleted, long totalSizeInBytes) {
@@ -109,9 +104,7 @@ public class DocsStats implements Writeable, ToXContentFragment {
     public void writeTo(StreamOutput out) throws IOException {
         out.writeVLong(count);
         out.writeVLong(deleted);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            out.writeVLong(totalSizeInBytes);
-        }
+        out.writeVLong(totalSizeInBytes);
     }
 
     @Override

@@ -34,7 +34,6 @@ package org.opensearch.monitor.os;
 
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.common.io.stream.StreamInput;
 import org.opensearch.common.io.stream.StreamOutput;
 import org.opensearch.common.io.stream.Writeable;
@@ -468,15 +467,9 @@ public class OsStats implements Writeable, ToXContentFragment {
             cpuCfsPeriodMicros = in.readLong();
             cpuCfsQuotaMicros = in.readLong();
             cpuStat = new CpuStat(in);
-            if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-                memoryControlGroup = in.readOptionalString();
-                memoryLimitInBytes = in.readOptionalString();
-                memoryUsageInBytes = in.readOptionalString();
-            } else {
-                memoryControlGroup = null;
-                memoryLimitInBytes = null;
-                memoryUsageInBytes = null;
-            }
+            memoryControlGroup = in.readOptionalString();
+            memoryLimitInBytes = in.readOptionalString();
+            memoryUsageInBytes = in.readOptionalString();
         }
 
         @Override
@@ -487,11 +480,9 @@ public class OsStats implements Writeable, ToXContentFragment {
             out.writeLong(cpuCfsPeriodMicros);
             out.writeLong(cpuCfsQuotaMicros);
             cpuStat.writeTo(out);
-            if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-                out.writeOptionalString(memoryControlGroup);
-                out.writeOptionalString(memoryLimitInBytes);
-                out.writeOptionalString(memoryUsageInBytes);
-            }
+            out.writeOptionalString(memoryControlGroup);
+            out.writeOptionalString(memoryLimitInBytes);
+            out.writeOptionalString(memoryUsageInBytes);
         }
 
         @Override

@@ -157,9 +157,7 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> {
         order = in.readOptionalWriteable(SortOrder::readFromStream);
         sortMode = in.readOptionalWriteable(SortMode::readFromStream);
         unmappedType = in.readOptionalString();
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            nestedSort = in.readOptionalWriteable(NestedSortBuilder::new);
-        }
+        nestedSort = in.readOptionalWriteable(NestedSortBuilder::new);
         if (in.getVersion().onOrAfter(LegacyESVersion.V_7_2_0)) {
             numericType = in.readOptionalString();
         }
@ -174,9 +172,7 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> {
|
|||
out.writeOptionalWriteable(order);
|
||||
out.writeOptionalWriteable(sortMode);
|
||||
out.writeOptionalString(unmappedType);
|
||||
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
|
||||
out.writeOptionalWriteable(nestedSort);
|
||||
}
|
||||
out.writeOptionalWriteable(nestedSort);
|
||||
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_2_0)) {
|
||||
out.writeOptionalString(numericType);
|
||||
}
|
||||
|
|
|
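Two details in this hunk are worth noting. The V_7_2_0 gate stays, because 7.x wire compatibility is still supported; this commit only strips gates that can never evaluate false again. And dropping the gate around nestedSort is safe for absent values too: optional writeables are encoded as a one-byte presence flag followed by the payload (this is the general StreamOutput contract, stated from memory rather than from this diff), so the ungated pair still round-trips a missing NestedSortBuilder:

    out.writeOptionalWriteable(nestedSort);                         // emits 0, or 1 + nestedSort bytes
    nestedSort = in.readOptionalWriteable(NestedSortBuilder::new);  // yields null when the flag was 0
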
@@ -187,9 +187,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
         sortMode = in.readOptionalWriteable(SortMode::readFromStream);
         nestedFilter = in.readOptionalNamedWriteable(QueryBuilder.class);
         nestedPath = in.readOptionalString();
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            nestedSort = in.readOptionalWriteable(NestedSortBuilder::new);
-        }
+        nestedSort = in.readOptionalWriteable(NestedSortBuilder::new);
         validation = GeoValidationMethod.readFromStream(in);
         if (in.getVersion().onOrAfter(LegacyESVersion.V_6_4_0)) {
             ignoreUnmapped = in.readBoolean();
@@ -206,9 +204,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
         out.writeOptionalWriteable(sortMode);
         out.writeOptionalNamedWriteable(nestedFilter);
         out.writeOptionalString(nestedPath);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            out.writeOptionalWriteable(nestedSort);
-        }
+        out.writeOptionalWriteable(nestedSort);
         validation.writeTo(out);
         if (out.getVersion().onOrAfter(LegacyESVersion.V_6_4_0)) {
             out.writeBoolean(ignoreUnmapped);
@@ -135,9 +135,7 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
         sortMode = in.readOptionalWriteable(SortMode::readFromStream);
         nestedPath = in.readOptionalString();
         nestedFilter = in.readOptionalNamedWriteable(QueryBuilder.class);
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            nestedSort = in.readOptionalWriteable(NestedSortBuilder::new);
-        }
+        nestedSort = in.readOptionalWriteable(NestedSortBuilder::new);
     }
 
     @Override
@@ -148,9 +146,7 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
         out.writeOptionalWriteable(sortMode);
         out.writeOptionalString(nestedPath);
         out.writeOptionalNamedWriteable(nestedFilter);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            out.writeOptionalWriteable(nestedSort);
-        }
+        out.writeOptionalWriteable(nestedSort);
     }
 
     /**
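FieldSortBuilder, GeoDistanceSortBuilder, and ScriptSortBuilder all carried the same V_6_1_0 gate around nestedSort (presumably because the field arrived with the NestedSortBuilder API in 6.1), so the three hunks are mechanically identical. Gates keyed to versions that remain wire-compatible are left untouched; for example, the GeoDistanceSortBuilder hunk above keeps:

    if (out.getVersion().onOrAfter(LegacyESVersion.V_6_4_0)) {
        out.writeBoolean(ignoreUnmapped);
    }
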
@@ -34,7 +34,6 @@ package org.opensearch.search.suggest.completion;
 import org.apache.lucene.analysis.CharArraySet;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.util.PriorityQueue;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.common.ParseField;
 import org.opensearch.common.io.stream.StreamInput;
 import org.opensearch.common.io.stream.StreamOutput;
@@ -98,9 +97,7 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
 
     public CompletionSuggestion(StreamInput in) throws IOException {
         super(in);
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            skipDuplicates = in.readBoolean();
-        }
+        skipDuplicates = in.readBoolean();
     }
 
     @Override
@@ -111,9 +108,7 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            out.writeBoolean(skipDuplicates);
-        }
+        out.writeBoolean(skipDuplicates);
     }
 
     /**
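Unlike DocsStats, this read gate had no else-branch: a pre-6.1 stream simply left skipDuplicates at its Java default of false, which matched pre-6.1 behavior. The deleted decode, written out explicitly as a sketch ("gate" stands in for LegacyESVersion.V_6_1_0):

    // old decode, made explicit
    skipDuplicates = in.getVersion().onOrAfter(gate) ? in.readBoolean() : false;
    // new decode: the flag is always on the wire
    skipDuplicates = in.readBoolean();
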
@@ -31,7 +31,6 @@
 
 package org.opensearch.search.suggest.completion;
 
-import org.opensearch.LegacyESVersion;
 import org.opensearch.OpenSearchParseException;
 import org.opensearch.common.ParseField;
 import org.opensearch.common.bytes.BytesReference;
@@ -147,9 +146,7 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSug
         fuzzyOptions = in.readOptionalWriteable(FuzzyOptions::new);
         regexOptions = in.readOptionalWriteable(RegexOptions::new);
         contextBytes = in.readOptionalBytesReference();
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            skipDuplicates = in.readBoolean();
-        }
+        skipDuplicates = in.readBoolean();
     }
 
     @Override
@@ -157,9 +154,7 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSug
         out.writeOptionalWriteable(fuzzyOptions);
         out.writeOptionalWriteable(regexOptions);
         out.writeOptionalBytesReference(contextBytes);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
-            out.writeBoolean(skipDuplicates);
-        }
+        out.writeBoolean(skipDuplicates);
     }
 
     /**
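The builder mirrors the response object above: the request-side skipDuplicates flag now always follows fuzzyOptions, regexOptions, and contextBytes on the wire, and that field order must stay identical in reader and writer. A hedged round-trip sketch in the style of the module's serialization tests (the constructor and setter names are from the public API, but the scaffolding here is illustrative, not a test from this commit):

    CompletionSuggestionBuilder original = new CompletionSuggestionBuilder("suggest_field").skipDuplicates(true);
    BytesStreamOutput out = new BytesStreamOutput();
    original.writeTo(out);
    CompletionSuggestionBuilder copy = new CompletionSuggestionBuilder(out.bytes().streamInput());
    assertEquals(original, copy); // same options, including skipDuplicates
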
@@ -33,13 +33,11 @@
 package org.opensearch.common.lucene.search;
 
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.DocValuesFieldExistsQuery;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.TermQuery;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.index.mapper.SeqNoFieldMapper;
 import org.opensearch.test.OpenSearchTestCase;
@@ -52,16 +50,7 @@ public class QueriesTests extends OpenSearchTestCase {
             // This is a custom query that extends AutomatonQuery and want to make sure the equals method works
             assertEquals(Queries.newNonNestedFilter(version), Queries.newNonNestedFilter(version));
             assertEquals(Queries.newNonNestedFilter(version).hashCode(), Queries.newNonNestedFilter(version).hashCode());
-            if (version.onOrAfter(LegacyESVersion.V_6_1_0)) {
-                assertEquals(Queries.newNonNestedFilter(version), new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME));
-            } else {
-                assertEquals(
-                    Queries.newNonNestedFilter(version),
-                    new BooleanQuery.Builder().add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER)
-                        .add(Queries.newNestedFilter(), BooleanClause.Occur.MUST_NOT)
-                        .build()
-                );
-            }
+            assertEquals(Queries.newNonNestedFilter(version), new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME));
         }
     }
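The surviving assertion encodes why the legacy branch could go: on every index version still readable after this commit, Queries.newNonNestedFilter selects root documents with a doc-values-exists check on the primary term field, which only root (non-nested) documents carry; the MatchAll-minus-nested BooleanQuery formulation existed purely for pre-6.1 indices. The asserted equivalence, isolated (names as in the test above):

    Query nonNested = new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME);
    // equal to Queries.newNonNestedFilter(version) for every supported version
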
@@ -634,7 +634,7 @@ public class SettingsTests extends OpenSearchTestCase {
 
     public void testReadWriteArray() throws IOException {
         BytesStreamOutput output = new BytesStreamOutput();
-        output.setVersion(randomFrom(Version.CURRENT, LegacyESVersion.V_6_1_0));
+        output.setVersion(randomFrom(Version.CURRENT, LegacyESVersion.V_7_0_0));
         Settings settings = Settings.builder().putList("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").build();
         Settings.writeSettingsToStream(settings, output);
         StreamInput in = StreamInput.wrap(BytesReference.toBytes(output.bytes()));
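Here the constant is replaced rather than deleted: the test still wants to exercise an older wire version alongside Version.CURRENT, and with V_6_1_0 gone it pins V_7_0_0 instead. A broader variant (an assumption about intent, not what the commit does) would sample the whole range between the two endpoints:

    output.setVersion(VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT));
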
@@ -40,7 +40,6 @@ import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.NormsFieldExistsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.test.AbstractQueryTestCase;
 
 import java.io.IOException;
@@ -81,26 +80,7 @@ public class ExistsQueryBuilderTests extends AbstractQueryTestCase<ExistsQueryBu
             assertThat(query, instanceOf(MatchNoDocsQuery.class));
             return;
         }
-        if (context.getIndexSettings().getIndexVersionCreated().before(LegacyESVersion.V_6_1_0)) {
-            if (fields.size() == 1) {
-                assertThat(query, instanceOf(ConstantScoreQuery.class));
-                ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query;
-                String field = expectedFieldName(fields.iterator().next());
-                assertThat(constantScoreQuery.getQuery(), instanceOf(TermQuery.class));
-                TermQuery termQuery = (TermQuery) constantScoreQuery.getQuery();
-                assertEquals(field, termQuery.getTerm().text());
-            } else {
-                assertThat(query, instanceOf(ConstantScoreQuery.class));
-                ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query;
-                assertThat(constantScoreQuery.getQuery(), instanceOf(BooleanQuery.class));
-                BooleanQuery booleanQuery = (BooleanQuery) constantScoreQuery.getQuery();
-                assertThat(booleanQuery.clauses().size(), equalTo(mappedFields.size()));
-                for (int i = 0; i < mappedFields.size(); i++) {
-                    BooleanClause booleanClause = booleanQuery.clauses().get(i);
-                    assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD));
-                }
-            }
-        } else if (fields.size() == 1) {
+        if (fields.size() == 1) {
             assertThat(query, instanceOf(ConstantScoreQuery.class));
             ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query;
             String field = expectedFieldName(fields.iterator().next());
@@ -66,7 +66,6 @@ import org.apache.lucene.util.automaton.Automata;
 import org.apache.lucene.util.automaton.Automaton;
 import org.apache.lucene.util.automaton.Operations;
 import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.common.Strings;
@@ -1039,8 +1038,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
         QueryShardContext context = createShardContext();
         QueryStringQueryBuilder queryBuilder = new QueryStringQueryBuilder(TEXT_FIELD_NAME + ":*");
         Query query = queryBuilder.toQuery(context);
-        if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(LegacyESVersion.V_6_1_0)
-            && (context.getMapperService().fieldType(TEXT_FIELD_NAME).getTextSearchInfo().hasNorms())) {
+        if ((context.getMapperService().fieldType(TEXT_FIELD_NAME).getTextSearchInfo().hasNorms())) {
             assertThat(query, equalTo(new ConstantScoreQuery(new NormsFieldExistsQuery(TEXT_FIELD_NAME))));
         } else {
             assertThat(query, equalTo(new ConstantScoreQuery(new TermQuery(new Term("_field_names", TEXT_FIELD_NAME)))));
@@ -1050,8 +1048,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
         String value = (quoted ? "\"" : "") + TEXT_FIELD_NAME + (quoted ? "\"" : "");
         queryBuilder = new QueryStringQueryBuilder("_exists_:" + value);
         query = queryBuilder.toQuery(context);
-        if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(LegacyESVersion.V_6_1_0)
-            && (context.getMapperService().fieldType(TEXT_FIELD_NAME).getTextSearchInfo().hasNorms())) {
+        if ((context.getMapperService().fieldType(TEXT_FIELD_NAME).getTextSearchInfo().hasNorms())) {
             assertThat(query, equalTo(new ConstantScoreQuery(new NormsFieldExistsQuery(TEXT_FIELD_NAME))));
         } else {
             assertThat(query, equalTo(new ConstantScoreQuery(new TermQuery(new Term("_field_names", TEXT_FIELD_NAME)))));
@@ -44,7 +44,6 @@ import org.apache.lucene.search.PointRangeQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermRangeQuery;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.OpenSearchParseException;
 import org.opensearch.common.ParsingException;
 import org.opensearch.common.geo.ShapeRelation;
@@ -164,15 +163,13 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
         String expectedFieldName = expectedFieldName(queryBuilder.fieldName());
         if (queryBuilder.from() == null && queryBuilder.to() == null) {
             final Query expectedQuery;
-            if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(LegacyESVersion.V_6_1_0)
-                && context.getMapperService().fieldType(queryBuilder.fieldName()).hasDocValues()) {
+            if (context.getMapperService().fieldType(queryBuilder.fieldName()).hasDocValues()) {
                 expectedQuery = new ConstantScoreQuery(new DocValuesFieldExistsQuery(expectedFieldName));
-            } else if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(LegacyESVersion.V_6_1_0)
-                && context.getMapperService().fieldType(queryBuilder.fieldName()).getTextSearchInfo().hasNorms()) {
-                expectedQuery = new ConstantScoreQuery(new NormsFieldExistsQuery(expectedFieldName));
-            } else {
-                expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, expectedFieldName)));
-            }
+            } else if (context.getMapperService().fieldType(queryBuilder.fieldName()).getTextSearchInfo().hasNorms()) {
+                expectedQuery = new ConstantScoreQuery(new NormsFieldExistsQuery(expectedFieldName));
+            } else {
+                expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, expectedFieldName)));
+            }
             assertThat(query, equalTo(expectedQuery));
         } else if (expectedFieldName.equals(DATE_FIELD_NAME) == false
             && expectedFieldName.equals(INT_FIELD_NAME) == false
@@ -472,8 +469,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
         // Range query with open bounds rewrite to an exists query
         Query luceneQuery = rewrittenRange.toQuery(queryShardContext);
         final Query expectedQuery;
-        if (queryShardContext.getIndexSettings().getIndexVersionCreated().onOrAfter(LegacyESVersion.V_6_1_0)
-            && queryShardContext.fieldMapper(query.fieldName()).hasDocValues()) {
+        if (queryShardContext.fieldMapper(query.fieldName()).hasDocValues()) {
             expectedQuery = new ConstantScoreQuery(new DocValuesFieldExistsQuery(query.fieldName()));
         } else {
             expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, query.fieldName())));
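The last three test files exercise the same production rewrite: an exists query (and an open-ended range, which rewrites to one) resolves to the cheapest existence check the field supports. The fallback chain the assertions now cover unconditionally, extracted from the lines above into a standalone sketch ("fieldType" and "field" are placeholders):

    final Query expected;
    if (fieldType.hasDocValues()) {
        expected = new ConstantScoreQuery(new DocValuesFieldExistsQuery(field));        // cheapest: doc values
    } else if (fieldType.getTextSearchInfo().hasNorms()) {
        expected = new ConstantScoreQuery(new NormsFieldExistsQuery(field));            // next: norms
    } else {
        expected = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, field))); // fallback: _field_names
    }

Pre-6.1 indices always took the _field_names branch, which is why each arm used to be guarded on the index-created version.
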
@@ -33,7 +33,6 @@ package org.opensearch.transport;
 
 import org.apache.lucene.search.TotalHits;
 import org.apache.lucene.store.AlreadyClosedException;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.action.ActionListener;
 import org.opensearch.action.admin.cluster.shards.ClusterSearchShardsAction;
@@ -68,7 +67,6 @@ import org.opensearch.search.SearchHits;
 import org.opensearch.search.aggregations.InternalAggregations;
 import org.opensearch.search.internal.InternalSearchResponse;
 import org.opensearch.test.OpenSearchTestCase;
-import org.opensearch.test.VersionUtils;
 import org.opensearch.test.transport.MockTransportService;
 import org.opensearch.threadpool.TestThreadPool;
 import org.opensearch.threadpool.ThreadPool;
@@ -80,7 +78,6 @@ import java.net.ServerSocket;
 import java.net.Socket;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Base64;
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.BrokenBarrierException;
@@ -441,36 +438,6 @@ public class RemoteClusterConnectionTests extends OpenSearchTestCase {
         }
     }
 
-    public void testRemoteConnectionInfoBwComp() throws IOException {
-        final Version version = VersionUtils.randomVersionBetween(
-            random(),
-            LegacyESVersion.V_6_1_0,
-            VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)
-        );
-        SniffConnectionStrategy.SniffModeInfo modeInfo = new SniffConnectionStrategy.SniffModeInfo(Arrays.asList("0.0.0.0:1"), 4, 4);
-        RemoteConnectionInfo expected = new RemoteConnectionInfo("test_cluster", modeInfo, new TimeValue(30, TimeUnit.MINUTES), false);
-
-        // This version was created using the serialization code in use from 6.1 but before 7.0
-        String encoded = "AQQAAAAABzAuMC4wLjAAAAABAQQAAAAABzAuMC4wLjAAAABQBDwEBAx0ZXN0X2NsdXN0ZXIA";
-        final byte[] data = Base64.getDecoder().decode(encoded);
-
-        try (StreamInput in = StreamInput.wrap(data)) {
-            in.setVersion(version);
-            RemoteConnectionInfo deserialized = new RemoteConnectionInfo(in);
-            assertEquals(expected, deserialized);
-
-            try (BytesStreamOutput out = new BytesStreamOutput()) {
-                out.setVersion(version);
-                deserialized.writeTo(out);
-                try (StreamInput in2 = StreamInput.wrap(out.bytes().toBytesRef().bytes)) {
-                    in2.setVersion(version);
-                    RemoteConnectionInfo deserialized2 = new RemoteConnectionInfo(in2);
-                    assertEquals(expected, deserialized2);
-                }
-            }
-        }
-    }
-
     public void testRenderConnectionInfoXContent() throws IOException {
         List<String> remoteAddresses = Arrays.asList("seed:1", "seed:2");
         String serverName = "the_server_name";
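The whole test goes because its subject, the 6.x wire encoding of RemoteConnectionInfo, is no longer reachable. Fixture tests of this shape are built by serializing once on the old branch, pinning the bytes as a Base64 string, and asserting that the current reader still decodes them. The skeleton, generalized from the deleted method (the fixture string, versions, and expected value are the variable parts):

    String encoded = "...";                                   // bytes captured from the old serialization code
    byte[] data = Base64.getDecoder().decode(encoded);
    try (StreamInput in = StreamInput.wrap(data)) {
        in.setVersion(oldWireVersion);                        // decode as the old peer would have encoded
        RemoteConnectionInfo decoded = new RemoteConnectionInfo(in);
        assertEquals(expectedInfo, decoded);                  // still readable after refactors
    }

When support for a version range is dropped, its fixtures become dead weight and are removed rather than updated.
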
@@ -87,12 +87,16 @@ public class SkipSectionTests extends AbstractClientYamlTestFragmentParserTestCa
     }
 
     public void testParseSkipSectionVersionNoFeature() throws Exception {
-        parser = createParser(YamlXContent.yamlXContent, "version: \" - 6.1.1\"\n" + "reason: Delete ignores the parent param");
+        Version version = VersionUtils.randomVersion(random());
+        parser = createParser(
+            YamlXContent.yamlXContent,
+            "version: \" - " + version + "\"\n" + "reason: Delete ignores the parent param"
+        );
 
         SkipSection skipSection = SkipSection.parse(parser);
         assertThat(skipSection, notNullValue());
         assertThat(skipSection.getLowerVersion(), equalTo(VersionUtils.getFirstVersion()));
-        assertThat(skipSection.getUpperVersion(), equalTo(LegacyESVersion.V_6_1_1));
+        assertThat(skipSection.getUpperVersion(), equalTo(version));
         assertThat(skipSection.getFeatures().size(), equalTo(0));
         assertThat(skipSection.getReason(), equalTo("Delete ignores the parent param"));
     }
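A useful pattern for purging version constants from tests: where the literal version never mattered, substitute a random one and assert against it. The skip syntax under test is an open lower bound, meaning "every version up to and including X". Illustrative only (the version and reason here are arbitrary):

    parser = createParser(
        YamlXContent.yamlXContent,
        "version: \" - 7.1.0\"\n" + "reason: illustrative example"
    );
    // SkipSection.parse(parser) then reports upper version 7.1.0
    // and the first known version as the lower bound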