Remove 6.0.* version constants (#1658)
This PR removes the LegacyESVersion.V_6_0_* constants, including all pre-release (alpha, beta, rc) and bug-fix versions.

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
This commit is contained in:
parent e983facc16
commit b74d71fb74
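The hunks below follow a few recurring patterns: wire-format gates on 6.0.* pre-release versions collapse to their post-6.0 branch, 6.x-only tests and test files are deleted, and hard-coded 6.0.0 lower bounds in version randomization are raised or swapped for index-compatibility helpers. As a hedged sketch of the first pattern — StreamInput, getVersion(), and LegacyESVersion are the real OpenSearch APIs exercised in the PercolateQueryBuilder hunks below, while the enclosing method is invented for illustration:

    // Illustrative fragment only: the shape of a wire-format gate this commit deletes.
    private void readDocumentType(StreamInput in) throws IOException {
        // Before this commit:
        // if (in.getVersion().before(LegacyESVersion.V_6_0_0_beta1)) {
        //     documentType = in.readString();
        // } else {
        //     documentType = in.readOptionalString();
        // }
        documentType = in.readOptionalString(); // after: the legacy branch is unreachable
    }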
@@ -1250,9 +1250,7 @@ public class OpenSearchNode implements TestClusterConfiguration {
         } else {
             baseConfig.put("script.max_compilations_rate", "2048/1m");
         }
-        if (getVersion().onOrAfter("6.0.0")) {
-            baseConfig.put("cluster.routing.allocation.disk.watermark.flood_stage", "1b");
-        }
+        baseConfig.put("cluster.routing.allocation.disk.watermark.flood_stage", "1b");
         // Temporarily disable the real memory usage circuit breaker. It depends on real memory usage which we have no full control
         // over and the REST client will not retry on circuit breaking exceptions yet (see #31986 for details). Once the REST client
         // can retry on circuit breaking exceptions, we can revert again to the default configuration.

@@ -53,7 +53,7 @@ public class MainResponseTests extends AbstractResponseTestCase<org.opensearch.a
         ClusterName clusterName = new ClusterName(randomAlphaOfLength(10));
         String nodeName = randomAlphaOfLength(10);
         final String date = new Date(randomNonNegativeLong()).toString();
-        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_1, Version.CURRENT);
+        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT);
         Build build = new Build(
             Build.Type.UNKNOWN,
             randomAlphaOfLength(8),

@@ -32,7 +32,6 @@

 package org.opensearch.analysis.common;

-import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.Tokenizer;
 import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
@@ -40,8 +39,6 @@ import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.env.Environment;
 import org.opensearch.index.IndexSettings;
-import org.opensearch.index.analysis.IndexAnalyzers;
-import org.opensearch.index.analysis.NamedAnalyzer;
 import org.opensearch.index.analysis.TokenizerFactory;
 import org.opensearch.test.OpenSearchTestCase;
 import org.opensearch.test.IndexSettingsModule;
@@ -52,35 +49,6 @@ import java.util.Map;

 public class CommonAnalysisPluginTests extends OpenSearchTestCase {

-    /**
-     * Check that the deprecated name "nGram" issues a deprecation warning for indices created since 6.0.0
-     */
-    public void testNGramDeprecationWarning() throws IOException {
-        Settings settings = Settings.builder()
-            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
-            .put(
-                IndexMetadata.SETTING_VERSION_CREATED,
-                VersionUtils.randomVersionBetween(
-                    random(),
-                    LegacyESVersion.V_6_0_0,
-                    VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)
-                )
-            )
-            .put("index.analysis.analyzer.custom_analyzer.type", "custom")
-            .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
-            .putList("index.analysis.analyzer.custom_analyzer.filter", "nGram")
-            .build();
-
-        try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
-            createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settings), settings, commonAnalysisPlugin);
-        }
-
-        assertWarnings(
-            "The [nGram] token filter name is deprecated and will be removed in a future version. "
-                + "Please change the filter name to [ngram] instead."
-        );
-    }
-
     /**
      * Check that the deprecated name "nGram" throws an error since 7.0.0
      */
@@ -106,41 +74,16 @@ public class CommonAnalysisPluginTests extends OpenSearchTestCase {
         }
     }

-    /**
-     * Check that the deprecated name "edgeNGram" issues a deprecation warning for indices created since 6.0.0
-     */
-    public void testEdgeNGramDeprecationWarning() throws IOException {
-        Settings settings = Settings.builder()
-            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
-            .put(
-                IndexMetadata.SETTING_VERSION_CREATED,
-                VersionUtils.randomVersionBetween(
-                    random(),
-                    LegacyESVersion.V_6_4_0,
-                    VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)
-                )
-            )
-            .put("index.analysis.analyzer.custom_analyzer.type", "custom")
-            .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
-            .putList("index.analysis.analyzer.custom_analyzer.filter", "edgeNGram")
-            .build();
-
-        try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
-            createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settings), settings, commonAnalysisPlugin);
-        }
-        assertWarnings(
-            "The [edgeNGram] token filter name is deprecated and will be removed in a future version. "
-                + "Please change the filter name to [edge_ngram] instead."
-        );
-    }
-
     /**
      * Check that the deprecated name "edgeNGram" throws an error for indices created since 7.0.0
      */
     public void testEdgeNGramDeprecationError() throws IOException {
         Settings settings = Settings.builder()
             .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
-            .put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, null))
+            .put(
+                IndexMetadata.SETTING_VERSION_CREATED,
+                VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT)
+            )
             .put("index.analysis.analyzer.custom_analyzer.type", "custom")
             .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
             .putList("index.analysis.analyzer.custom_analyzer.filter", "edgeNGram")
@@ -186,36 +129,6 @@ public class CommonAnalysisPluginTests extends OpenSearchTestCase {
         );
     }

-    /**
-     * Check that the deprecated analyzer name "standard_html_strip" issues a deprecation warning for indices created since 6.5.0 until 7
-     */
-    public void testStandardHtmlStripAnalyzerDeprecationWarning() throws IOException {
-        Settings settings = Settings.builder()
-            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
-            .put(
-                IndexMetadata.SETTING_VERSION_CREATED,
-                VersionUtils.randomVersionBetween(
-                    random(),
-                    LegacyESVersion.V_6_0_0,
-                    VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)
-                )
-            )
-            .put("index.analysis.analyzer.custom_analyzer.type", "standard_html_strip")
-            .putList("index.analysis.analyzer.custom_analyzer.stopwords", "a", "b")
-            .build();
-
-        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
-        try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
-            IndexAnalyzers analyzers = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).indexAnalyzers;
-            Analyzer analyzer = analyzers.get("custom_analyzer");
-            assertNotNull(((NamedAnalyzer) analyzer).analyzer());
-            assertWarnings(
-                "Deprecated analyzer [standard_html_strip] used, "
-                    + "replace it with a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter"
-            );
-        }
-    }
-
     /**
      * Check that the deprecated "nGram" filter logs a warning when the filter is used as a custom filter
      */

@@ -72,24 +72,4 @@ public class HtmlStripCharFilterFactoryTests extends OpenSearchTestCase {
             );
         }
     }
-
-    /**
-     * Check that the deprecated name "htmlStrip" does NOT issues a deprecation warning for indices created before 6.3.0
-     */
-    public void testNoDeprecationWarningPre6_3() throws IOException {
-        Settings settings = Settings.builder()
-            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
-            .put(
-                IndexMetadata.SETTING_VERSION_CREATED,
-                VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, LegacyESVersion.V_6_2_4)
-            )
-            .build();
-
-        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
-        try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
-            Map<String, CharFilterFactory> charFilters = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).charFilter;
-            CharFilterFactory charFilterFactory = charFilters.get("htmlStrip");
-            assertNotNull(charFilterFactory.create(new StringReader("")));
-        }
-    }
 }

@@ -56,11 +56,9 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Set;

 import static org.hamcrest.Matchers.equalTo;
@@ -317,32 +315,6 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase {
                 tf.get(idxSettings, null, tf.getName(), settings).getSynonymFilter();
             }
         }
-
-        Settings settings2 = Settings.builder()
-            .put(
-                IndexMetadata.SETTING_VERSION_CREATED,
-                VersionUtils.randomVersionBetween(
-                    random(),
-                    LegacyESVersion.V_6_0_0,
-                    VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)
-                )
-            )
-            .put("path.home", createTempDir().toString())
-            .putList("common_words", "a", "b")
-            .put("output_unigrams", "true")
-            .build();
-        IndexSettings idxSettings2 = IndexSettingsModule.newIndexSettings("index", settings2);
-
-        List<String> expectedWarnings = new ArrayList<>();
-        for (PreConfiguredTokenFilter tf : plugin.getPreConfiguredTokenFilters()) {
-            if (disallowedFilters.contains(tf.getName())) {
-                tf.get(idxSettings2, null, tf.getName(), settings2).getSynonymFilter();
-                expectedWarnings.add("Token filter [" + tf.getName() + "] will not be usable to parse synonyms after v7.0");
-            } else {
-                tf.get(idxSettings2, null, tf.getName(), settings2).getSynonymFilter();
-            }
-        }
-        assertWarnings(expectedWarnings.toArray(new String[0]));
     }

     public void testDisallowedTokenFilters() throws IOException {
@@ -382,58 +354,6 @@ public class SynonymsAnalysisTests extends OpenSearchTestCase {

             assertEquals(factory, "Token filter [" + factory + "] cannot be used to parse synonyms", e.getMessage());
         }
-
-        settings = Settings.builder()
-            .put(
-                IndexMetadata.SETTING_VERSION_CREATED,
-                VersionUtils.randomVersionBetween(
-                    random(),
-                    LegacyESVersion.V_6_0_0,
-                    VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)
-                )
-            )
-            .put("path.home", createTempDir().toString())
-            .putList("common_words", "a", "b")
-            .put("output_unigrams", "true")
-            .build();
-        idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
-
-        List<String> expectedWarnings = new ArrayList<>();
-        for (String factory : disallowedFactories) {
-            TokenFilterFactory tff = plugin.getTokenFilters().get(factory).get(idxSettings, null, factory, settings);
-            TokenizerFactory tok = new KeywordTokenizerFactory(idxSettings, null, "keyword", settings);
-            SynonymTokenFilterFactory stff = new SynonymTokenFilterFactory(idxSettings, null, "synonym", settings);
-
-            stff.buildSynonymAnalyzer(tok, Collections.emptyList(), Collections.singletonList(tff), null);
-            expectedWarnings.add("Token filter [" + factory + "] will not be usable to parse synonyms after v7.0");
-        }
-
-        assertWarnings(expectedWarnings.toArray(new String[0]));
-
-        settings = Settings.builder()
-            .put(
-                IndexMetadata.SETTING_VERSION_CREATED,
-                VersionUtils.randomVersionBetween(
-                    random(),
-                    LegacyESVersion.V_6_0_0,
-                    VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)
-                )
-            )
-            .put("path.home", createTempDir().toString())
-            .put("preserve_original", "false")
-            .build();
-        idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
-        TokenFilterFactory tff = plugin.getTokenFilters().get("multiplexer").get(idxSettings, null, "multiplexer", settings);
-        TokenizerFactory tok = new KeywordTokenizerFactory(idxSettings, null, "keyword", settings);
-        SynonymTokenFilterFactory stff = new SynonymTokenFilterFactory(idxSettings, null, "synonym", settings);
-
-        IllegalArgumentException e = expectThrows(
-            IllegalArgumentException.class,
-            () -> stff.buildSynonymAnalyzer(tok, Collections.emptyList(), Collections.singletonList(tff), null)
-        );
-
-        assertEquals("Token filter [multiplexer] cannot be used to parse synonyms unless [preserve_original] is [true]", e.getMessage());
-
     }

     private void match(String analyzerName, String source, String target) throws IOException {

@@ -72,7 +72,6 @@ import org.opensearch.common.logging.DeprecationLogger;
 import org.opensearch.common.xcontent.ConstructingObjectParser;
 import org.opensearch.common.xcontent.LoggingDeprecationHandler;
 import org.opensearch.common.xcontent.NamedXContentRegistry;
-import org.opensearch.common.xcontent.XContent;
 import org.opensearch.common.xcontent.XContentBuilder;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.common.xcontent.XContentHelper;
@@ -290,11 +289,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
         if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
             name = in.readOptionalString();
         }
-        if (in.getVersion().before(LegacyESVersion.V_6_0_0_beta1)) {
-            documentType = in.readString();
-        } else {
-            documentType = in.readOptionalString();
-        }
+        documentType = in.readOptionalString();
         indexedDocumentIndex = in.readOptionalString();
         indexedDocumentType = in.readOptionalString();
         indexedDocumentId = in.readOptionalString();
@@ -337,11 +332,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
         if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
             out.writeOptionalString(name);
         }
-        if (out.getVersion().before(LegacyESVersion.V_6_0_0_beta1)) {
-            out.writeString(documentType);
-        } else {
-            out.writeOptionalString(documentType);
-        }
+        out.writeOptionalString(documentType);
         out.writeOptionalString(indexedDocumentIndex);
         out.writeOptionalString(indexedDocumentType);
         out.writeOptionalString(indexedDocumentId);
@@ -707,7 +698,6 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
         if (binaryDocValues == null) {
             return docId -> null;
         }
-        if (indexVersion.onOrAfter(LegacyESVersion.V_6_0_0_beta2)) {
         return docId -> {
             if (binaryDocValues.advanceExact(docId)) {
                 BytesRef qbSource = binaryDocValues.binaryValue();
@@ -737,36 +727,6 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
                 return null;
             }
         };
-        } else {
-            return docId -> {
-                if (binaryDocValues.advanceExact(docId)) {
-                    BytesRef qbSource = binaryDocValues.binaryValue();
-                    if (qbSource.length > 0) {
-                        XContent xContent = PercolatorFieldMapper.QUERY_BUILDER_CONTENT_TYPE.xContent();
-                        try (
-                            XContentParser sourceParser = xContent.createParser(
-                                context.getXContentRegistry(),
-                                LoggingDeprecationHandler.INSTANCE,
-                                qbSource.bytes,
-                                qbSource.offset,
-                                qbSource.length
-                            )
-                        ) {
-                            QueryBuilder queryBuilder = PercolatorFieldMapper.parseQueryBuilder(
-                                sourceParser,
-                                sourceParser.getTokenLocation()
-                            );
-                            queryBuilder = Rewriteable.rewrite(queryBuilder, context);
-                            return queryBuilder.toQuery(context);
-                        }
-                    } else {
-                        return null;
-                    }
-                } else {
-                    return null;
-                }
-            };
-        }
     };
 }

@@ -31,7 +31,6 @@

 package org.opensearch.percolator;

-import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.BinaryRange;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
@@ -66,8 +65,6 @@ import org.opensearch.common.io.stream.OutputStreamStreamOutput;
 import org.opensearch.common.lucene.search.Queries;
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
-import org.opensearch.common.xcontent.XContentLocation;
 import org.opensearch.common.xcontent.XContentParser;
 import org.opensearch.common.xcontent.XContentType;
@@ -102,7 +99,6 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
@@ -420,7 +416,6 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper {

     static void createQueryBuilderField(Version indexVersion, BinaryFieldMapper qbField, QueryBuilder queryBuilder, ParseContext context)
         throws IOException {
-        if (indexVersion.onOrAfter(LegacyESVersion.V_6_0_0_beta2)) {
         try (ByteArrayOutputStream stream = new ByteArrayOutputStream()) {
             try (OutputStreamStreamOutput out = new OutputStreamStreamOutput(stream)) {
                 out.setVersion(indexVersion);
@@ -429,14 +424,6 @@ public class PercolatorFieldMapper extends ParametrizedFieldMapper {
                 qbField.parse(context.createExternalValueContext(queryBuilderAsBytes));
             }
         }
-        } else {
-            try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) {
-                queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap()));
-                builder.flush();
-                byte[] queryBuilderAsBytes = BytesReference.toBytes(BytesReference.bytes(builder));
-                context.doc().add(new BinaryDocValuesField(qbField.name(), new BytesRef(queryBuilderAsBytes)));
-            }
-        }
     }

     private static final FieldType INDEXED_KEYWORD = new FieldType();

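The percolator hunks above drop the pre-6.0.0-beta2 fallback that stored queries as XContent doc values and reparsed them at read time; only the stream-serialized path survives. A minimal sketch of that surviving write path, assuming the real BytesStreamOutput/NamedWriteable stream APIs — the wrapper class and method are invented for illustration:

    import java.io.IOException;

    import org.opensearch.Version;
    import org.opensearch.common.bytes.BytesReference;
    import org.opensearch.common.io.stream.BytesStreamOutput;
    import org.opensearch.index.query.QueryBuilder;

    final class QueryBuilderBytesSketch {
        // Serialize a query builder the way the surviving branch does: binary
        // stream output pinned to the version the index was created with.
        static byte[] toBytes(QueryBuilder queryBuilder, Version indexVersion) throws IOException {
            try (BytesStreamOutput out = new BytesStreamOutput()) {
                out.setVersion(indexVersion);
                out.writeNamedWriteable(queryBuilder);
                return BytesReference.toBytes(out.bytes());
            }
        }
    }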
@@ -53,7 +53,6 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermRangeQuery;
 import org.apache.lucene.search.join.ScoreMode;
 import org.apache.lucene.util.BytesRef;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.action.support.PlainActionFuture;
 import org.opensearch.cluster.metadata.IndexMetadata;
@@ -488,12 +487,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
         assertEquals(2, t.v1().clauses().size());
         assertThat(t.v1().clauses().get(0).getQuery(), instanceOf(CoveringQuery.class));
         assertThat(t.v1().clauses().get(1).getQuery(), instanceOf(TermQuery.class));
-
-        t = fieldType.createCandidateQuery(indexReader, LegacyESVersion.V_6_0_0);
-        assertTrue(t.v2());
-        assertEquals(2, t.v1().clauses().size());
-        assertThat(t.v1().clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class));
-        assertThat(t.v1().clauses().get(1).getQuery(), instanceOf(TermQuery.class));
     }

     public void testExtractTermsAndRanges_numberFields() throws Exception {

@@ -69,7 +69,6 @@ import org.apache.lucene.search.spans.SpanNotQuery;
 import org.apache.lucene.search.spans.SpanOrQuery;
 import org.apache.lucene.search.spans.SpanTermQuery;
 import org.apache.lucene.util.BytesRef;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.common.lucene.search.function.CombineFunction;
 import org.opensearch.common.lucene.search.function.FunctionScoreQuery;
@@ -164,21 +163,6 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
         assertThat(terms.get(5).bytes().utf8ToString(), equalTo("_term6"));
     }

-    public void testExtractQueryMetadata_multiPhraseQuery_pre6dot1() {
-        MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder().add(new Term("_field", "_long_term"))
-            .add(new Term[] { new Term("_field", "_long_term"), new Term("_field", "_term") })
-            .add(new Term[] { new Term("_field", "_long_term"), new Term("_field", "_very_long_term") })
-            .add(new Term[] { new Term("_field", "_very_long_term") })
-            .build();
-        Result result = analyze(multiPhraseQuery, LegacyESVersion.V_6_0_0);
-        assertThat(result.verified, is(false));
-        assertThat(result.minimumShouldMatch, equalTo(1));
-        List<QueryExtraction> terms = new ArrayList<>(result.extractions);
-        assertThat(terms.size(), equalTo(1));
-        assertThat(terms.get(0).field(), equalTo("_field"));
-        assertThat(terms.get(0).bytes().utf8ToString(), equalTo("_very_long_term"));
-    }
-
     public void testExtractQueryMetadata_multiPhraseQuery_dups() {
         MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder().add(new Term("_field", "_term1"))
             .add(new Term[] { new Term("_field", "_term1"), new Term("_field", "_term2") })
@@ -224,35 +208,6 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
         assertThat(terms.get(4).bytes(), equalTo(termQuery3.getTerm().bytes()));
     }

-    public void testExtractQueryMetadata_booleanQuery_pre6dot1() {
-        BooleanQuery.Builder builder = new BooleanQuery.Builder();
-        TermQuery termQuery1 = new TermQuery(new Term("_field", "_term"));
-        builder.add(termQuery1, BooleanClause.Occur.SHOULD);
-        PhraseQuery phraseQuery = new PhraseQuery("_field", "_term1", "term2");
-        builder.add(phraseQuery, BooleanClause.Occur.SHOULD);
-
-        BooleanQuery.Builder subBuilder = new BooleanQuery.Builder();
-        TermQuery termQuery2 = new TermQuery(new Term("_field1", "_term"));
-        subBuilder.add(termQuery2, BooleanClause.Occur.MUST);
-        TermQuery termQuery3 = new TermQuery(new Term("_field3", "_long_term"));
-        subBuilder.add(termQuery3, BooleanClause.Occur.MUST);
-        builder.add(subBuilder.build(), BooleanClause.Occur.SHOULD);
-
-        BooleanQuery booleanQuery = builder.build();
-        Result result = analyze(booleanQuery, LegacyESVersion.V_6_0_0);
-        assertThat("Should clause with phrase query isn't verified, so entire query can't be verified", result.verified, is(false));
-        assertThat(result.minimumShouldMatch, equalTo(1));
-        List<QueryExtraction> terms = new ArrayList<>(result.extractions);
-        terms.sort(Comparator.comparing(qt -> qt.term));
-        assertThat(terms.size(), equalTo(3));
-        assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field()));
-        assertThat(terms.get(0).bytes(), equalTo(termQuery1.getTerm().bytes()));
-        assertThat(terms.get(1).field(), equalTo(phraseQuery.getTerms()[0].field()));
-        assertThat(terms.get(1).bytes(), equalTo(phraseQuery.getTerms()[0].bytes()));
-        assertThat(terms.get(2).field(), equalTo(termQuery3.getTerm().field()));
-        assertThat(terms.get(2).bytes(), equalTo(termQuery3.getTerm().bytes()));
-    }
-
     public void testExtractQueryMetadata_booleanQuery_msm() {
         BooleanQuery.Builder builder = new BooleanQuery.Builder();
         builder.setMinimumNumberShouldMatch(2);
@@ -328,28 +283,6 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
         assertFalse(result.verified);
     }

-    public void testExtractQueryMetadata_booleanQuery_msm_pre6dot1() {
-        BooleanQuery.Builder builder = new BooleanQuery.Builder();
-        builder.setMinimumNumberShouldMatch(2);
-        TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1"));
-        builder.add(termQuery1, BooleanClause.Occur.SHOULD);
-        TermQuery termQuery2 = new TermQuery(new Term("_field", "_term2"));
-        builder.add(termQuery2, BooleanClause.Occur.SHOULD);
-        TermQuery termQuery3 = new TermQuery(new Term("_field", "_term3"));
-        builder.add(termQuery3, BooleanClause.Occur.SHOULD);
-
-        BooleanQuery booleanQuery = builder.build();
-        Result result = analyze(booleanQuery, LegacyESVersion.V_6_0_0);
-        assertThat(result.verified, is(false));
-        assertThat(result.minimumShouldMatch, equalTo(1));
-        List<QueryExtraction> extractions = new ArrayList<>(result.extractions);
-        extractions.sort(Comparator.comparing(extraction -> extraction.term));
-        assertThat(extractions.size(), equalTo(3));
-        assertThat(extractions.get(0).term, equalTo(new Term("_field", "_term1")));
-        assertThat(extractions.get(1).term, equalTo(new Term("_field", "_term2")));
-        assertThat(extractions.get(2).term, equalTo(new Term("_field", "_term3")));
-    }
-
     public void testExtractQueryMetadata_booleanQuery_onlyShould() {
         BooleanQuery.Builder builder = new BooleanQuery.Builder();
         TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1"));
@@ -403,12 +336,6 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
         assertThat(result.verified, is(false));
         assertThat(result.minimumShouldMatch, equalTo(0));
         assertTermsEqual(result.extractions);
-
-        result = analyze(booleanQuery, LegacyESVersion.V_6_0_0);
-        assertThat(result.matchAllDocs, is(true));
-        assertThat(result.verified, is(false));
-        assertThat(result.minimumShouldMatch, equalTo(0));
-        assertTermsEqual(result.extractions);
     }

     public void testExactMatch_booleanQuery() {
@@ -650,17 +577,6 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
         assertTermsEqual(result.extractions, spanTermQuery1.getTerm(), spanTermQuery2.getTerm());
     }

-    public void testExtractQueryMetadata_spanNearQuery_pre6dot1() {
-        SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term"));
-        SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term"));
-        SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("_field", true).addClause(spanTermQuery1).addClause(spanTermQuery2).build();
-
-        Result result = analyze(spanNearQuery, LegacyESVersion.V_6_0_0);
-        assertThat(result.verified, is(false));
-        assertThat(result.minimumShouldMatch, equalTo(1));
-        assertTermsEqual(result.extractions, spanTermQuery2.getTerm());
-    }
-
     public void testExtractQueryMetadata_spanOrQuery() {
         SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term"));
         SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term"));
@@ -1215,53 +1131,6 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
         assertEquals(new Term("field", "value"), result.extractions.toArray(new QueryExtraction[0])[0].term);
     }

-    public void testPointRangeQuerySelectShortestRange() {
-        BooleanQuery.Builder boolQuery = new BooleanQuery.Builder();
-        boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
-        boolQuery.add(LongPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
-        Result result = analyze(boolQuery.build(), LegacyESVersion.V_6_0_0);
-        assertFalse(result.verified);
-        assertThat(result.minimumShouldMatch, equalTo(1));
-        assertEquals(1, result.extractions.size());
-        assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName);
-
-        boolQuery = new BooleanQuery.Builder();
-        boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
-        boolQuery.add(IntPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
-        result = analyze(boolQuery.build(), LegacyESVersion.V_6_0_0);
-        assertFalse(result.verified);
-        assertThat(result.minimumShouldMatch, equalTo(1));
-        assertEquals(1, result.extractions.size());
-        assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName);
-
-        boolQuery = new BooleanQuery.Builder();
-        boolQuery.add(DoublePoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
-        boolQuery.add(DoublePoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
-        result = analyze(boolQuery.build(), LegacyESVersion.V_6_0_0);
-        assertFalse(result.verified);
-        assertThat(result.minimumShouldMatch, equalTo(1));
-        assertEquals(1, result.extractions.size());
-        assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName);
-
-        boolQuery = new BooleanQuery.Builder();
-        boolQuery.add(DoublePoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
-        boolQuery.add(FloatPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
-        result = analyze(boolQuery.build(), LegacyESVersion.V_6_0_0);
-        assertFalse(result.verified);
-        assertThat(result.minimumShouldMatch, equalTo(1));
-        assertEquals(1, result.extractions.size());
-        assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName);
-
-        boolQuery = new BooleanQuery.Builder();
-        boolQuery.add(HalfFloatPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER);
-        boolQuery.add(HalfFloatPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER);
-        result = analyze(boolQuery.build(), LegacyESVersion.V_6_0_0);
-        assertFalse(result.verified);
-        assertThat(result.minimumShouldMatch, equalTo(1));
-        assertEquals(1, result.extractions.size());
-        assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName);
-    }
-
     public void testPointRangeQuerySelectRanges() {
         BooleanQuery.Builder boolQuery = new BooleanQuery.Builder();
         boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.SHOULD);

@@ -42,7 +42,6 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
 import org.mockito.Mockito;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.common.CheckedFunction;
@@ -91,7 +90,7 @@ public class QueryBuilderStoreTests extends OpenSearchTestCase {
             new Mapper.BuilderContext(settings, new ContentPath(0))
         );

-        Version version = LegacyESVersion.V_6_0_0_beta2;
+        Version version = Version.CURRENT;
         try (IndexWriter indexWriter = new IndexWriter(directory, config)) {
             for (int i = 0; i < queryBuilders.length; i++) {
                 queryBuilders[i] = new TermQueryBuilder(randomAlphaOfLength(4), randomAlphaOfLength(8));

@@ -32,7 +32,6 @@

 package org.opensearch.index.reindex;

-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.common.bytes.BytesArray;
 import org.opensearch.common.bytes.BytesReference;
@@ -93,15 +92,10 @@ public class RoundTripTests extends OpenSearchTestCase {
         ReindexRequest tripped = new ReindexRequest(toInputByteStream(reindex));
         assertRequestEquals(reindex, tripped);

-        // Try slices=auto with a version that doesn't support it, which should fail
-        reindex.setSlices(AbstractBulkByScrollRequest.AUTO_SLICES);
-        Exception e = expectThrows(IllegalArgumentException.class, () -> toInputByteStream(LegacyESVersion.V_6_0_0_alpha1, reindex));
-        assertEquals("Slices set as \"auto\" are not supported before version [6.1.0]. Found version [6.0.0-alpha1]", e.getMessage());

-        // Try regular slices with a version that doesn't support slices=auto, which should succeed
         reindex.setSlices(between(1, Integer.MAX_VALUE));
         tripped = new ReindexRequest(toInputByteStream(reindex));
-        assertRequestEquals(LegacyESVersion.V_6_0_0_alpha1, reindex, tripped);
+        assertRequestEquals(reindex, tripped);
     }

     public void testUpdateByQueryRequest() throws IOException {
@@ -114,11 +108,6 @@ public class RoundTripTests extends OpenSearchTestCase {
         assertRequestEquals(update, tripped);
         assertEquals(update.getPipeline(), tripped.getPipeline());

-        // Try slices=auto with a version that doesn't support it, which should fail
-        update.setSlices(AbstractBulkByScrollRequest.AUTO_SLICES);
-        Exception e = expectThrows(IllegalArgumentException.class, () -> toInputByteStream(LegacyESVersion.V_6_0_0_alpha1, update));
-        assertEquals("Slices set as \"auto\" are not supported before version [6.1.0]. Found version [6.0.0-alpha1]", e.getMessage());

-        // Try regular slices with a version that doesn't support slices=auto, which should succeed
         update.setSlices(between(1, Integer.MAX_VALUE));
         tripped = new UpdateByQueryRequest(toInputByteStream(update));
@@ -132,11 +121,6 @@ public class RoundTripTests extends OpenSearchTestCase {
         DeleteByQueryRequest tripped = new DeleteByQueryRequest(toInputByteStream(delete));
         assertRequestEquals(delete, tripped);

-        // Try slices=auto with a version that doesn't support it, which should fail
-        delete.setSlices(AbstractBulkByScrollRequest.AUTO_SLICES);
-        Exception e = expectThrows(IllegalArgumentException.class, () -> toInputByteStream(LegacyESVersion.V_6_0_0_alpha1, delete));
-        assertEquals("Slices set as \"auto\" are not supported before version [6.1.0]. Found version [6.0.0-alpha1]", e.getMessage());

-        // Try regular slices with a version that doesn't support slices=auto, which should succeed
         delete.setSlices(between(1, Integer.MAX_VALUE));
         tripped = new DeleteByQueryRequest(toInputByteStream(delete));

@@ -76,24 +76,6 @@ public class AnalysisPhoneticFactoryTests extends AnalysisFactoryTestCase {
         TokenFilterFactory tff = plugin.getTokenFilters().get("phonetic").get(idxSettings, null, "phonetic", settings);
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, tff::getSynonymFilter);
         assertEquals("Token filter [phonetic] cannot be used to parse synonyms", e.getMessage());
-
-        settings = Settings.builder()
-            .put(
-                IndexMetadata.SETTING_VERSION_CREATED,
-                VersionUtils.randomVersionBetween(
-                    random(),
-                    LegacyESVersion.V_6_0_0,
-                    VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)
-                )
-            )
-            .put("path.home", createTempDir().toString())
-            .build();
-        idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
-
-        tff = plugin.getTokenFilters().get("phonetic").get(idxSettings, null, "phonetic", settings);
-        tff.getSynonymFilter();
-
-        assertWarnings("Token filter [phonetic] will not be usable to parse synonyms after v7.0");
     }

 }

@@ -100,8 +100,6 @@ import static org.hamcrest.Matchers.startsWith;
  * with {@code tests.is_old_cluster} set to {@code false}.
  */
 public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
-    private final boolean supportsLenientBooleans = getOldClusterVersion().before(LegacyESVersion.V_6_0_0_alpha1);
-
     private String index;
     private String type;

@@ -173,7 +171,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
                 .field("int", randomInt(100))
                 .field("float", randomFloat())
                 // be sure to create a "proper" boolean (True, False) for the first document so that automapping is correct
-                .field("bool", i > 0 && supportsLenientBooleans ? randomLenientBoolean() : randomBoolean())
+                .field("bool", i > 0 && randomBoolean())
                 .field("field.with.dots", randomAlphaOfLength(10))
                 .field("binary", Base64.getEncoder().encodeToString(randomByteArray))
                 .endObject()
@@ -1200,11 +1198,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {

         Map<String, Object> getTemplateResponse = entityAsMap(client().performRequest(getTemplateRequest));
         Map<String, Object> expectedTemplate = new HashMap<>();
-        if (isRunningAgainstOldCluster() && getOldClusterVersion().before(LegacyESVersion.V_6_0_0_beta1)) {
-            expectedTemplate.put("template", "evil_*");
-        } else {
-            expectedTemplate.put("index_patterns", singletonList("evil_*"));
-        }
+        expectedTemplate.put("index_patterns", singletonList("evil_*"));
         expectedTemplate.put("settings", singletonMap("index", singletonMap("number_of_shards", "1")));
         // We don't have the type in the response starting with 7.0, but we won't have it on old cluster after upgrade
         // either so look at the response to figure out the correct assertions

@@ -199,7 +199,8 @@ public class IndexingIT extends OpenSearchRestTestCase {
         final int numberOfInitialDocs = 1 + randomInt(5);
         logger.info("indexing [{}] docs initially", numberOfInitialDocs);
         numDocs += indexDocs(index, 0, numberOfInitialDocs);
-        assertSeqNoOnShards(index, nodes, nodes.getBWCVersion().onOrAfter(LegacyESVersion.V_6_0_0) ? numDocs : 0, newNodeClient);
+        boolean compat = nodes.getBWCVersion().compareTo(LegacyESVersion.fromId(6000000)) >= 0;
+        assertSeqNoOnShards(index, nodes, compat ? numDocs : 0, newNodeClient);
         logger.info("allowing shards on all nodes");
         updateIndexSettings(index, Settings.builder().putNull("index.routing.allocation.include._name"));
         ensureGreen(index);
@@ -210,7 +211,8 @@ public class IndexingIT extends OpenSearchRestTestCase {
         final int numberOfDocsAfterAllowingShardsOnAllNodes = 1 + randomInt(5);
         logger.info("indexing [{}] docs after allowing shards on all nodes", numberOfDocsAfterAllowingShardsOnAllNodes);
         numDocs += indexDocs(index, numDocs, numberOfDocsAfterAllowingShardsOnAllNodes);
-        assertSeqNoOnShards(index, nodes, nodes.getBWCVersion().onOrAfter(LegacyESVersion.V_6_0_0) ? numDocs : 0, newNodeClient);
+        compat = nodes.getBWCVersion().compareTo(LegacyESVersion.fromId(6000000)) >= 0;
+        assertSeqNoOnShards(index, nodes, compat ? numDocs : 0, newNodeClient);
         Shard primary = buildShards(index, nodes, newNodeClient).stream().filter(Shard::isPrimary).findFirst().get();
         logger.info("moving primary to new node by excluding {}", primary.getNode().getNodeName());
         updateIndexSettings(index, Settings.builder().put("index.routing.allocation.exclude._name", primary.getNode().getNodeName()));
@@ -220,8 +222,8 @@ public class IndexingIT extends OpenSearchRestTestCase {
         logger.info("indexing [{}] docs after moving primary", numberOfDocsAfterMovingPrimary);
         numDocsOnNewPrimary += indexDocs(index, numDocs, numberOfDocsAfterMovingPrimary);
         numDocs += numberOfDocsAfterMovingPrimary;
-        assertSeqNoOnShards(index, nodes,
-            nodes.getBWCVersion().onOrAfter(LegacyESVersion.V_6_0_0) ? numDocs : numDocsOnNewPrimary, newNodeClient);
+        compat = nodes.getBWCVersion().compareTo(LegacyESVersion.fromId(6000000)) >= 0;
+        assertSeqNoOnShards(index, nodes, compat ? numDocs : numDocsOnNewPrimary, newNodeClient);
         /*
          * Dropping the number of replicas to zero, and then increasing it to one triggers a recovery thus exercising any BWC-logic in
          * the recovery code.
@@ -240,8 +242,8 @@ public class IndexingIT extends OpenSearchRestTestCase {
         for (Shard shard : buildShards(index, nodes, newNodeClient)) {
             assertCount(index, "_only_nodes:" + shard.node.nodeName, numDocs);
         }
-        assertSeqNoOnShards(index, nodes,
-            nodes.getBWCVersion().onOrAfter(LegacyESVersion.V_6_0_0) ? numDocs : numDocsOnNewPrimary, newNodeClient);
+        compat = nodes.getBWCVersion().compareTo(LegacyESVersion.fromId(6000000)) >= 0;
+        assertSeqNoOnShards(index, nodes, compat ? numDocs : numDocsOnNewPrimary, newNodeClient);
     }
 }

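The replacement calls LegacyESVersion.fromId(6000000) rather than a named constant. Elasticsearch-style version ids pack major/minor/revision/build into a single integer, conventionally major * 1,000,000 + minor * 10,000 + revision * 100 + build, so 6000000 acts as a 6.0.0 floor that sorts at or below every 6.0.0 release id and survives the removal of the named V_6_0_* constants. A small self-contained sketch of that arithmetic, assuming the usual encoding:

    // Decode an Elasticsearch/OpenSearch-style version id (assumed encoding:
    // major*1_000_000 + minor*10_000 + revision*100 + build).
    public final class VersionIdDemo {
        public static void main(String[] args) {
            int id = 6_000_000;              // the literal passed to LegacyESVersion.fromId(...)
            int major = id / 1_000_000;      // 6
            int minor = (id / 10_000) % 100; // 0
            int revision = (id / 100) % 100; // 0
            int build = id % 100;            // 0
            System.out.printf("%d.%d.%d (build %d)%n", major, minor, revision, build);
        }
    }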
@@ -39,7 +39,6 @@ import org.apache.lucene.search.SortedSetSortField;
 import org.apache.lucene.search.join.ScoreMode;
 import org.apache.lucene.util.Constants;

-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.action.admin.cluster.state.ClusterStateRequest;
 import org.opensearch.action.admin.cluster.state.ClusterStateResponse;
@@ -399,7 +398,7 @@ public class SplitIndexIT extends OpenSearchIntegTestCase {

     public void testCreateSplitIndex() throws Exception {
         internalCluster().ensureAtLeastNumDataNodes(2);
-        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0_rc2, Version.CURRENT);
+        Version version = VersionUtils.randomIndexCompatibleVersion(random());
         prepareCreate("source").setSettings(
             Settings.builder().put(indexSettings()).put("number_of_shards", 1).put("index.version.created", version)
         ).get();

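Here and in the geo/search tests below, a hard-coded randomVersionBetween(random(), LegacyESVersion.V_6_0_*, Version.CURRENT) lower bound becomes VersionUtils.randomIndexCompatibleVersion(random()), which picks from the versions whose on-disk indices the current version can still read. A hedged sketch of what such a helper does — only the VersionUtils method name comes from the diff; the filtering logic is illustrative:

    import java.util.List;
    import java.util.Random;
    import java.util.stream.Collectors;

    final class RandomVersionSketch {
        // Pick uniformly from the versions at or above the minimum index-compatible
        // version, mirroring a randomIndexCompatibleVersion-style helper.
        static <V extends Comparable<V>> V randomIndexCompatible(List<V> allVersions, V minCompatible, Random random) {
            List<V> candidates = allVersions.stream()
                .filter(v -> v.compareTo(minCompatible) >= 0)
                .collect(Collectors.toList());
            return candidates.get(random.nextInt(candidates.size()));
        }
    }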
@@ -1,94 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.get;
-
-import org.opensearch.LegacyESVersion;
-import org.opensearch.action.admin.indices.alias.Alias;
-import org.opensearch.action.get.GetResponse;
-import org.opensearch.cluster.metadata.IndexMetadata;
-import org.opensearch.common.settings.Settings;
-import org.opensearch.common.xcontent.XContentBuilder;
-import org.opensearch.test.OpenSearchIntegTestCase;
-
-import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.opensearch.get.GetActionIT.indexOrAlias;
-import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
-import static org.hamcrest.Matchers.equalTo;
-
-public class LegacyGetActionIT extends OpenSearchIntegTestCase {
-
-    @Override
-    protected boolean forbidPrivateIndexSettings() {
-        return false;
-    }
-
-    public void testGetFieldsMetadataWithRouting() throws Exception {
-        assertAcked(
-            prepareCreate("test").addMapping("_doc", "field1", "type=keyword,store=true")
-                .addAlias(new Alias("alias"))
-                .setSettings(
-                    Settings.builder()
-                        .put("index.refresh_interval", -1)
-                        // multi-types in 6.0.0
-                        .put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), LegacyESVersion.V_6_0_0)
-                )
-        );
-
-        try (XContentBuilder source = jsonBuilder().startObject().field("field1", "value").endObject()) {
-            client().prepareIndex("test", "_doc", "1").setRouting("1").setSource(source).get();
-        }
-
-        {
-            final GetResponse getResponse = client().prepareGet(indexOrAlias(), "_doc", "1")
-                .setRouting("1")
-                .setStoredFields("field1")
-                .get();
-            assertThat(getResponse.isExists(), equalTo(true));
-            assertThat(getResponse.getField("field1").getValue().toString(), equalTo("value"));
-            assertThat(getResponse.getField("_routing").getValue().toString(), equalTo("1"));
-        }
-
-        flush();
-
-        {
-            final GetResponse getResponse = client().prepareGet(indexOrAlias(), "_doc", "1")
-                .setStoredFields("field1")
-                .setRouting("1")
-                .get();
-            assertThat(getResponse.isExists(), equalTo(true));
-            assertThat(getResponse.getField("field1").getValue().toString(), equalTo("value"));
-            assertThat(getResponse.getField("_routing").getValue().toString(), equalTo("1"));
-        }
-    }
-
-}

@@ -1,119 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.indices.stats;
-
-import org.opensearch.LegacyESVersion;
-import org.opensearch.action.admin.indices.stats.IndicesStatsRequestBuilder;
-import org.opensearch.action.admin.indices.stats.IndicesStatsResponse;
-import org.opensearch.cluster.metadata.IndexMetadata;
-import org.opensearch.common.settings.Settings;
-import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.search.sort.SortOrder;
-import org.opensearch.test.OpenSearchIntegTestCase;
-
-import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
-
-import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.nullValue;
-
-public class LegacyIndexStatsIT extends OpenSearchIntegTestCase {
-
-    @Override
-    protected boolean forbidPrivateIndexSettings() {
-        return false;
-    }
-
-    public void testFieldDataFieldsParam() {
-        assertAcked(
-            client().admin()
-                .indices()
-                .prepareCreate("test1")
-                .setSettings(Settings.builder().put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), LegacyESVersion.V_6_0_0))
-                .addMapping("_doc", "bar", "type=text,fielddata=true", "baz", "type=text,fielddata=true")
-                .get()
-        );
-
-        ensureGreen();
-
-        client().prepareIndex("test1", "_doc", Integer.toString(1)).setSource("{\"bar\":\"bar\",\"baz\":\"baz\"}", XContentType.JSON).get();
-        client().prepareIndex("test1", "_doc", Integer.toString(2)).setSource("{\"bar\":\"bar\",\"baz\":\"baz\"}", XContentType.JSON).get();
-        refresh();
-
-        client().prepareSearch("_all").addSort("bar", SortOrder.ASC).addSort("baz", SortOrder.ASC).execute().actionGet();
-
-        final IndicesStatsRequestBuilder builder = client().admin().indices().prepareStats();
-
-        {
-            final IndicesStatsResponse stats = builder.execute().actionGet();
-            assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0L));
-            assertThat(stats.getTotal().fieldData.getFields(), is(nullValue()));
-        }
-
-        {
-            final IndicesStatsResponse stats = builder.setFieldDataFields("bar").execute().actionGet();
-            assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0L));
-            assertThat(stats.getTotal().fieldData.getFields().containsField("bar"), is(true));
-            assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0L));
-            assertThat(stats.getTotal().fieldData.getFields().containsField("baz"), is(false));
-        }
-
-        {
-            final IndicesStatsResponse stats = builder.setFieldDataFields("bar", "baz").execute().actionGet();
-            assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0L));
-            assertThat(stats.getTotal().fieldData.getFields().containsField("bar"), is(true));
-            assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0L));
-            assertThat(stats.getTotal().fieldData.getFields().containsField("baz"), is(true));
-            assertThat(stats.getTotal().fieldData.getFields().get("baz"), greaterThan(0L));
-        }
-
-        {
-            final IndicesStatsResponse stats = builder.setFieldDataFields("*").execute().actionGet();
-            assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0L));
-            assertThat(stats.getTotal().fieldData.getFields().containsField("bar"), is(true));
-            assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0L));
-            assertThat(stats.getTotal().fieldData.getFields().containsField("baz"), is(true));
-            assertThat(stats.getTotal().fieldData.getFields().get("baz"), greaterThan(0L));
-        }
-
-        {
-            final IndicesStatsResponse stats = builder.setFieldDataFields("*r").execute().actionGet();
-            assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0L));
-            assertThat(stats.getTotal().fieldData.getFields().containsField("bar"), is(true));
-            assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0L));
-            assertThat(stats.getTotal().fieldData.getFields().containsField("baz"), is(false));
-        }
-
-    }
-
-}

@@ -31,7 +31,6 @@

 package org.opensearch.search.aggregations.bucket;

-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.action.search.SearchPhaseExecutionException;
@@ -78,7 +77,7 @@ public class GeoDistanceIT extends OpenSearchIntegTestCase {
         return false;
     }

-    private Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
+    private Version version = VersionUtils.randomIndexCompatibleVersion(random());

     private IndexRequestBuilder indexCity(String idx, String name, String... latLons) throws Exception {
         XContentBuilder source = jsonBuilder().startObject().field("city", name);

@@ -34,7 +34,6 @@ package org.opensearch.search.aggregations.bucket;
 import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.ObjectIntMap;
 import com.carrotsearch.hppc.cursors.ObjectIntCursor;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
@@ -75,7 +74,7 @@ public class GeoHashGridIT extends OpenSearchIntegTestCase {
         return false;
     }

-    private Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
+    private Version version = VersionUtils.randomIndexCompatibleVersion(random());

     static ObjectIntMap<String> expectedDocCountsForGeoHash = null;
     static ObjectIntMap<String> multiValuedExpectedDocCountsForGeoHash = null;

@@ -32,7 +32,6 @@

 package org.opensearch.search.functionscore;

-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.action.ActionFuture;
 import org.opensearch.action.index.IndexRequestBuilder;
@@ -886,7 +885,7 @@ public class DecayFunctionScoreIT extends OpenSearchIntegTestCase {
     }

     public void testManyDocsLin() throws Exception {
-        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
+        Version version = VersionUtils.randomIndexCompatibleVersion(random());
         Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
         XContentBuilder xContentBuilder = jsonBuilder().startObject()
             .startObject("type")

@@ -32,7 +32,6 @@

 package org.opensearch.search.geo;

-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.cluster.metadata.IndexMetadata;
@@ -61,7 +60,7 @@ public class GeoBoundingBoxQueryIT extends OpenSearchIntegTestCase {
     }

     public void testSimpleBoundingBoxTest() throws Exception {
-        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
+        Version version = VersionUtils.randomIndexCompatibleVersion(random());
         Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()
             .startObject()
@@ -185,7 +184,7 @@ public class GeoBoundingBoxQueryIT extends OpenSearchIntegTestCase {
     }

     public void testLimit2BoundingBox() throws Exception {
-        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
+        Version version = VersionUtils.randomIndexCompatibleVersion(random());
         Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()
             .startObject()
@@ -263,7 +262,7 @@ public class GeoBoundingBoxQueryIT extends OpenSearchIntegTestCase {
     }

     public void testCompleteLonRange() throws Exception {
-        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
+        Version version = VersionUtils.randomIndexCompatibleVersion(random());
         Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()
             .startObject()

@@ -32,7 +32,6 @@

 package org.opensearch.search.geo;

-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.action.search.SearchRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
@@ -121,7 +120,7 @@ public class GeoDistanceIT extends OpenSearchIntegTestCase {

     @Before
     public void setupTestIndex() throws IOException {
-        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
+        Version version = VersionUtils.randomIndexCompatibleVersion(random());
         Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()
             .startObject()

@@ -40,7 +40,6 @@ import org.apache.lucene.spatial.query.SpatialArgs;
 import org.apache.lucene.spatial.query.SpatialOperation;
 import org.apache.lucene.spatial.query.UnsupportedSpatialOperation;

-import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
 import org.opensearch.action.admin.indices.create.CreateIndexRequestBuilder;
 import org.opensearch.action.bulk.BulkItemResponse;
@@ -395,7 +394,7 @@ public class GeoFilterIT extends OpenSearchIntegTestCase {

     public void testBulk() throws Exception {
         byte[] bulkAction = unZipData("/org/opensearch/search/geo/gzippedmap.gz");
-        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
+        Version version = VersionUtils.randomIndexCompatibleVersion(random());
         Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()
             .startObject()

@ -32,7 +32,6 @@
|
|||
|
||||
package org.opensearch.search.geo;
|
||||
|
||||
import org.opensearch.LegacyESVersion;
|
||||
import org.opensearch.Version;
|
||||
import org.opensearch.action.search.SearchResponse;
|
||||
import org.opensearch.cluster.metadata.IndexMetadata;
|
||||
|
@ -63,7 +62,7 @@ public class GeoPolygonIT extends OpenSearchIntegTestCase {
|
|||
|
||||
@Override
|
||||
protected void setupSuiteScopeCluster() throws Exception {
|
||||
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
|
||||
Version version = VersionUtils.randomIndexCompatibleVersion(random());
|
||||
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
|
||||
|
||||
assertAcked(
|
||||
|
|
|
@ -32,7 +32,6 @@
|
|||
|
||||
package org.opensearch.search.sort;
|
||||
|
||||
import org.opensearch.LegacyESVersion;
|
||||
import org.opensearch.Version;
|
||||
import org.opensearch.action.search.SearchResponse;
|
||||
import org.opensearch.cluster.metadata.IndexMetadata;
|
||||
|
@ -69,7 +68,7 @@ public class GeoDistanceIT extends OpenSearchIntegTestCase {
|
|||
}
|
||||
|
||||
public void testDistanceSortingMVFields() throws Exception {
|
||||
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
|
||||
Version version = VersionUtils.randomIndexCompatibleVersion(random());
|
||||
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
|
||||
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
|
@ -259,10 +258,8 @@ public class GeoDistanceIT extends OpenSearchIntegTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
// Regression bug:
|
||||
// https://github.com/elastic/elasticsearch/issues/2851
|
||||
public void testDistanceSortingWithMissingGeoPoint() throws Exception {
|
||||
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
|
||||
Version version = VersionUtils.randomIndexCompatibleVersion(random());
|
||||
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
|
||||
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
|
@ -325,7 +322,7 @@ public class GeoDistanceIT extends OpenSearchIntegTestCase {
|
|||
}
|
||||
|
||||
public void testDistanceSortingNestedFields() throws Exception {
|
||||
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
|
||||
Version version = VersionUtils.randomIndexCompatibleVersion(random());
|
||||
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
|
||||
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
|
@ -575,7 +572,7 @@ public class GeoDistanceIT extends OpenSearchIntegTestCase {
|
|||
* Issue 3073
|
||||
*/
|
||||
public void testGeoDistanceFilter() throws IOException {
|
||||
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
|
||||
Version version = VersionUtils.randomIndexCompatibleVersion(random());
|
||||
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
|
||||
double lat = 40.720611;
|
||||
double lon = -73.998776;
|
||||
|
|
|
@ -32,7 +32,6 @@
|
|||
|
||||
package org.opensearch.search.sort;
|
||||
|
||||
import org.opensearch.LegacyESVersion;
|
||||
import org.opensearch.Version;
|
||||
import org.opensearch.action.search.SearchResponse;
|
||||
import org.opensearch.cluster.metadata.IndexMetadata;
|
||||
|
@ -82,9 +81,7 @@ public class GeoDistanceSortBuilderIT extends OpenSearchIntegTestCase {
|
|||
* |___________________________
|
||||
* 1 2 3 4 5 6 7
|
||||
*/
|
||||
Version version = randomBoolean()
|
||||
? Version.CURRENT
|
||||
: VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
|
||||
Version version = randomBoolean() ? Version.CURRENT : VersionUtils.randomIndexCompatibleVersion(random());
|
||||
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
|
||||
assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point"));
|
||||
XContentBuilder d1Builder = jsonBuilder();
|
||||
|
@ -175,9 +172,7 @@ public class GeoDistanceSortBuilderIT extends OpenSearchIntegTestCase {
|
|||
* d1 = (0, 1), (0, 4), (0, 10); so avg. distance is 5, median distance is 4
|
||||
* d2 = (0, 1), (0, 5), (0, 6); so avg. distance is 4, median distance is 5
|
||||
*/
|
||||
Version version = randomBoolean()
|
||||
? Version.CURRENT
|
||||
: VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
|
||||
Version version = randomBoolean() ? Version.CURRENT : VersionUtils.randomIndexCompatibleVersion(random());
|
||||
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
|
||||
assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point"));
|
||||
XContentBuilder d1Builder = jsonBuilder();
|
||||
|
@ -251,9 +246,7 @@ public class GeoDistanceSortBuilderIT extends OpenSearchIntegTestCase {
|
|||
* |______________________
|
||||
* 1 2 3 4 5 6
|
||||
*/
|
||||
Version version = randomBoolean()
|
||||
? Version.CURRENT
|
||||
: VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
|
||||
Version version = randomBoolean() ? Version.CURRENT : VersionUtils.randomIndexCompatibleVersion(random());
|
||||
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
|
||||
assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point"));
|
||||
XContentBuilder d1Builder = jsonBuilder();
|
||||
|
|
|
@@ -46,14 +46,6 @@ import java.lang.reflect.Field;
*/
public class LegacyESVersion extends Version {

-public static final LegacyESVersion V_6_0_0_alpha1 = new LegacyESVersion(6000001, org.apache.lucene.util.Version.LUCENE_7_0_0);
-public static final LegacyESVersion V_6_0_0_alpha2 = new LegacyESVersion(6000002, org.apache.lucene.util.Version.LUCENE_7_0_0);
-public static final LegacyESVersion V_6_0_0_beta1 = new LegacyESVersion(6000026, org.apache.lucene.util.Version.LUCENE_7_0_0);
-public static final LegacyESVersion V_6_0_0_beta2 = new LegacyESVersion(6000027, org.apache.lucene.util.Version.LUCENE_7_0_0);
-public static final LegacyESVersion V_6_0_0_rc1 = new LegacyESVersion(6000051, org.apache.lucene.util.Version.LUCENE_7_0_0);
-public static final LegacyESVersion V_6_0_0_rc2 = new LegacyESVersion(6000052, org.apache.lucene.util.Version.LUCENE_7_0_1);
-public static final LegacyESVersion V_6_0_0 = new LegacyESVersion(6000099, org.apache.lucene.util.Version.LUCENE_7_0_1);
-public static final LegacyESVersion V_6_0_1 = new LegacyESVersion(6000199, org.apache.lucene.util.Version.LUCENE_7_0_1);
public static final LegacyESVersion V_6_1_0 = new LegacyESVersion(6010099, org.apache.lucene.util.Version.LUCENE_7_1_0);
public static final LegacyESVersion V_6_1_1 = new LegacyESVersion(6010199, org.apache.lucene.util.Version.LUCENE_7_1_0);
public static final LegacyESVersion V_6_1_2 = new LegacyESVersion(6010299, org.apache.lucene.util.Version.LUCENE_7_1_0);

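The numeric ids used elsewhere in this diff (fromId(6000026), fromId(6000027)) follow the encoding visible in the deleted constants above. A hedged sketch of that scheme, inferred from those constants rather than taken from the library source:

    // Two decimal digits each for major, minor, and revision; the final two digits
    // encode the release stage (alphas count up from 01, betas from 26, RCs from 51, 99 = GA).
    static int legacyId(int major, int minor, int revision, int stage) {
        return major * 1_000_000 + minor * 10_000 + revision * 100 + stage;
    }
    // legacyId(6, 0, 0, 26) == 6000026 (6.0.0-beta1), legacyId(6, 0, 0, 27) == 6000027 (6.0.0-beta2),
    // legacyId(6, 0, 1, 99) == 6000199 (the 6.0.1 GA constant deleted above).

This is why callers below can swap V_6_0_0_beta1 for LegacyESVersion.fromId(6000026) without changing behavior.
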
@@ -377,7 +377,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
if (major == 5) {
bwcMajor = 2; // we jumped from 2 to 5
} else if (major == 7 || major == 1) {
-return LegacyESVersion.V_6_0_0_beta1;
+return LegacyESVersion.fromId(6000026);
} else if (major == 2) {
return LegacyESVersion.V_7_0_0;
} else {

@@ -31,7 +31,6 @@
package org.opensearch.action;

-import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.WriteRequest;
import org.opensearch.action.support.WriteRequest.RefreshPolicy;
import org.opensearch.action.support.WriteResponse;

@@ -168,13 +167,8 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr
type = in.readString();
id = in.readString();
version = in.readZLong();
-if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
seqNo = in.readZLong();
primaryTerm = in.readVLong();
-} else {
-seqNo = UNASSIGNED_SEQ_NO;
-primaryTerm = UNASSIGNED_PRIMARY_TERM;
-}
forcedRefresh = in.readBoolean();
result = Result.readFrom(in);
}

@@ -317,10 +311,8 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr
out.writeString(type);
out.writeString(id);
out.writeZLong(version);
-if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
out.writeZLong(seqNo);
out.writeVLong(primaryTerm);
-}
out.writeBoolean(forcedRefresh);
result.writeTo(out);
}

@@ -32,7 +32,6 @@
package org.opensearch.action.admin.cluster.storedscripts;

-import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.support.master.AcknowledgedRequest;
import org.opensearch.common.io.stream.StreamInput;

@@ -48,10 +47,6 @@ public class DeleteStoredScriptRequest extends AcknowledgedRequest<DeleteStoredS

public DeleteStoredScriptRequest(StreamInput in) throws IOException {
super(in);
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha2)) {
-in.readString(); // read lang from previous versions
-}
-
id = in.readString();
}

@@ -91,11 +86,6 @@ public class DeleteStoredScriptRequest extends AcknowledgedRequest<DeleteStoredS
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
-
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_alpha2)) {
-out.writeString(""); // write an empty lang to previous versions
-}
-
out.writeString(id);
}

@@ -32,7 +32,6 @@
package org.opensearch.action.admin.cluster.storedscripts;

-import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.support.master.MasterNodeReadRequest;
import org.opensearch.common.io.stream.StreamInput;

@@ -58,21 +57,12 @@ public class GetStoredScriptRequest extends MasterNodeReadRequest<GetStoredScrip

public GetStoredScriptRequest(StreamInput in) throws IOException {
super(in);
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha2)) {
-in.readString(); // read lang from previous versions
-}
-
id = in.readString();
}

@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
-
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_alpha2)) {
-out.writeString(""); // write an empty lang to previous versions
-}
-
out.writeString(id);
}

@@ -32,7 +32,6 @@
package org.opensearch.action.admin.cluster.storedscripts;

-import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.support.master.AcknowledgedRequest;
import org.opensearch.common.bytes.BytesReference;

@@ -59,18 +58,11 @@ public class PutStoredScriptRequest extends AcknowledgedRequest<PutStoredScriptR

public PutStoredScriptRequest(StreamInput in) throws IOException {
super(in);
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha2)) {
-in.readString(); // read lang from previous versions
-}
id = in.readOptionalString();
content = in.readBytesReference();
xContentType = in.readEnum(XContentType.class);
-if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha2)) {
context = in.readOptionalString();
source = new StoredScriptSource(in);
-} else {
-source = StoredScriptSource.parse(content, xContentType == null ? XContentType.JSON : xContentType);
-}
}

public PutStoredScriptRequest() {

@@ -146,18 +138,12 @@ public class PutStoredScriptRequest extends AcknowledgedRequest<PutStoredScriptR
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
-
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_alpha2)) {
-out.writeString(source == null ? "" : source.getLang());
-}
out.writeOptionalString(id);
out.writeBytesReference(content);
out.writeEnum(xContentType);
-if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha2)) {
out.writeOptionalString(context);
source.writeTo(out);
-}
}

@Override
public String toString() {

@@ -32,7 +32,6 @@
package org.opensearch.action.admin.indices.cache.clear;

-import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.broadcast.BroadcastRequest;
import org.opensearch.common.Strings;
import org.opensearch.common.io.stream.StreamInput;

@@ -51,9 +50,6 @@ public class ClearIndicesCacheRequest extends BroadcastRequest<ClearIndicesCache
super(in);
queryCache = in.readBoolean();
fieldDataCache = in.readBoolean();
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_beta1)) {
-in.readBoolean(); // recycler
-}
fields = in.readStringArray();
requestCache = in.readBoolean();
}

@@ -103,9 +99,6 @@ public class ClearIndicesCacheRequest extends BroadcastRequest<ClearIndicesCache
super.writeTo(out);
out.writeBoolean(queryCache);
out.writeBoolean(fieldDataCache);
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_beta1)) {
-out.writeBoolean(false); // recycler
-}
out.writeStringArrayNullable(fields);
out.writeBoolean(requestCache);
}

@@ -109,10 +109,6 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
for (int i = 0; i < size; i++) {
final String type = in.readString();
String source = in.readString();
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) { // TODO change to 5.3.0 after backport
-// we do not know the content type that comes from earlier versions so we autodetect and convert
-source = XContentHelper.convertToJson(new BytesArray(source), false, false, XContentFactory.xContentType(source));
-}
mappings.put(type, source);
}
if (in.getVersion().before(LegacyESVersion.V_6_5_0)) {

@@ -131,10 +131,6 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon

public StoreStatus(StreamInput in) throws IOException {
node = new DiscoveryNode(in);
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-// legacy version
-in.readLong();
-}
allocationId = in.readOptionalString();
allocationStatus = AllocationStatus.readFrom(in);
if (in.readBoolean()) {

@@ -185,10 +181,6 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon
@Override
public void writeTo(StreamOutput out) throws IOException {
node.writeTo(out);
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-// legacy version
-out.writeLong(-1L);
-}
out.writeOptionalString(allocationId);
allocationStatus.writeTo(out);
if (storeException != null) {

@@ -79,9 +79,7 @@ public class ShardStats implements Writeable, ToXContentFragment {
statePath = in.readString();
dataPath = in.readString();
isCustomDataPath = in.readBoolean();
-if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
seqNoStats = in.readOptionalWriteable(SeqNoStats::new);
-}
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_7_0)) {
retentionLeaseStats = in.readOptionalWriteable(RetentionLeaseStats::new);
}

@@ -146,9 +144,7 @@ public class ShardStats implements Writeable, ToXContentFragment {
out.writeString(statePath);
out.writeString(dataPath);
out.writeBoolean(isCustomDataPath);
-if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
out.writeOptionalWriteable(seqNoStats);
-}
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_7_0)) {
out.writeOptionalWriteable(retentionLeaseStats);
}

@@ -104,12 +104,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
super(in);
cause = in.readString();
name = in.readString();
-
-if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
indexPatterns = in.readStringList();
-} else {
-indexPatterns = Collections.singletonList(in.readString());
-}
order = in.readInt();
create = in.readBoolean();
settings = readSettingsFromStream(in);

@@ -498,11 +493,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
super.writeTo(out);
out.writeString(cause);
out.writeString(name);
-if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
out.writeStringCollection(indexPatterns);
-} else {
-out.writeString(indexPatterns.size() > 0 ? indexPatterns.get(0) : "");
-}
out.writeInt(order);
out.writeBoolean(create);
writeSettingsToStream(settings, out);

@@ -149,10 +149,6 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
if (in.getVersion().before(LegacyESVersion.V_7_0_0)) {
in.readOptionalString(); // _parent
}
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-in.readOptionalString(); // timestamp
-in.readOptionalTimeValue(); // ttl
-}
source = in.readBytesReference();
opType = OpType.fromId(in.readByte());
version = in.readLong();

@@ -710,13 +706,7 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
assert ifSeqNo == UNASSIGNED_SEQ_NO;
assert ifPrimaryTerm == UNASSIGNED_PRIMARY_TERM;
autoGeneratedTimestamp = Math.max(0, System.currentTimeMillis()); // extra paranoia
-String uid;
-if (indexCreatedVersion.onOrAfter(LegacyESVersion.V_6_0_0_beta1)) {
-uid = UUIDs.base64UUID();
-} else {
-uid = UUIDs.legacyBase64UUID();
-}
-id(uid);
+id(UUIDs.base64UUID());
}
}

@@ -756,13 +746,6 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
if (out.getVersion().before(LegacyESVersion.V_7_0_0)) {
out.writeOptionalString(null); // _parent
}
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-// Serialize a fake timestamp. 5.x expect this value to be set by the #process method so we can't use null.
-// On the other hand, indices created on 5.x do not index the timestamp field. Therefore passing a 0 (or any value) for
-// the transport layer OK as it will be ignored.
-out.writeOptionalString("0");
-out.writeOptionalWriteable(null);
-}
out.writeBytesReference(source);
out.writeByte(opType.getId());
out.writeLong(version);

@@ -32,7 +32,6 @@
package org.opensearch.action.ingest;

-import org.opensearch.LegacyESVersion;
import org.opensearch.common.ParseField;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;

@@ -45,9 +44,7 @@ import org.opensearch.ingest.IngestDocument;
import org.opensearch.ingest.IngestDocument.Metadata;

import java.io.IOException;
-import java.time.ZoneId;
import java.time.ZonedDateTime;
-import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

@@ -116,12 +113,6 @@ final class WriteableIngestDocument implements Writeable, ToXContentFragment {
WriteableIngestDocument(StreamInput in) throws IOException {
Map<String, Object> sourceAndMetadata = in.readMap();
Map<String, Object> ingestMetadata = in.readMap();
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_beta1)) {
-ingestMetadata.computeIfPresent("timestamp", (k, o) -> {
-Date date = (Date) o;
-return date.toInstant().atZone(ZoneId.systemDefault());
-});
-}
this.ingestDocument = new IngestDocument(sourceAndMetadata, ingestMetadata);
}

@@ -32,7 +32,6 @@
package org.opensearch.action.resync;

-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.action.support.replication.ReplicatedWriteRequest;
import org.opensearch.common.io.stream.StreamInput;

@@ -56,15 +55,6 @@ public final class ResyncReplicationRequest extends ReplicatedWriteRequest<Resyn

ResyncReplicationRequest(StreamInput in) throws IOException {
super(in);
-assert Version.CURRENT.major <= 7;
-if (in.getVersion().equals(LegacyESVersion.V_6_0_0)) {
-/*
-* Resync replication request serialization was broken in 6.0.0 due to the elements of the stream not being prefixed with a
-* byte indicating the type of the operation.
-*/
-// TODO: remove this check in 8.0.0 which provides no BWC guarantees with 6.x.
-throw new IllegalStateException("resync replication request serialization is broken in 6.0.0");
-}
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_4_0)) {
trimAboveSeqNo = in.readZLong();
} else {

@@ -1448,11 +1448,7 @@ public abstract class TransportReplicationAction<

public ConcreteReplicaRequest(Writeable.Reader<R> requestReader, StreamInput in) throws IOException {
super(requestReader, in);
-if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
globalCheckpoint = in.readZLong();
-} else {
-globalCheckpoint = SequenceNumbers.UNASSIGNED_SEQ_NO;
-}
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_5_0)) {
maxSeqNoOfUpdatesOrDeletes = in.readZLong();
} else {

@@ -1477,9 +1473,7 @@ public abstract class TransportReplicationAction<
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
-if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
out.writeZLong(globalCheckpoint);
-}
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_5_0)) {
out.writeZLong(maxSeqNoOfUpdatesOrDeletes);
}

@@ -213,11 +213,7 @@ public class IndexTemplateMetadata extends AbstractDiffable<IndexTemplateMetadat
public static IndexTemplateMetadata readFrom(StreamInput in) throws IOException {
Builder builder = new Builder(in.readString());
builder.order(in.readInt());
-if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
builder.patterns(in.readStringList());
-} else {
-builder.patterns(Collections.singletonList(in.readString()));
-}
builder.settings(Settings.readSettingsFromStream(in));
int mappingsSize = in.readVInt();
for (int i = 0; i < mappingsSize; i++) {

@@ -248,11 +244,7 @@ public class IndexTemplateMetadata extends AbstractDiffable<IndexTemplateMetadat
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeInt(order);
-if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
out.writeStringCollection(patterns);
-} else {
-out.writeString(patterns.get(0));
-}
Settings.writeSettingsToStream(settings, out);
out.writeVInt(mappings.size());
for (ObjectObjectCursor<String, CompressedXContent> cursor : mappings) {

@@ -42,7 +42,6 @@ import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.index.mapper.DateFieldMapper;
import org.opensearch.index.mapper.DocumentMapper;

import java.io.IOException;

@@ -184,13 +183,6 @@ public class MappingMetadata extends AbstractDiffable<MappingMetadata> {
source().writeTo(out);
// routing
out.writeBoolean(routing().required());
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-// timestamp
-out.writeBoolean(false); // enabled
-out.writeString(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.pattern());
-out.writeOptionalString("now"); // 5.x default
-out.writeOptionalBoolean(null);
-}
if (out.getVersion().before(LegacyESVersion.V_7_0_0)) {
out.writeBoolean(false); // hasParentField
}

@@ -223,16 +215,6 @@ public class MappingMetadata extends AbstractDiffable<MappingMetadata> {
source = CompressedXContent.readCompressedString(in);
// routing
routing = new Routing(in.readBoolean());
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-// timestamp
-boolean enabled = in.readBoolean();
-if (enabled) {
-throw new IllegalArgumentException("_timestamp may not be enabled");
-}
-in.readString(); // format
-in.readOptionalString(); // defaultTimestamp
-in.readOptionalBoolean(); // ignoreMissing
-}
if (in.getVersion().before(LegacyESVersion.V_7_0_0)) {
in.readBoolean(); // hasParentField
}

@@ -1317,12 +1317,6 @@ public class MetadataCreateIndexService {
static void validateSplitIndex(ClusterState state, String sourceIndex, String targetIndexName, Settings targetIndexSettings) {
IndexMetadata sourceMetadata = validateResize(state, sourceIndex, targetIndexName, targetIndexSettings);
IndexMetadata.selectSplitShard(0, sourceMetadata, IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.get(targetIndexSettings));
-if (sourceMetadata.getCreationVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-// ensure we have a single type since this would make the splitting code considerably more complex
-// and a 5.x index would not be splittable unless it has been shrunk before so rather opt out of the complexity
-// since in 5.x we don't have a setting to artificially set the number of routing shards
-throw new IllegalStateException("source index created version is too old to apply a split operation");
-}
}

static void validateCloneIndex(ClusterState state, String sourceIndex, String targetIndexName, Settings targetIndexSettings) {

@@ -32,7 +32,6 @@
package org.opensearch.cluster.routing;

-import org.opensearch.LegacyESVersion;
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.cluster.node.DiscoveryNodes;

@@ -280,7 +279,6 @@ public class OperationRouting {
}
// if not, then use it as the index
int routingHash = Murmur3HashFunction.hash(preference);
-if (nodes.getMinNodeVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
// The AllocationService lists shards in a fixed order based on nodes
// so earlier versions of this class would have a tendency to
// select the same node across different shardIds.

@@ -288,7 +286,7 @@ public class OperationRouting {
// for a different element in the list by also incorporating the
// shard ID into the hash of the user-supplied preference key.
routingHash = 31 * routingHash + indexShard.shardId.hashCode();
-}

if (awarenessAttributes.isEmpty()) {
return indexShard.activeInitializingShardsIt(routingHash);
} else {

@@ -316,9 +316,7 @@ public final class UnassignedInfo implements ToXContentFragment, Writeable {
}

public void writeTo(StreamOutput out) throws IOException {
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_beta2) && reason == Reason.MANUAL_ALLOCATION) {
-out.writeByte((byte) Reason.ALLOCATION_FAILED.ordinal());
-} else if (out.getVersion().before(LegacyESVersion.V_7_0_0) && reason == Reason.INDEX_CLOSED) {
+if (out.getVersion().before(LegacyESVersion.V_7_0_0) && reason == Reason.INDEX_CLOSED) {
out.writeByte((byte) Reason.REINITIALIZED.ordinal());
} else {
out.writeByte((byte) reason.ordinal());

@@ -334,10 +334,6 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction

public NodeGatewayStartedShards(StreamInput in) throws IOException {
super(in);
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-// legacy version
-in.readLong();
-}
allocationId = in.readOptionalString();
primary = in.readBoolean();
if (in.readBoolean()) {

@@ -373,10 +369,6 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-// legacy version
-out.writeLong(-1L);
-}
out.writeOptionalString(allocationId);
out.writeBoolean(primary);
if (storeException != null) {

@@ -36,7 +36,6 @@ import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.search.SortedSetSortField;
-import org.opensearch.LegacyESVersion;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
import org.opensearch.index.fielddata.IndexFieldData;

@@ -149,19 +148,6 @@ public final class IndexSortConfig {
List<String> fields = INDEX_SORT_FIELD_SETTING.get(settings);
this.sortSpecs = fields.stream().map((name) -> new FieldSortSpec(name)).toArray(FieldSortSpec[]::new);
-
-if (sortSpecs.length > 0 && indexSettings.getIndexVersionCreated().before(LegacyESVersion.V_6_0_0_alpha1)) {
-/**
-* This index might be assigned to a node where the index sorting feature is not available
-* (ie. versions prior to {@link LegacyESVersion.V_6_0_0_alpha1_UNRELEASED}) so we must fail here rather than later.
-*/
-throw new IllegalArgumentException(
-"unsupported index.version.created:"
-+ indexSettings.getIndexVersionCreated()
-+ ", can't set index.sort on versions prior to "
-+ LegacyESVersion.V_6_0_0_alpha1
-);
-}

if (INDEX_SORT_ORDER_SETTING.exists(settings)) {
List<SortOrder> orders = INDEX_SORT_ORDER_SETTING.get(settings);
if (orders.size() != sortSpecs.length) {

@@ -88,11 +88,7 @@ public class Segment implements Writeable {
// verbose mode
ramTree = readRamTree(in);
}
-if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
segmentSort = readSegmentSort(in);
-} else {
-segmentSort = null;
-}
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_1_0) && in.readBoolean()) {
attributes = in.readMap(StreamInput::readString, StreamInput::readString);
} else {

@@ -207,9 +203,7 @@ public class Segment implements Writeable {
if (verbose) {
writeRamTree(out, ramTree);
}
-if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
writeSegmentSort(out, segmentSort);
-}
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_1_0)) {
boolean hasAttributes = attributes != null;
out.writeBoolean(hasAttributes);

@@ -224,21 +224,7 @@ public class DynamicTemplate implements ToXContentObject {

XContentFieldType xcontentFieldType = null;
if (matchMappingType != null && matchMappingType.equals("*") == false) {
-try {
xcontentFieldType = XContentFieldType.fromString(matchMappingType);
-} catch (IllegalArgumentException e) {
-if (indexVersionCreated.onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
-throw e;
-} else {
-deprecationLogger.deprecate(
-"invalid_mapping_type",
-"match_mapping_type [" + matchMappingType + "] is invalid and will be ignored: " + e.getMessage()
-);
-// this template is on an unknown type so it will never match anything
-// null indicates that the template should be ignored
-return null;
-}
-}
}

final MatchType matchType = MatchType.fromString(matchPattern);

@@ -37,7 +37,6 @@ import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
-import org.opensearch.LegacyESVersion;
import org.opensearch.common.ParseField;
import org.opensearch.common.ParsingException;
import org.opensearch.common.io.stream.StreamInput;

@@ -100,9 +99,6 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> {
shouldClauses.addAll(readQueries(in));
filterClauses.addAll(readQueries(in));
adjustPureNegative = in.readBoolean();
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-in.readBoolean(); // disable_coord
-}
minimumShouldMatch = in.readOptionalString();
}

@@ -113,9 +109,6 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> {
writeQueries(out, shouldClauses);
writeQueries(out, filterClauses);
out.writeBoolean(adjustPureNegative);
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-out.writeBoolean(true); // disable_coord
-}
out.writeOptionalString(minimumShouldMatch);
}

@@ -40,7 +40,6 @@ import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRefBuilder;
-import org.opensearch.LegacyESVersion;
import org.opensearch.common.ParseField;
import org.opensearch.common.ParsingException;
import org.opensearch.common.Strings;

@@ -136,9 +135,6 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder<CommonTermsQue
analyzer = in.readOptionalString();
lowFreqMinimumShouldMatch = in.readOptionalString();
highFreqMinimumShouldMatch = in.readOptionalString();
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-in.readBoolean(); // disable_coord
-}
cutoffFrequency = in.readFloat();
}

@@ -151,9 +147,6 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder<CommonTermsQue
out.writeOptionalString(analyzer);
out.writeOptionalString(lowFreqMinimumShouldMatch);
out.writeOptionalString(highFreqMinimumShouldMatch);
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-out.writeBoolean(true); // disable_coord
-}
out.writeFloat(cutoffFrequency);
}

@@ -135,13 +135,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
super(in);
fieldName = in.readString();
value = in.readGenericValue();
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_rc1)) {
-MatchQuery.Type.readFromStream(in); // deprecated type
-}
operator = Operator.readFromStream(in);
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_rc1)) {
-in.readVInt(); // deprecated slop
-}
prefixLength = in.readVInt();
maxExpansions = in.readVInt();
fuzzyTranspositions = in.readBoolean();

@@ -162,13 +156,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
out.writeGenericValue(value);
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_rc1)) {
-MatchQuery.Type.BOOLEAN.writeTo(out); // deprecated type
-}
operator.writeTo(out);
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_rc1)) {
-out.writeVInt(MatchQuery.DEFAULT_PHRASE_SLOP); // deprecated slop
-}
out.writeVInt(prefixLength);
out.writeVInt(maxExpansions);
out.writeBoolean(fuzzyTranspositions);

@@ -31,7 +31,6 @@
package org.opensearch.index.query.functionscore;

-import org.opensearch.LegacyESVersion;
import org.opensearch.common.ParsingException;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;

@@ -67,10 +66,8 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder<RandomScore
if (in.readBoolean()) {
seed = in.readInt();
}
-if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_beta1)) {
field = in.readOptionalString();
-}
}

@Override
protected void doWriteTo(StreamOutput out) throws IOException {

@@ -80,10 +77,8 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder<RandomScore
} else {
out.writeBoolean(false);
}
-if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_beta1)) {
out.writeOptionalString(field);
-}
}

@Override
public String getName() {

@@ -54,7 +54,6 @@ import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BitSetIterator;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
-import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.cluster.routing.OperationRouting;

@@ -79,11 +78,6 @@ final class ShardSplittingQuery extends Query {
private final BitSetProducer nestedParentBitSetProducer;

ShardSplittingQuery(IndexMetadata indexMetadata, int shardId, boolean hasNested) {
-if (indexMetadata.getCreationVersion().before(LegacyESVersion.V_6_0_0_rc2)) {
-throw new IllegalArgumentException(
-"Splitting query can only be executed on an index created with version " + LegacyESVersion.V_6_0_0_rc2 + " or higher"
-);
-}
this.indexMetadata = indexMetadata;
this.shardId = shardId;
this.nestedParentBitSetProducer = hasNested ? newParentDocBitSetProducer(indexMetadata.getCreationVersion()) : null;

@@ -44,7 +44,6 @@ import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.opensearch.ExceptionsHelper;
-import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionListener;
import org.opensearch.action.StepListener;
import org.opensearch.cluster.metadata.IndexMetadata;

@@ -141,9 +140,6 @@ final class StoreRecovery {
Sort indexSort = indexShard.getIndexSort();
final boolean hasNested = indexShard.mapperService().hasNested();
final boolean isSplit = sourceMetadata.getNumberOfShards() < indexShard.indexSettings().getNumberOfShards();
-assert isSplit == false
-|| sourceMetadata.getCreationVersion()
-.onOrAfter(LegacyESVersion.V_6_0_0_alpha1) : "for split we require a single type but the index is created before 6.0.0";
ActionListener.completeWith(recoveryListener(indexShard, listener), () -> {
logger.debug("starting recovery from local shards {}", shards);
try {

@@ -62,9 +62,6 @@ public class StoreStats implements Writeable, ToXContentFragment {

public StoreStats(StreamInput in) throws IOException {
sizeInBytes = in.readVLong();
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-in.readVLong(); // throttleTimeInNanos
-}
if (in.getVersion().onOrAfter(RESERVED_BYTES_VERSION)) {
reservedSize = in.readZLong();
} else {

@@ -122,9 +119,6 @@ public class StoreStats implements Writeable, ToXContentFragment {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(sizeInBytes);
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-out.writeVLong(0L); // throttleTimeInNanos
-}
if (out.getVersion().onOrAfter(RESERVED_BYTES_VERSION)) {
out.writeZLong(reservedSize);
}

@@ -55,13 +55,8 @@ public class TranslogStats implements Writeable, ToXContentFragment {
public TranslogStats(StreamInput in) throws IOException {
numberOfOperations = in.readVInt();
translogSizeInBytes = in.readVLong();
-if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_beta1)) {
uncommittedOperations = in.readVInt();
uncommittedSizeInBytes = in.readVLong();
-} else {
-uncommittedOperations = numberOfOperations;
-uncommittedSizeInBytes = translogSizeInBytes;
-}
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_3_0)) {
earliestLastModifiedAge = in.readVLong();
}

@@ -155,10 +150,8 @@ public class TranslogStats implements Writeable, ToXContentFragment {
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(numberOfOperations);
out.writeVLong(translogSizeInBytes);
-if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_beta1)) {
out.writeVInt(uncommittedOperations);
out.writeVLong(uncommittedSizeInBytes);
-}
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_3_0)) {
out.writeVLong(earliestLastModifiedAge);
}

@@ -97,14 +97,7 @@ public class TermsLookup implements Writeable, ToXContentFragment {
}
id = in.readString();
path = in.readString();
-if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_beta1)) {
index = in.readString();
-} else {
-index = in.readOptionalString();
-if (index == null) {
-throw new IllegalStateException("index must not be null in a terms lookup");
-}
-}
routing = in.readOptionalString();
}

@@ -123,11 +116,7 @@ public class TermsLookup implements Writeable, ToXContentFragment {
}
out.writeString(id);
out.writeString(path);
-if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_beta1)) {
out.writeString(index);
-} else {
-out.writeOptionalString(index);
-}
out.writeOptionalString(routing);
}

@@ -33,7 +33,6 @@
package org.opensearch.indices.recovery;

-import org.opensearch.LegacyESVersion;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.index.shard.ShardId;

@@ -58,9 +57,6 @@ class RecoveryPrepareForTranslogOperationsRequest extends RecoveryTransportReque
recoveryId = in.readLong();
shardId = new ShardId(in);
totalTranslogOps = in.readVInt();
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-in.readLong(); // maxUnsafeAutoIdTimestamp
-}
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_2_0) && in.getVersion().before(LegacyESVersion.V_7_4_0)) {
in.readBoolean(); // was fileBasedRecovery
}

@@ -84,9 +80,6 @@ class RecoveryPrepareForTranslogOperationsRequest extends RecoveryTransportReque
out.writeLong(recoveryId);
shardId.writeTo(out);
out.writeVInt(totalTranslogOps);
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-out.writeLong(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP); // maxUnsafeAutoIdTimestamp
-}
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_2_0) && out.getVersion().before(LegacyESVersion.V_7_4_0)) {
out.writeBoolean(true); // was fileBasedRecovery
}

@@ -411,11 +411,8 @@ public class RecoverySourceHandler {

private boolean isTargetSameHistory() {
final String targetHistoryUUID = request.metadataSnapshot().getHistoryUUID();
-assert targetHistoryUUID != null
-|| shard.indexSettings()
-.getIndexVersionCreated()
-.before(LegacyESVersion.V_6_0_0_rc1) : "incoming target history N/A but index was created after or on 6.0.0-rc1";
-return targetHistoryUUID != null && targetHistoryUUID.equals(shard.getHistoryUUID());
+assert targetHistoryUUID != null : "incoming target history missing";
+return targetHistoryUUID.equals(shard.getHistoryUUID());
}

static void runUnderPrimaryPermit(

@@ -37,7 +37,6 @@ import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFormatTooNewException;
import org.apache.lucene.index.IndexFormatTooOldException;
import org.opensearch.Assertions;
-import org.opensearch.LegacyESVersion;
import org.opensearch.OpenSearchException;
import org.opensearch.ExceptionsHelper;
import org.opensearch.action.ActionListener;

@@ -455,9 +454,6 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget
store.incRef();
try {
store.cleanupAndVerify("recovery CleanFilesRequestHandler", sourceMetadata);
-if (indexShard.indexSettings().getIndexVersionCreated().before(LegacyESVersion.V_6_0_0_rc1)) {
-store.ensureIndexHasHistoryUUID();
-}
final String translogUUID = Translog.createEmptyTranslog(
indexShard.shardPath().resolveTranslog(),
globalCheckpoint,

@@ -32,10 +32,8 @@

package org.opensearch.indices.recovery;

-import org.opensearch.LegacyESVersion;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
-import org.opensearch.index.seqno.SequenceNumbers;
import org.opensearch.transport.TransportResponse;

import java.io.IOException;

@@ -49,19 +47,11 @@ final class RecoveryTranslogOperationsResponse extends TransportResponse {

RecoveryTranslogOperationsResponse(final StreamInput in) throws IOException {
super(in);
-// before 6.0.0 we received an empty response so we have to maintain that
-if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
localCheckpoint = in.readZLong();
-} else {
-localCheckpoint = SequenceNumbers.UNASSIGNED_SEQ_NO;
-}
}

@Override
public void writeTo(final StreamOutput out) throws IOException {
-// before 6.0.0 we responded with an empty response so we have to maintain that
-if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
out.writeZLong(localCheckpoint);
-}
}
}

@@ -32,7 +32,6 @@
package org.opensearch.indices.recovery;

-import org.opensearch.LegacyESVersion;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;

@@ -66,11 +65,7 @@ public class StartRecoveryRequest extends TransportRequest {
targetNode = new DiscoveryNode(in);
metadataSnapshot = new Store.MetadataSnapshot(in);
primaryRelocation = in.readBoolean();
-if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
startingSeqNo = in.readLong();
-} else {
-startingSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
-}
}

/**

@@ -149,9 +144,7 @@ public class StartRecoveryRequest extends TransportRequest {
targetNode.writeTo(out);
metadataSnapshot.writeTo(out);
out.writeBoolean(primaryRelocation);
-if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
out.writeLong(startingSeqNo);
-}
}

}

@@ -83,9 +83,6 @@ public class FsInfo implements Iterable<FsInfo.Path>, Writeable, ToXContentFragm
total = in.readLong();
free = in.readLong();
available = in.readLong();
-if (in.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-in.readOptionalBoolean();
-}
}

@Override

@@ -96,9 +93,6 @@ public class FsInfo implements Iterable<FsInfo.Path>, Writeable, ToXContentFragm
out.writeLong(total);
out.writeLong(free);
out.writeLong(available);
-if (out.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
-out.writeOptionalBoolean(null);
-}
}

public String getPath() {

@@ -174,9 +174,9 @@ public class PluginInfo implements Writeable, ToXContentObject {
extendedPlugins = Collections.emptyList();
}
hasNativeController = in.readBoolean();
-if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_beta2) && in.getVersion().before(LegacyESVersion.V_6_3_0)) {
+if (in.getVersion().onOrAfter(LegacyESVersion.fromId(6000027)) && in.getVersion().before(LegacyESVersion.V_6_3_0)) {
/*
-* Elasticsearch versions in [6.0.0-beta2, 6.3.0) allowed plugins to specify that they require the keystore and this was
+* Legacy versions in [6.0.0-beta2, 6.3.0) allowed plugins to specify that they require the keystore and this was
* serialized into the plugin info. Therefore, we have to read and ignore this value from the stream.
*/
in.readBoolean();

@@ -204,7 +204,7 @@ public class PluginInfo implements Writeable, ToXContentObject {
out.writeStringCollection(extendedPlugins);
}
out.writeBoolean(hasNativeController);
-if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_beta2) && out.getVersion().before(LegacyESVersion.V_6_3_0)) {
+if (out.getVersion().onOrAfter(LegacyESVersion.fromId(6000027)) && out.getVersion().before(LegacyESVersion.V_6_3_0)) {
/*
* Elasticsearch versions in [6.0.0-beta2, 6.3.0) allowed plugins to specify that they require the keystore and this was
* serialized into the plugin info. Therefore, we have to write out a value for this boolean.

@@ -301,7 +301,7 @@ public class PluginInfo implements Writeable, ToXContentObject {
}
}

-if (opensearchVersion.before(LegacyESVersion.V_6_3_0) && opensearchVersion.onOrAfter(LegacyESVersion.V_6_0_0_beta2)) {
+if (opensearchVersion.before(LegacyESVersion.V_6_3_0) && opensearchVersion.onOrAfter(LegacyESVersion.fromId(6000027))) {
propsMap.remove("requires.keystore");
}

@ -31,7 +31,6 @@
|
|||
|
||||
package org.opensearch.search.aggregations;
|
||||
|
||||
import org.opensearch.LegacyESVersion;
|
||||
import org.opensearch.common.ParsingException;
|
||||
import org.opensearch.common.io.stream.StreamInput;
|
||||
import org.opensearch.common.io.stream.StreamOutput;
|
||||
|
@ -451,32 +450,7 @@ public abstract class InternalOrder extends BucketOrder {
|
|||
* @throws IOException on error reading from the stream.
|
||||
*/
|
||||
public static BucketOrder readHistogramOrder(StreamInput in, boolean bwcOrderFlag) throws IOException {
|
||||
if (in.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha2)) {
|
||||
return Streams.readOrder(in);
|
||||
} else { // backwards compat logic
|
||||
if (bwcOrderFlag == false || in.readBoolean()) {
|
||||
// translate the old histogram order IDs to the new order objects
|
||||
byte id = in.readByte();
|
||||
switch (id) {
|
||||
case 1:
|
||||
return KEY_ASC;
|
||||
case 2:
|
||||
return KEY_DESC;
|
||||
case 3:
|
||||
return COUNT_ASC;
|
||||
case 4:
|
||||
return COUNT_DESC;
|
||||
case 0: // aggregation order stream logic is backwards compatible
|
||||
boolean asc = in.readBoolean();
|
||||
String key = in.readString();
|
||||
return new Aggregation(key, asc);
|
||||
default: // not expecting compound order ID
|
||||
throw new RuntimeException("unknown histogram order id [" + id + "]");
|
||||
}
|
||||
} else { // default to _key asc if no order specified
|
||||
return KEY_ASC;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -511,41 +485,7 @@ public abstract class InternalOrder extends BucketOrder {
|
|||
* @throws IOException on error writing to the stream.
|
||||
*/
|
||||
public static void writeHistogramOrder(BucketOrder order, StreamOutput out, boolean bwcOrderFlag) throws IOException {
|
||||
if (out.getVersion().onOrAfter(LegacyESVersion.V_6_0_0_alpha2)) {
|
||||
order.writeTo(out);
|
||||
} else { // backwards compat logic
|
||||
if (bwcOrderFlag) { // need to add flag that determines if order exists
|
||||
out.writeBoolean(true); // order always exists
|
||||
}
|
||||
if (order instanceof CompoundOrder) {
|
||||
// older versions do not support histogram compound order; the best we can do here is use the first order.
|
||||
order = ((CompoundOrder) order).orderElements.get(0);
|
||||
}
|
||||
if (order instanceof Aggregation) {
|
||||
// aggregation order stream logic is backwards compatible
|
||||
order.writeTo(out);
|
||||
} else {
|
||||
// convert the new order IDs to the old histogram order IDs.
|
||||
byte id;
|
||||
switch (order.id()) {
|
||||
case COUNT_DESC_ID:
|
||||
id = 4;
|
||||
break;
|
||||
case COUNT_ASC_ID:
|
||||
id = 3;
|
||||
break;
|
||||
case KEY_DESC_ID:
|
||||
id = 2;
|
||||
break;
|
||||
case KEY_ASC_ID:
|
||||
id = 1;
|
||||
break;
|
||||
default:
|
||||
throw new RuntimeException("unknown order id [" + order.id() + "]");
|
||||
}
|
||||
out.writeByte(id);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -35,7 +35,6 @@ package org.opensearch.threadpool;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.Nullable;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;

@@ -691,12 +690,7 @@ public class ThreadPool implements ReportingService<ThreadPoolInfo>, Scheduler {
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(name);
        if (type == ThreadPoolType.FIXED_AUTO_QUEUE_SIZE && out.getVersion().before(LegacyESVersion.V_6_0_0_alpha1)) {
            // 5.x doesn't know about the "fixed_auto_queue_size" thread pool type, just write fixed.
            out.writeString(ThreadPoolType.FIXED.getType());
        } else {
            out.writeString(type.getType());
        }
        out.writeInt(min);
        out.writeInt(max);
        out.writeOptionalTimeValue(keepAlive);
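The removed gate only mattered when talking to 5.x nodes, which could not parse the "fixed_auto_queue_size" type string. With it gone, the write path plausibly reduces to one unconditional write; a sketch assuming nothing else in the method changed:

    // Sketch of the simplified method (assumed; only the version gate was removed in the hunk).
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(name);
        out.writeString(type.getType()); // always the real type; no FIXED fallback for 5.x peers
        out.writeInt(min);
        out.writeInt(max);
        out.writeOptionalTimeValue(keepAlive);
    }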
@@ -913,7 +913,7 @@ public class ExceptionSerializationTests extends OpenSearchTestCase {
    public void testShardLockObtainFailedException() throws IOException {
        ShardId shardId = new ShardId("foo", "_na_", 1);
        ShardLockObtainFailedException orig = new ShardLockObtainFailedException(shardId, "boom");
        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
        Version version = VersionUtils.randomIndexCompatibleVersion(random());
        ShardLockObtainFailedException ex = serialize(orig, version);
        assertEquals(orig.getMessage(), ex.getMessage());
        assertEquals(orig.getShardId(), ex.getShardId());
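This one-line substitution is the recurring pattern of the whole commit: rather than hard-coding V_6_0_0 as the oldest wire version to exercise, tests ask the framework for any version the current build can still read. A sketch of what the helper presumably does (the authoritative definition lives in org.opensearch.test.VersionUtils):

    // Assumed shape of the test helper; check VersionUtils for the real implementation.
    public static Version randomIndexCompatibleVersion(Random random) {
        // any version from the oldest index-compatible release up to the current build
        return randomVersionBetween(random, Version.CURRENT.minimumIndexCompatibilityVersion(), Version.CURRENT);
    }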
@@ -85,7 +85,7 @@ public class LegacyESVersionTests extends OpenSearchTestCase {
    }

    public void testMinimumIndexCompatibilityVersion() {
        assertEquals(LegacyESVersion.fromId(5000099), LegacyESVersion.V_6_0_0_beta1.minimumIndexCompatibilityVersion());
        assertEquals(LegacyESVersion.fromId(5000099), LegacyESVersion.fromId(6000026).minimumIndexCompatibilityVersion());
        assertEquals(LegacyESVersion.fromId(2000099), LegacyESVersion.fromId(5000099).minimumIndexCompatibilityVersion());
        assertEquals(LegacyESVersion.fromId(2000099), LegacyESVersion.fromId(5010000).minimumIndexCompatibilityVersion());
        assertEquals(LegacyESVersion.fromId(2000099), LegacyESVersion.fromId(5000001).minimumIndexCompatibilityVersion());
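With the named constant gone, its raw id takes its place. The ids decode positionally; the scheme below is inferred from the pairs visible in this diff (6000026 standing in for 6.0.0-beta1 here, 6020199 for a 6.2.1 release later on) and is a reader's aid, not API documentation:

    // id = major * 1_000_000 + minor * 10_000 + revision * 100 + build
    // build 99 denotes a GA release; smaller values denote pre-releases.
    int beta1 = 6 * 1_000_000 + 0 * 10_000 + 0 * 100 + 26;  // 6000026 ~ 6.0.0-beta1
    int ga621 = 6 * 1_000_000 + 2 * 10_000 + 1 * 100 + 99;  // 6020199 ~ 6.2.1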
@@ -222,7 +222,9 @@ public class VersionTests extends OpenSearchTestCase {

    /** test opensearch min index compatibility */
    public void testOpenSearchMinIndexCompatVersion() {
        Version opensearchVersion = VersionUtils.randomOpenSearchVersion(random());
        // setting to CURRENT to enforce minIndexCompat Version during version bump
        // previous compatibility versions are guaranteed to be tested during past releases
        Version opensearchVersion = Version.CURRENT;
        // opensearch 1.x minIndexCompat is Legacy 6.8.0
        // opensearch 2.x minCompat is Legacy 7.10.0
        // opensearch 3.x minCompat is 1.{last minor version}.0

@@ -461,5 +463,4 @@ public class VersionTests extends OpenSearchTestCase {
        Version VERSION_5_1_0_UNRELEASED = Version.fromString("5.1.0");
        VersionTests.assertUnknownVersion(VERSION_5_1_0_UNRELEASED);
    }

}
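Reading the in-code comments as assertions gives a feel for the compatibility ladder; a hedged sketch only, since the V_1_0_0/V_2_0_0 constants and exact ids are assumptions, not taken from this diff:

    // Hypothetical spot checks of the ladder described in the comments above.
    assertEquals(LegacyESVersion.fromId(6080099), Version.V_1_0_0.minimumIndexCompatibilityVersion()); // 1.x reads 6.8+
    assertEquals(LegacyESVersion.fromId(7100099), Version.V_2_0_0.minimumIndexCompatibilityVersion()); // 2.x reads 7.10+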
@@ -185,7 +185,7 @@ public class ClusterHealthResponsesTests extends AbstractSerializingTestCase<ClusterHealthResponse> {
        );

        BytesStreamOutput out_lt_1_0 = new BytesStreamOutput();
        Version old_version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, LegacyESVersion.V_6_8_0);
        Version old_version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, LegacyESVersion.V_7_8_0);
        out_lt_1_0.setVersion(old_version);
        clusterHealth.writeTo(out_lt_1_0);

@@ -32,7 +32,6 @@

package org.opensearch.action.admin.cluster.settings;

import org.opensearch.LegacyESVersion;
import org.opensearch.common.io.stream.Writeable;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Setting;

@@ -125,6 +124,6 @@ public class ClusterUpdateSettingsResponseTests extends AbstractSerializingTestCase<ClusterUpdateSettingsResponse> {

    public void testOldSerialisation() throws IOException {
        ClusterUpdateSettingsResponse original = createTestInstance();
        assertSerialization(original, VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, LegacyESVersion.V_6_4_0));
        assertSerialization(original, VersionUtils.randomIndexCompatibleVersion(random()));
    }
}
@@ -32,7 +32,6 @@

package org.opensearch.action.admin.cluster.shards;

import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.routing.ShardRouting;

@@ -97,7 +96,7 @@ public class ClusterSearchShardsResponseTests extends OpenSearchTestCase {
        List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
        entries.addAll(searchModule.getNamedWriteables());
        NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(entries);
        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
        Version version = VersionUtils.randomIndexCompatibleVersion(random());
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.setVersion(version);
            clusterSearchShardsResponse.writeTo(out);
@@ -32,15 +32,10 @@

package org.opensearch.action.admin.indices.close;

import org.opensearch.LegacyESVersion;
import org.opensearch.OpenSearchException;
import org.opensearch.Version;
import org.opensearch.action.NoShardAvailableActionException;
import org.opensearch.action.admin.indices.close.CloseIndexResponse.IndexResult;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.common.Strings;
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.Writeable;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;

@@ -49,7 +44,6 @@ import org.opensearch.index.Index;
import org.opensearch.index.IndexNotFoundException;
import org.opensearch.index.shard.ShardId;
import org.opensearch.test.AbstractWireSerializingTestCase;
import org.opensearch.test.VersionUtils;
import org.opensearch.transport.ActionNotFoundTransportException;

import java.io.IOException;

@@ -58,10 +52,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import static org.opensearch.test.VersionUtils.randomVersionBetween;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

@@ -181,57 +172,6 @@ public class CloseIndexResponseTests extends AbstractWireSerializingTestCase<CloseIndexResponse> {
        );
    }

    public void testBwcSerialization() throws Exception {
        {
            final CloseIndexResponse response = randomResponse();
            try (BytesStreamOutput out = new BytesStreamOutput()) {
                out.setVersion(
                    randomVersionBetween(random(), LegacyESVersion.V_6_0_0, VersionUtils.getPreviousVersion(LegacyESVersion.V_7_2_0))
                );
                response.writeTo(out);

                try (StreamInput in = out.bytes().streamInput()) {
                    in.setVersion(out.getVersion());
                    final AcknowledgedResponse deserializedResponse = new AcknowledgedResponse(in);
                    assertThat(deserializedResponse.isAcknowledged(), equalTo(response.isAcknowledged()));
                }
            }
        }
        {
            final AcknowledgedResponse response = new AcknowledgedResponse(randomBoolean());
            try (BytesStreamOutput out = new BytesStreamOutput()) {
                response.writeTo(out);

                try (StreamInput in = out.bytes().streamInput()) {
                    in.setVersion(
                        randomVersionBetween(random(), LegacyESVersion.V_6_0_0, VersionUtils.getPreviousVersion(LegacyESVersion.V_7_2_0))
                    );
                    final CloseIndexResponse deserializedResponse = new CloseIndexResponse(in);
                    assertThat(deserializedResponse.isAcknowledged(), equalTo(response.isAcknowledged()));
                }
            }
        }
        {
            final CloseIndexResponse response = randomResponse();
            try (BytesStreamOutput out = new BytesStreamOutput()) {
                Version version = randomVersionBetween(random(), LegacyESVersion.V_7_2_0, Version.CURRENT);
                out.setVersion(version);
                response.writeTo(out);
                try (StreamInput in = out.bytes().streamInput()) {
                    in.setVersion(version);
                    final CloseIndexResponse deserializedResponse = new CloseIndexResponse(in);
                    assertThat(deserializedResponse.isAcknowledged(), equalTo(response.isAcknowledged()));
                    assertThat(deserializedResponse.isShardsAcknowledged(), equalTo(response.isShardsAcknowledged()));
                    if (version.onOrAfter(LegacyESVersion.V_7_3_0)) {
                        assertThat(deserializedResponse.getIndices(), hasSize(response.getIndices().size()));
                    } else {
                        assertThat(deserializedResponse.getIndices(), empty());
                    }
                }
            }
        }
    }

    private CloseIndexResponse randomResponse() {
        boolean acknowledged = true;
        final String[] indicesNames = generateRandomStringArray(10, 10, false, true);
@@ -32,15 +32,12 @@

package org.opensearch.action.admin.indices.rollover;

import org.opensearch.LegacyESVersion;
import org.opensearch.common.io.stream.Writeable;
import org.opensearch.common.unit.ByteSizeValue;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.test.AbstractSerializingTestCase;
import org.opensearch.test.VersionUtils;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

@@ -196,9 +193,4 @@ public class RolloverResponseTests extends AbstractSerializingTestCase<RolloverResponse> {
            throw new UnsupportedOperationException();
        }
    }

    public void testOldSerialisation() throws IOException {
        RolloverResponse original = createTestInstance();
        assertSerialization(original, VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, LegacyESVersion.V_6_4_0));
    }
}
@@ -58,7 +58,7 @@ public class MainResponseTests extends AbstractSerializingTestCase<MainResponse> {
        ClusterName clusterName = new ClusterName(randomAlphaOfLength(10));
        String nodeName = randomAlphaOfLength(10);
        final String date = new Date(randomNonNegativeLong()).toString();
        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_1, Version.CURRENT);
        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT);
        Build build = new Build(
            Build.Type.UNKNOWN,
            randomAlphaOfLength(8),
@@ -46,9 +46,7 @@ import java.util.Collections;
import java.util.List;

import static java.util.EnumSet.copyOf;
import static org.opensearch.test.VersionUtils.getPreviousVersion;
import static org.opensearch.test.VersionUtils.randomVersion;
import static org.opensearch.test.VersionUtils.randomVersionBetween;
import static org.hamcrest.CoreMatchers.endsWith;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not;

@@ -76,45 +74,6 @@ public class ClusterBlockTests extends OpenSearchTestCase {
        }
    }

    public void testBwcSerialization() throws Exception {
        for (int runs = 0; runs < randomIntBetween(5, 20); runs++) {
            // Generate a random cluster block in version < 7.0.0
            final Version version = randomVersionBetween(random(), LegacyESVersion.V_6_0_0, getPreviousVersion(LegacyESVersion.V_6_7_0));
            final ClusterBlock expected = randomClusterBlock(version);
            assertNull(expected.uuid());

            // Serialize to node in current version
            final BytesStreamOutput out = new BytesStreamOutput();
            expected.writeTo(out);

            // Deserialize and check the cluster block
            final ClusterBlock actual = new ClusterBlock(out.bytes().streamInput());
            assertClusterBlockEquals(expected, actual);
        }

        for (int runs = 0; runs < randomIntBetween(5, 20); runs++) {
            // Generate a random cluster block in current version
            final ClusterBlock expected = randomClusterBlock(Version.CURRENT);

            // Serialize to node in version < 7.0.0
            final BytesStreamOutput out = new BytesStreamOutput();
            out.setVersion(randomVersionBetween(random(), LegacyESVersion.V_6_0_0, getPreviousVersion(LegacyESVersion.V_6_7_0)));
            expected.writeTo(out);

            // Deserialize and check the cluster block
            final StreamInput in = out.bytes().streamInput();
            in.setVersion(out.getVersion());
            final ClusterBlock actual = new ClusterBlock(in);

            assertThat(actual.id(), equalTo(expected.id()));
            assertThat(actual.status(), equalTo(expected.status()));
            assertThat(actual.description(), equalTo(expected.description()));
            assertThat(actual.retryable(), equalTo(expected.retryable()));
            assertThat(actual.disableStatePersistence(), equalTo(expected.disableStatePersistence()));
            assertArrayEquals(actual.levels().toArray(), expected.levels().toArray());
        }
    }

    public void testToStringDanglingComma() {
        final ClusterBlock clusterBlock = randomClusterBlock();
        assertThat(clusterBlock.toString(), not(endsWith(",")));
@@ -55,7 +55,6 @@ import java.util.Map;

import static org.mockito.Matchers.anyBoolean;
import static org.opensearch.test.VersionUtils.getPreviousVersion;
import static org.opensearch.test.VersionUtils.incompatibleFutureVersion;
import static org.opensearch.test.VersionUtils.maxCompatibleVersion;
import static org.opensearch.test.VersionUtils.randomCompatibleVersion;
import static org.opensearch.test.VersionUtils.randomVersion;

@@ -119,17 +118,6 @@ public class JoinTaskExecutorTests extends OpenSearchTestCase {
            });
        }

        if (minNodeVersion.before(LegacyESVersion.V_6_0_0)) {
            Version tooHigh = incompatibleFutureVersion(minNodeVersion);
            expectThrows(IllegalStateException.class, () -> {
                if (randomBoolean()) {
                    JoinTaskExecutor.ensureNodesCompatibility(tooHigh, nodes);
                } else {
                    JoinTaskExecutor.ensureNodesCompatibility(tooHigh, minNodeVersion, maxNodeVersion);
                }
            });
        }

        if (minNodeVersion.onOrAfter(LegacyESVersion.V_7_0_0)) {
            Version oldMajor = LegacyESVersion.V_6_4_0.minimumCompatibilityVersion();
            expectThrows(IllegalStateException.class, () -> JoinTaskExecutor.ensureMajorVersionBarrier(oldMajor, minNodeVersion));
@@ -206,15 +206,6 @@ public class MetadataCreateIndexServiceTests extends OpenSearchTestCase {
    }

    public void testNumberOfShards() {
        {
            final Version versionCreated = VersionUtils.randomVersionBetween(
                random(),
                LegacyESVersion.V_6_0_0_alpha1,
                VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)
            );
            final Settings.Builder indexSettingsBuilder = Settings.builder().put(SETTING_VERSION_CREATED, versionCreated);
            assertThat(MetadataCreateIndexService.getNumberOfShards(indexSettingsBuilder), equalTo(5));
        }
        {
            final Version versionCreated = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT);
            final Settings.Builder indexSettingsBuilder = Settings.builder().put(SETTING_VERSION_CREATED, versionCreated);

@@ -674,7 +665,7 @@ public class MetadataCreateIndexServiceTests extends OpenSearchTestCase {

        double ratio = numRoutingShards / randomNumShards;
        int intRatio = (int) ratio;
        assertEquals(ratio, (double) (intRatio), 0.0d);
        assertEquals(ratio, intRatio, 0.0d);
        assertTrue(1 < ratio);
        assertTrue(ratio <= 1024);
        assertEquals(0, intRatio % 2);
@@ -209,7 +209,7 @@ public class MetadataIndexUpgradeServiceTests extends OpenSearchTestCase {
            .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetadata.SETTING_CREATION_DATE, 1)
            .put(IndexMetadata.SETTING_INDEX_UUID, "BOOM")
            .put(IndexMetadata.SETTING_VERSION_UPGRADED, LegacyESVersion.V_6_0_0_alpha1)
            .put(IndexMetadata.SETTING_VERSION_UPGRADED, Version.CURRENT.minimumIndexCompatibilityVersion())
            .put(indexSettings)
            .build();
        return IndexMetadata.builder(name).settings(build).build();
@@ -34,7 +34,6 @@ package org.opensearch.cluster.routing;

import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.ClusterName;
import org.opensearch.cluster.ClusterState;

@@ -49,14 +48,12 @@ import org.opensearch.cluster.routing.allocation.FailedShard;
import org.opensearch.common.UUIDs;
import org.opensearch.common.io.stream.ByteBufferStreamInput;
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.index.Index;
import org.opensearch.repositories.IndexId;
import org.opensearch.snapshots.Snapshot;
import org.opensearch.snapshots.SnapshotId;
import org.opensearch.test.VersionUtils;

import java.io.IOException;
import java.nio.ByteBuffer;

@@ -130,28 +127,6 @@ public class UnassignedInfoTests extends OpenSearchAllocationTestCase {
        assertThat(read.getFailedNodeIds(), equalTo(meta.getFailedNodeIds()));
    }

    public void testBwcSerialization() throws Exception {
        final UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CLOSED, "message");
        BytesStreamOutput out = new BytesStreamOutput();
        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
        out.setVersion(version);
        unassignedInfo.writeTo(out);
        out.close();

        StreamInput in = out.bytes().streamInput();
        in.setVersion(version);
        UnassignedInfo read = new UnassignedInfo(in);
        if (version.before(LegacyESVersion.V_7_0_0)) {
            assertThat(read.getReason(), equalTo(UnassignedInfo.Reason.REINITIALIZED));
        } else {
            assertThat(read.getReason(), equalTo(UnassignedInfo.Reason.INDEX_CLOSED));
        }
        assertThat(read.getUnassignedTimeInMillis(), equalTo(unassignedInfo.getUnassignedTimeInMillis()));
        assertThat(read.getMessage(), equalTo(unassignedInfo.getMessage()));
        assertThat(read.getDetails(), equalTo(unassignedInfo.getDetails()));
        assertThat(read.getNumFailedAllocations(), equalTo(unassignedInfo.getNumFailedAllocations()));
    }

    public void testIndexCreated() {
        Metadata metadata = Metadata.builder()
            .put(
@@ -35,7 +35,6 @@ package org.opensearch.cluster.routing.allocation;

import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.admin.cluster.reroute.ClusterRerouteRequest;
import org.opensearch.action.admin.indices.create.CreateIndexRequest;

@@ -253,7 +252,7 @@ public class FailedNodeRoutingTests extends OpenSearchAllocationTestCase {
            buildNewFakeTransportAddress(),
            Collections.emptyMap(),
            roles,
            VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0_alpha1, null)
            VersionUtils.randomIndexCompatibleVersion(random())
        );
    }

@@ -35,7 +35,6 @@ package org.opensearch.cluster.routing.allocation;

import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.OpenSearchAllocationTestCase;

@@ -693,8 +692,18 @@ public class FailedShardsRoutingTests extends OpenSearchAllocationTestCase {
        clusterState = ClusterState.builder(clusterState)
            .nodes(
                DiscoveryNodes.builder(clusterState.nodes())
                    .add(newNode("node3-6.x", VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0_alpha1, null)))
                    .add(newNode("node4-6.x", VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0_alpha1, null)))
                    .add(
                        newNode(
                            "node3-old",
                            VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumIndexCompatibilityVersion(), null)
                        )
                    )
                    .add(
                        newNode(
                            "node4-old",
                            VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumIndexCompatibilityVersion(), null)
                        )
                    )
            )
            .build();

@@ -32,7 +32,6 @@

package org.opensearch.common.geo;

import org.opensearch.LegacyESVersion;
import org.opensearch.OpenSearchParseException;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;

@@ -407,7 +406,7 @@ public class GeoJsonShapeParserTests extends BaseGeoParsingTestCase {
        shellCoordinates.add(new Coordinate(100, 0, 10));
        Coordinate[] coordinates = shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]);

        Version randomVersion = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
        Version randomVersion = VersionUtils.randomIndexCompatibleVersion(random());
        Settings indexSettings = Settings.builder()
            .put(IndexMetadata.SETTING_VERSION_CREATED, randomVersion)
            .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
@@ -231,7 +231,7 @@ public class VersionsTests extends OpenSearchTestCase {

        // too old version, major should be the oldest supported lucene version minus 1
        version = LegacyESVersion.fromString("5.2.1");
        assertEquals(LegacyESVersion.V_6_0_0.luceneVersion.major - 1, version.luceneVersion.major);
        assertEquals(VersionUtils.getFirstVersion().luceneVersion.major - 1, version.luceneVersion.major);

        // future version, should be the same version as today
        version = Version.fromString("2.77.1");
@@ -47,7 +47,6 @@ import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.test.VersionUtils;

import java.io.ByteArrayInputStream;
import java.io.IOException;

@@ -56,7 +55,6 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;

@@ -634,57 +632,6 @@ public class SettingsTests extends OpenSearchTestCase {
        );
    }

    public void testReadLegacyFromStream() throws IOException {
        BytesStreamOutput output = new BytesStreamOutput();
        output.setVersion(VersionUtils.getPreviousVersion(LegacyESVersion.V_6_1_0));
        output.writeVInt(5);
        output.writeString("foo.bar.1");
        output.writeOptionalString("1");
        output.writeString("foo.bar.0");
        output.writeOptionalString("0");
        output.writeString("foo.bar.2");
        output.writeOptionalString("2");
        output.writeString("foo.bar.3");
        output.writeOptionalString("3");
        output.writeString("foo.bar.baz");
        output.writeOptionalString("baz");
        StreamInput in = StreamInput.wrap(BytesReference.toBytes(output.bytes()));
        in.setVersion(VersionUtils.getPreviousVersion(LegacyESVersion.V_6_1_0));
        Settings settings = Settings.readSettingsFromStream(in);
        assertEquals(2, settings.size());
        assertEquals(Arrays.asList("0", "1", "2", "3"), settings.getAsList("foo.bar"));
        assertEquals("baz", settings.get("foo.bar.baz"));
    }

    public void testWriteLegacyOutput() throws IOException {
        BytesStreamOutput output = new BytesStreamOutput();
        output.setVersion(VersionUtils.getPreviousVersion(LegacyESVersion.V_6_1_0));
        Settings settings = Settings.builder().putList("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").putNull("foo.null").build();
        Settings.writeSettingsToStream(settings, output);
        StreamInput in = StreamInput.wrap(BytesReference.toBytes(output.bytes()));
        assertEquals(6, in.readVInt());
        Map<String, String> keyValues = new HashMap<>();
        for (int i = 0; i < 6; i++) {
            keyValues.put(in.readString(), in.readOptionalString());
        }
        assertEquals(keyValues.get("foo.bar.0"), "0");
        assertEquals(keyValues.get("foo.bar.1"), "1");
        assertEquals(keyValues.get("foo.bar.2"), "2");
        assertEquals(keyValues.get("foo.bar.3"), "3");
        assertEquals(keyValues.get("foo.bar.baz"), "baz");
        assertTrue(keyValues.containsKey("foo.null"));
        assertNull(keyValues.get("foo.null"));

        in = StreamInput.wrap(BytesReference.toBytes(output.bytes()));
        in.setVersion(output.getVersion());
        Settings readSettings = Settings.readSettingsFromStream(in);
        assertEquals(3, readSettings.size());
        assertEquals(Arrays.asList("0", "1", "2", "3"), readSettings.getAsList("foo.bar"));
        assertEquals(readSettings.get("foo.bar.baz"), "baz");
        assertTrue(readSettings.keySet().contains("foo.null"));
        assertNull(readSettings.get("foo.null"));
    }

    public void testReadWriteArray() throws IOException {
        BytesStreamOutput output = new BytesStreamOutput();
        output.setVersion(randomFrom(Version.CURRENT, LegacyESVersion.V_6_1_0));
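The two deleted tests were the last documentation of the pre-6.1 stream layout, in which a list setting was written as one numbered key/value pair per element (hence the writeVInt(5) followed by five pairs above). A hypothetical helper showing the flattening, with all names invented for illustration:

    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    // Hypothetical sketch: how foo.bar = ["0","1","2","3"] was laid out on pre-6.1 streams.
    static Map<String, String> flattenListSetting(String key, List<String> values) {
        Map<String, String> flat = new LinkedHashMap<>();
        for (int i = 0; i < values.size(); i++) {
            flat.put(key + "." + i, values.get(i)); // foo.bar.0 -> "0", foo.bar.1 -> "1", ...
        }
        return flat;
    }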
@@ -42,7 +42,6 @@ import org.apache.lucene.analysis.reverse.ReverseStringFilter;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;

@@ -206,7 +205,7 @@ public class AnalysisRegistryTests extends OpenSearchTestCase {
    }

    public void testOverrideDefaultIndexAnalyzerIsUnsupported() {
        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0_alpha1, Version.CURRENT);
        Version version = VersionUtils.randomIndexCompatibleVersion(random());
        Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
        AnalyzerProvider<?> defaultIndex = new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer());
        IllegalArgumentException e = expectThrows(
@@ -44,6 +44,7 @@ import org.opensearch.indices.analysis.PreBuiltAnalyzers;
import org.opensearch.plugins.Plugin;
import org.opensearch.test.OpenSearchSingleNodeTestCase;
import org.opensearch.test.InternalSettingsPlugin;
import org.opensearch.test.VersionUtils;

import java.io.IOException;
import java.util.Collection;

@@ -76,26 +77,24 @@ public class PreBuiltAnalyzerTests extends OpenSearchSingleNodeTestCase {
    public void testThatInstancesAreTheSameAlwaysForKeywordAnalyzer() {
        assertThat(
            PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.CURRENT),
            is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(LegacyESVersion.V_6_0_0))
            is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.CURRENT.minimumIndexCompatibilityVersion()))
        );
    }

    public void testThatInstancesAreCachedAndReused() {
        assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT), PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT));
        // same es version should be cached
        assertSame(
            PreBuiltAnalyzers.STANDARD.getAnalyzer(LegacyESVersion.V_6_2_1),
            PreBuiltAnalyzers.STANDARD.getAnalyzer(LegacyESVersion.V_6_2_1)
        );
        // same opensearch version should be cached
        Version v = VersionUtils.randomVersion(random());
        assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(v), PreBuiltAnalyzers.STANDARD.getAnalyzer(v));
        assertNotSame(
            PreBuiltAnalyzers.STANDARD.getAnalyzer(LegacyESVersion.V_6_0_0),
            PreBuiltAnalyzers.STANDARD.getAnalyzer(LegacyESVersion.V_6_0_1)
            PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT),
            PreBuiltAnalyzers.STANDARD.getAnalyzer(VersionUtils.randomPreviousCompatibleVersion(random(), Version.CURRENT))
        );

        // Same Lucene version should be cached:
        assertSame(
            PreBuiltAnalyzers.STOP.getAnalyzer(LegacyESVersion.V_6_2_1),
            PreBuiltAnalyzers.STOP.getAnalyzer(LegacyESVersion.V_6_2_2)
            PreBuiltAnalyzers.STOP.getAnalyzer(LegacyESVersion.fromId(6020199)),
            PreBuiltAnalyzers.STOP.getAnalyzer(LegacyESVersion.fromId(6020299))
        );
    }

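The STOP assertion leans on the behavior the original comment names: prebuilt analyzers are cached per Lucene version, not per OpenSearch version. Since 6.2.1 (fromId(6020199)) and 6.2.2 (fromId(6020299)) bundle the same Lucene release, both lookups are expected to yield the identical instance; restated in miniature (id decoding as sketched earlier in this diff):

    // 6020199 -> 6.2.1 GA, 6020299 -> 6.2.2 GA: same Lucene release, same cached analyzer.
    Analyzer a = PreBuiltAnalyzers.STOP.getAnalyzer(LegacyESVersion.fromId(6020199));
    Analyzer b = PreBuiltAnalyzers.STOP.getAnalyzer(LegacyESVersion.fromId(6020299));
    assertSame(a, b);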
@@ -32,9 +32,6 @@

package org.opensearch.index.mapper;

import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.Strings;
import org.opensearch.common.compress.CompressedXContent;
import org.opensearch.common.settings.Settings;

@@ -42,9 +39,6 @@ import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.index.IndexService;
import org.opensearch.index.mapper.MapperService.MergeReason;
import org.opensearch.test.OpenSearchSingleNodeTestCase;
import org.opensearch.test.VersionUtils;

import static org.hamcrest.CoreMatchers.containsString;

public class AllFieldMapperTests extends OpenSearchSingleNodeTestCase {

@@ -53,65 +47,6 @@ public class AllFieldMapperTests extends OpenSearchSingleNodeTestCase {
        return false;
    }

    public void testAllDisabled() throws Exception {
        {
            final Version version = VersionUtils.randomVersionBetween(
                random(),
                LegacyESVersion.V_6_0_0,
                LegacyESVersion.V_7_0_0.minimumCompatibilityVersion()
            );
            IndexService indexService = createIndex(
                "test_6x",
                Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build()
            );
            String mappingDisabled = Strings.toString(
                XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().endObject()
            );
            indexService.mapperService().merge("_doc", new CompressedXContent(mappingDisabled), MergeReason.MAPPING_UPDATE);
            assertEquals("{\"_doc\":{\"_all\":{\"enabled\":false}}}", Strings.toString(indexService.mapperService().documentMapper()));

            String mappingEnabled = Strings.toString(
                XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", true).endObject().endObject()
            );
            MapperParsingException exc = expectThrows(
                MapperParsingException.class,
                () -> indexService.mapperService().merge("_doc", new CompressedXContent(mappingEnabled), MergeReason.MAPPING_UPDATE)
            );
            assertThat(exc.getMessage(), containsString("[_all] is disabled in this version."));
        }
        {
            IndexService indexService = createIndex("test");
            String mappingEnabled = Strings.toString(
                XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", true).endObject().endObject()
            );
            MapperParsingException exc = expectThrows(
                MapperParsingException.class,
                () -> indexService.mapperService().merge("_doc", new CompressedXContent(mappingEnabled), MergeReason.MAPPING_UPDATE)
            );
            assertThat(exc.getMessage(), containsString("unsupported parameters: [_all"));

            String mappingDisabled = Strings.toString(
                XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().endObject()
            );
            exc = expectThrows(
                MapperParsingException.class,
                () -> indexService.mapperService().merge("_doc", new CompressedXContent(mappingDisabled), MergeReason.MAPPING_UPDATE)
            );
            assertThat(exc.getMessage(), containsString("unsupported parameters: [_all"));

            String mappingAll = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_all").endObject().endObject());
            exc = expectThrows(
                MapperParsingException.class,
                () -> indexService.mapperService().merge("_doc", new CompressedXContent(mappingAll), MergeReason.MAPPING_UPDATE)
            );
            assertThat(exc.getMessage(), containsString("unsupported parameters: [_all"));

            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().endObject());
            indexService.mapperService().merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
            assertEquals("{\"_doc\":{}}", indexService.mapperService().documentMapper("_doc").mapping().toString());
        }
    }

    public void testUpdateDefaultSearchAnalyzer() throws Exception {
        IndexService indexService = createIndex(
            "test",
@@ -33,6 +33,7 @@
package org.opensearch.index.mapper;

import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.common.Strings;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;

@@ -54,7 +55,7 @@ public class DynamicTemplateTests extends OpenSearchTestCase {

        IllegalArgumentException e = expectThrows(
            IllegalArgumentException.class,
            () -> DynamicTemplate.parse("my_template", templateDef, LegacyESVersion.V_6_0_0_alpha1)
            () -> DynamicTemplate.parse("my_template", templateDef, Version.CURRENT.minimumIndexCompatibilityVersion())
        );
        assertEquals("Illegal dynamic template parameter: [random_param]", e.getMessage());
    }

@@ -66,7 +67,7 @@ public class DynamicTemplateTests extends OpenSearchTestCase {
        // if a wrong match type is specified, we ignore the template
        IllegalArgumentException e = expectThrows(
            IllegalArgumentException.class,
            () -> DynamicTemplate.parse("my_template", templateDef2, LegacyESVersion.V_6_0_0_alpha1)
            () -> DynamicTemplate.parse("my_template", templateDef2, Version.CURRENT.minimumIndexCompatibilityVersion())
        );
        assertEquals(
            "No field type matched on [text], possible values are [object, string, long, double, boolean, date, binary]",

@@ -93,7 +94,7 @@ public class DynamicTemplateTests extends OpenSearchTestCase {
        Map<String, Object> templateDef = new HashMap<>();
        templateDef.put("match_mapping_type", "*");
        templateDef.put("mapping", Collections.singletonMap("store", true));
        DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, LegacyESVersion.V_6_0_0_alpha1);
        DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.CURRENT.minimumIndexCompatibilityVersion());
        assertTrue(template.match("a.b", "b", randomFrom(XContentFieldType.values())));
    }

@@ -101,7 +102,7 @@ public class DynamicTemplateTests extends OpenSearchTestCase {
        Map<String, Object> templateDef = new HashMap<>();
        templateDef.put("match_mapping_type", "string");
        templateDef.put("mapping", Collections.singletonMap("store", true));
        DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, LegacyESVersion.V_6_0_0_alpha1);
        DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.CURRENT.minimumIndexCompatibilityVersion());
        assertTrue(template.match("a.b", "b", XContentFieldType.STRING));
        assertFalse(template.match("a.b", "b", XContentFieldType.BOOLEAN));
    }

@@ -111,7 +112,7 @@ public class DynamicTemplateTests extends OpenSearchTestCase {
        Map<String, Object> templateDef = new HashMap<>();
        templateDef.put("match_mapping_type", "string");
        templateDef.put("mapping", Collections.singletonMap("store", true));
        DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, LegacyESVersion.V_6_0_0_alpha1);
        DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.CURRENT.minimumIndexCompatibilityVersion());
        XContentBuilder builder = JsonXContent.contentBuilder();
        template.toXContent(builder, ToXContent.EMPTY_PARAMS);
        assertEquals("{\"match_mapping_type\":\"string\",\"mapping\":{\"store\":true}}", Strings.toString(builder));

@@ -121,7 +122,7 @@ public class DynamicTemplateTests extends OpenSearchTestCase {
        templateDef.put("match", "*name");
        templateDef.put("unmatch", "first_name");
        templateDef.put("mapping", Collections.singletonMap("store", true));
        template = DynamicTemplate.parse("my_template", templateDef, LegacyESVersion.V_6_0_0_alpha1);
        template = DynamicTemplate.parse("my_template", templateDef, Version.CURRENT.minimumIndexCompatibilityVersion());
        builder = JsonXContent.contentBuilder();
        template.toXContent(builder, ToXContent.EMPTY_PARAMS);
        assertEquals("{\"match\":\"*name\",\"unmatch\":\"first_name\",\"mapping\":{\"store\":true}}", Strings.toString(builder));

@@ -131,7 +132,7 @@ public class DynamicTemplateTests extends OpenSearchTestCase {
        templateDef.put("path_match", "*name");
        templateDef.put("path_unmatch", "first_name");
        templateDef.put("mapping", Collections.singletonMap("store", true));
        template = DynamicTemplate.parse("my_template", templateDef, LegacyESVersion.V_6_0_0_alpha1);
        template = DynamicTemplate.parse("my_template", templateDef, Version.CURRENT.minimumIndexCompatibilityVersion());
        builder = JsonXContent.contentBuilder();
        template.toXContent(builder, ToXContent.EMPTY_PARAMS);
        assertEquals("{\"path_match\":\"*name\",\"path_unmatch\":\"first_name\",\"mapping\":{\"store\":true}}", Strings.toString(builder));

@@ -141,7 +142,7 @@ public class DynamicTemplateTests extends OpenSearchTestCase {
        templateDef.put("match", "^a$");
        templateDef.put("match_pattern", "regex");
        templateDef.put("mapping", Collections.singletonMap("store", true));
        template = DynamicTemplate.parse("my_template", templateDef, LegacyESVersion.V_6_0_0_alpha1);
        template = DynamicTemplate.parse("my_template", templateDef, Version.CURRENT.minimumIndexCompatibilityVersion());
        builder = JsonXContent.contentBuilder();
        template.toXContent(builder, ToXContent.EMPTY_PARAMS);
        assertEquals("{\"match\":\"^a$\",\"match_pattern\":\"regex\",\"mapping\":{\"store\":true}}", Strings.toString(builder));
@@ -1,57 +0,0 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/*
 * Modifications Copyright OpenSearch Contributors. See
 * GitHub history for details.
 */

package org.opensearch.index.mapper;

import org.opensearch.LegacyESVersion;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
import org.opensearch.test.OpenSearchSingleNodeTestCase;

public class LegacyTypeFieldMapperTests extends OpenSearchSingleNodeTestCase {

    @Override
    protected boolean forbidPrivateIndexSettings() {
        return false;
    }

    public void testDocValuesMultipleTypes() throws Exception {
        TypeFieldMapperTests.testDocValues(index -> {
            final Settings settings = Settings.builder()
                .put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), LegacyESVersion.V_6_0_0)
                .build();
            return this.createIndex(index, settings);
        });
        assertWarnings("[types removal] Using the _type field in queries and aggregations is deprecated, prefer to use a field instead.");
    }

}
@@ -33,9 +33,6 @@
package org.opensearch.index.mapper;

import org.apache.lucene.index.IndexableField;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.Strings;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.compress.CompressedXContent;

@@ -48,7 +45,6 @@ import org.opensearch.index.mapper.ObjectMapper.Dynamic;
import org.opensearch.plugins.Plugin;
import org.opensearch.test.OpenSearchSingleNodeTestCase;
import org.opensearch.test.InternalSettingsPlugin;
import org.opensearch.test.VersionUtils;

import java.io.IOException;
import java.io.UncheckedIOException;

@@ -1135,75 +1131,6 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
        return false;
    }

    public void testReorderParentBWC() throws IOException {
        String mapping = Strings.toString(
            XContentFactory.jsonBuilder()
                .startObject()
                .startObject("type")
                .startObject("properties")
                .startObject("nested1")
                .field("type", "nested")
                .endObject()
                .endObject()
                .endObject()
                .endObject()
        );

        Version bwcVersion = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, LegacyESVersion.V_6_4_0);
        for (Version version : new Version[] { LegacyESVersion.V_6_5_0, bwcVersion }) {
            DocumentMapper docMapper = createIndex(
                "test-" + version,
                Settings.builder().put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), version).build()
            ).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));

            assertThat(docMapper.hasNestedObjects(), equalTo(true));
            ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
            assertThat(nested1Mapper.nested().isNested(), equalTo(true));

            ParsedDocument doc = docMapper.parse(
                new SourceToParse(
                    "test",
                    "type",
                    "1",
                    BytesReference.bytes(
                        XContentFactory.jsonBuilder()
                            .startObject()
                            .field("field", "value")
                            .startArray("nested1")
                            .startObject()
                            .field("field1", "1")
                            .field("field2", "2")
                            .endObject()
                            .startObject()
                            .field("field1", "3")
                            .field("field2", "4")
                            .endObject()
                            .endArray()
                            .endObject()
                    ),
                    XContentType.JSON
                )
            );

            assertThat(doc.docs().size(), equalTo(3));
            if (version.onOrAfter(LegacyESVersion.V_6_5_0)) {
                assertThat(doc.docs().get(0).get(TypeFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePathAsString()));
                assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("1"));
                assertThat(doc.docs().get(0).get("nested1.field2"), equalTo("2"));
                assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("3"));
                assertThat(doc.docs().get(1).get("nested1.field2"), equalTo("4"));
                assertThat(doc.docs().get(2).get("field"), equalTo("value"));
            } else {
                assertThat(doc.docs().get(0).get(TypeFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePathAsString()));
                assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("3"));
                assertThat(doc.docs().get(0).get("nested1.field2"), equalTo("4"));
                assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("1"));
                assertThat(doc.docs().get(1).get("nested1.field2"), equalTo("2"));
                assertThat(doc.docs().get(2).get("field"), equalTo("value"));
            }
        }
    }

    public void testMergeNestedMappings() throws IOException {
        MapperService mapperService = createIndex(
            "index1",
@@ -1,118 +0,0 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/*
 * Modifications Copyright OpenSearch Contributors. See
 * GitHub history for details.
 */

package org.opensearch.index.query;

import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.geo.ShapeRelation;
import org.opensearch.common.geo.SpatialStrategy;
import org.opensearch.common.geo.builders.ShapeBuilder;
import org.opensearch.common.settings.Settings;
import org.opensearch.test.VersionUtils;
import org.opensearch.test.geo.RandomShapeGenerator;
import org.opensearch.test.geo.RandomShapeGenerator.ShapeType;

import java.io.IOException;

public class LegacyGeoShapeFieldQueryTests extends GeoShapeQueryBuilderTests {

    @Override
    protected String fieldName() {
        return GEO_SHAPE_FIELD_NAME;
    }

    @Override
    protected Settings createTestIndexSettings() {
        // force the legacy shape impl
        Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, LegacyESVersion.V_6_5_0);
        return Settings.builder().put(super.createTestIndexSettings()).put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
    }

    @Override
    protected GeoShapeQueryBuilder doCreateTestQueryBuilder(boolean indexedShape) {
        ShapeType shapeType = ShapeType.randomType(random());
        ShapeBuilder<?, ?, ?> shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType);
        GeoShapeQueryBuilder builder;
        clearShapeFields();
        if (indexedShape == false) {
            builder = new GeoShapeQueryBuilder(fieldName(), shape);
        } else {
            indexedShapeToReturn = shape;
            indexedShapeId = randomAlphaOfLengthBetween(3, 20);
            builder = new GeoShapeQueryBuilder(fieldName(), indexedShapeId);
            if (randomBoolean()) {
                indexedShapeIndex = randomAlphaOfLengthBetween(3, 20);
                builder.indexedShapeIndex(indexedShapeIndex);
            }
            if (randomBoolean()) {
                indexedShapePath = randomAlphaOfLengthBetween(3, 20);
                builder.indexedShapePath(indexedShapePath);
            }
            if (randomBoolean()) {
                indexedShapeRouting = randomAlphaOfLengthBetween(3, 20);
                builder.indexedShapeRouting(indexedShapeRouting);
            }
        }
        if (randomBoolean()) {
            SpatialStrategy strategy = randomFrom(SpatialStrategy.values());
            // ShapeType.MULTILINESTRING + SpatialStrategy.TERM can lead to large queries and will slow down tests, so
            // we try to avoid that combination
            while (shapeType == ShapeType.MULTILINESTRING && strategy == SpatialStrategy.TERM) {
                strategy = randomFrom(SpatialStrategy.values());
            }
            builder.strategy(strategy);
            if (strategy != SpatialStrategy.TERM) {
                builder.relation(randomFrom(ShapeRelation.values()));
            }
        }

        if (randomBoolean()) {
            builder.ignoreUnmapped(randomBoolean());
        }
        return builder;
    }

    public void testInvalidRelation() throws IOException {
        ShapeBuilder<?, ?, ?> shape = RandomShapeGenerator.createShapeWithin(random(), null);
        GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
        builder.strategy(SpatialStrategy.TERM);
        expectThrows(IllegalArgumentException.class, () -> builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN)));
        GeoShapeQueryBuilder builder2 = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
        builder2.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN));
        expectThrows(IllegalArgumentException.class, () -> builder2.strategy(SpatialStrategy.TERM));
        GeoShapeQueryBuilder builder3 = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
        builder3.strategy(SpatialStrategy.TERM);
        expectThrows(IllegalArgumentException.class, () -> builder3.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN)));
    }
}
@@ -51,7 +51,6 @@ import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.util.TestUtil;
import org.opensearch.LegacyESVersion;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
import org.opensearch.index.search.SimpleQueryStringQueryParser;

@@ -274,11 +273,6 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQueryStringBuilder> {
    }

    public void testDefaultFieldParsing() throws IOException {
        assumeTrue(
            "5.x behaves differently, so skip on non-6.x indices",
            indexSettings().getIndexVersionCreated().onOrAfter(LegacyESVersion.V_6_0_0_alpha1)
        );

        String query = randomAlphaOfLengthBetween(1, 10).toLowerCase(Locale.ROOT);
        String contentString = "{\n" + "    \"simple_query_string\" : {\n" + "      \"query\" : \"" + query + "\"" + "    }\n" + "}";
        SimpleQueryStringBuilder queryBuilder = (SimpleQueryStringBuilder) parseQuery(contentString);
@@ -32,7 +32,6 @@

package org.opensearch.indices;

import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.index.mapper.AllFieldMapper;
import org.opensearch.index.mapper.DataStreamFieldMapper;

@@ -120,23 +119,11 @@ public class IndicesModuleTests extends OpenSearchTestCase {
        {
            Version version = VersionUtils.randomVersionBetween(
                random(),
                LegacyESVersion.V_6_0_0,
                LegacyESVersion.V_7_0_0.minimumCompatibilityVersion()
                Version.CURRENT.minimumIndexCompatibilityVersion(),
                Version.CURRENT
            );
            assertFalse(module.getMapperRegistry().getMapperParsers().isEmpty());
            assertFalse(module.getMapperRegistry().getMetadataMapperParsers(version).isEmpty());
            Map<String, MetadataFieldMapper.TypeParser> metadataMapperParsers = module.getMapperRegistry()
                .getMetadataMapperParsers(version);
            assertEquals(EXPECTED_METADATA_FIELDS_6x.length, metadataMapperParsers.size());
            int i = 0;
            for (String field : metadataMapperParsers.keySet()) {
                assertEquals(EXPECTED_METADATA_FIELDS_6x[i++], field);
            }
        }
        {
            Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT);
            assertFalse(module.getMapperRegistry().getMapperParsers().isEmpty());
            assertFalse(module.getMapperRegistry().getMetadataMapperParsers(version).isEmpty());
            Map<String, MetadataFieldMapper.TypeParser> metadataMapperParsers = module.getMapperRegistry()
                .getMetadataMapperParsers(version);
            assertEquals(EXPECTED_METADATA_FIELDS.length, metadataMapperParsers.size());

@@ -151,21 +138,14 @@ public class IndicesModuleTests extends OpenSearchTestCase {
        IndicesModule noPluginsModule = new IndicesModule(Collections.emptyList());
        IndicesModule module = new IndicesModule(fakePlugins);
        MapperRegistry registry = module.getMapperRegistry();
        Version version = VersionUtils.randomVersionBetween(
            random(),
            LegacyESVersion.V_6_0_0,
            LegacyESVersion.V_7_0_0.minimumCompatibilityVersion()
        );
        assertThat(registry.getMapperParsers().size(), greaterThan(noPluginsModule.getMapperRegistry().getMapperParsers().size()));
        assertThat(
            registry.getMetadataMapperParsers(version).size(),
            greaterThan(noPluginsModule.getMapperRegistry().getMetadataMapperParsers(version).size())
            registry.getMetadataMapperParsers(Version.CURRENT).size(),
            greaterThan(noPluginsModule.getMapperRegistry().getMetadataMapperParsers(Version.CURRENT).size())
        );
        Map<String, MetadataFieldMapper.TypeParser> metadataMapperParsers = module.getMapperRegistry().getMetadataMapperParsers(version);
        Map<String, MetadataFieldMapper.TypeParser> metadataMapperParsers = module.getMapperRegistry()
            .getMetadataMapperParsers(Version.CURRENT);
        Iterator<String> iterator = metadataMapperParsers.keySet().iterator();
        if (version.before(LegacyESVersion.V_7_0_0)) {
            assertEquals(AllFieldMapper.NAME, iterator.next());
        }
        assertEquals(IgnoredFieldMapper.NAME, iterator.next());
        String last = null;
        while (iterator.hasNext()) {
@@ -34,7 +34,6 @@ package org.opensearch.indices;

import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.AlreadyClosedException;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.action.ActionListener;
import org.opensearch.action.admin.indices.stats.CommonStatsFlags;

@@ -565,7 +564,7 @@ public class IndicesServiceTests extends OpenSearchSingleNodeTestCase {

    public void testIsMetadataField() {
        IndicesService indicesService = getIndicesService();
        final Version randVersion = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, Version.CURRENT);
        final Version randVersion = VersionUtils.randomIndexCompatibleVersion(random());
        assertFalse(indicesService.isMetadataField(randVersion, randomAlphaOfLengthBetween(10, 15)));
        for (String builtIn : IndicesModule.getBuiltInMetadataFields()) {
            assertTrue(indicesService.isMetadataField(randVersion, builtIn));
@@ -148,10 +148,11 @@ public class AnalysisModuleTests extends OpenSearchTestCase {

    public void testVersionedAnalyzers() throws Exception {
        String yaml = "/org/opensearch/index/analysis/test1.yml";
        Version version = VersionUtils.randomVersion(random());
        Settings settings2 = Settings.builder()
            .loadFromStream(yaml, getClass().getResourceAsStream(yaml), false)
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .put(IndexMetadata.SETTING_VERSION_CREATED, LegacyESVersion.V_6_0_0)
            .put(IndexMetadata.SETTING_VERSION_CREATED, version)
            .build();
        AnalysisRegistry newRegistry = getNewRegistry(settings2);
        IndexAnalyzers indexAnalyzers = getIndexAnalyzers(newRegistry, settings2);

@@ -164,8 +165,8 @@ public class AnalysisModuleTests extends OpenSearchTestCase {

        // analysis service has the expected version
        assertThat(indexAnalyzers.get("standard").analyzer(), is(instanceOf(StandardAnalyzer.class)));
        assertEquals(LegacyESVersion.V_6_0_0.luceneVersion, indexAnalyzers.get("standard").analyzer().getVersion());
        assertEquals(LegacyESVersion.V_6_0_0.luceneVersion, indexAnalyzers.get("stop").analyzer().getVersion());
        assertEquals(version.luceneVersion, indexAnalyzers.get("standard").analyzer().getVersion());
        assertEquals(version.luceneVersion, indexAnalyzers.get("stop").analyzer().getVersion());

        assertThat(indexAnalyzers.get("custom7").analyzer(), is(instanceOf(StandardAnalyzer.class)));
        assertEquals(org.apache.lucene.util.Version.fromBits(3, 6, 0), indexAnalyzers.get("custom7").analyzer().getVersion());
@@ -32,7 +32,6 @@

package org.opensearch.indices.recovery;

import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.common.UUIDs;

@@ -92,11 +91,7 @@ public class StartRecoveryRequestTests extends OpenSearchTestCase {
        assertThat(outRequest.metadataSnapshot().asMap(), equalTo(inRequest.metadataSnapshot().asMap()));
        assertThat(outRequest.isPrimaryRelocation(), equalTo(inRequest.isPrimaryRelocation()));
        assertThat(outRequest.recoveryId(), equalTo(inRequest.recoveryId()));
        if (targetNodeVersion.onOrAfter(LegacyESVersion.V_6_0_0_alpha1)) {
            assertThat(outRequest.startingSeqNo(), equalTo(inRequest.startingSeqNo()));
        } else {
            assertThat(SequenceNumbers.UNASSIGNED_SEQ_NO, equalTo(inRequest.startingSeqNo()));
        }
    }

}
@@ -709,7 +709,7 @@ public class PluginsServiceTests extends OpenSearchTestCase {
            "my_plugin",
            "desc",
            "1.0",
            LegacyESVersion.V_6_0_0,
            LegacyESVersion.fromId(6000099),
            "1.8",
            "FakePlugin",
            Collections.emptyList(),