Remove unsupported Version.V_5_* (#32937)
This change removes the ES 5.x version constants and their usages.
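Two mechanical rewrites account for nearly every hunk below. Wire-serialization branches guarded by checks like in.getVersion().onOrAfter(Version.V_5_5_0) become unconditional, because minimumCompatibilityVersion() now pins 6.x wire compatibility to 5.6 (expressed as Version.fromId(5060099)), so every peer a 6.x node can reach already passes those checks. The few places that still must name a real 5.x release (reindex-from-remote, which talks to old clusters over plain HTTP) switch from the deleted constants to raw IDs such as Version.fromId(5000099) or to Version.fromString("5.0.0"). A minimal sketch of the first pattern, condensed from the MultiSearchTemplateRequest hunk below:

    // Before: the read is guarded by a version check that could only fail for pre-5.5 peers.
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        if (in.getVersion().onOrAfter(Version.V_5_5_0)) {   // constant deleted by this commit
            maxConcurrentSearchRequests = in.readVInt();
        }
        requests = in.readStreamableList(SearchTemplateRequest::new);
    }

    // After: a 6.x node only ever speaks to 5.6.0+ peers, so the guard is always true and is dropped.
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        maxConcurrentSearchRequests = in.readVInt();
        requests = in.readStreamableList(SearchTemplateRequest::new);
    }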
This commit is contained in:
parent a211d24bda
commit f4e9729d64
@@ -138,9 +138,8 @@ class VersionCollection {
                     break
                 }
             }
-            // caveat 0 - now dip back 2 versions to get the last supported snapshot version of the line
-            Version highestMinor = getHighestPreviousMinor(currentVersion.major - 1)
-            maintenanceBugfixSnapshot = replaceAsSnapshot(highestMinor)
+            // caveat 0 - the last supported snapshot of the line is on a version that we don't support (N-2)
+            maintenanceBugfixSnapshot = null
         } else {
             // caveat 3 did not apply. version is not a X.0.0, so we are somewhere on a X.Y line
             // only check till minor == 0 of the major
@@ -293,7 +292,8 @@ class VersionCollection {
     * If you have a list [5.0.2, 5.1.2, 6.0.1, 6.1.1] and pass in 6 for the nextMajorVersion, it will return you 5.1.2
     */
    private Version getHighestPreviousMinor(Integer nextMajorVersion) {
-        return versionSet.headSet(Version.fromString("${nextMajorVersion}.0.0")).last()
+        SortedSet<Version> result = versionSet.headSet(Version.fromString("${nextMajorVersion}.0.0"))
+        return result.isEmpty() ? null : result.last()
    }

    /**

@@ -26,7 +26,7 @@ class VersionCollectionTests extends GradleUnitTestCase {
         assertEquals(vc.nextMinorSnapshot, Version.fromString("6.3.0-SNAPSHOT"))
         assertEquals(vc.stagedMinorSnapshot, Version.fromString("6.2.0-SNAPSHOT"))
         assertEquals(vc.nextBugfixSnapshot, Version.fromString("6.1.1-SNAPSHOT"))
-        assertEquals(vc.maintenanceBugfixSnapshot, Version.fromString("5.2.1-SNAPSHOT"))
+        assertNull(vc.maintenanceBugfixSnapshot)

         vc.indexCompatible.containsAll(vc.versions)

@@ -65,7 +65,7 @@ class VersionCollectionTests extends GradleUnitTestCase {
         assertEquals(vc.nextMinorSnapshot, Version.fromString("6.3.0-SNAPSHOT"))
         assertEquals(vc.stagedMinorSnapshot, null)
         assertEquals(vc.nextBugfixSnapshot, Version.fromString("6.2.1-SNAPSHOT"))
-        assertEquals(vc.maintenanceBugfixSnapshot, Version.fromString("5.2.1-SNAPSHOT"))
+        assertNull(vc.maintenanceBugfixSnapshot)

         vc.indexCompatible.containsAll(vc.versions)

@@ -64,7 +64,7 @@ public class CommonAnalysisPluginTests extends ESTestCase {
     public void testNGramNoDeprecationWarningPre6_4() throws IOException {
         Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
             .put(IndexMetaData.SETTING_VERSION_CREATED,
-                VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.V_6_3_0))
+                VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_3_0))
             .build();

         IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
@@ -104,7 +104,7 @@ public class CommonAnalysisPluginTests extends ESTestCase {
     public void testEdgeNGramNoDeprecationWarningPre6_4() throws IOException {
         Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
             .put(IndexMetaData.SETTING_VERSION_CREATED,
-                VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.V_6_3_0))
+                VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_3_0))
             .build();

         IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);

@@ -60,7 +60,7 @@ public class HtmlStripCharFilterFactoryTests extends ESTestCase {
     public void testNoDeprecationWarningPre6_3() throws IOException {
         Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
             .put(IndexMetaData.SETTING_VERSION_CREATED,
-                VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.V_6_2_4))
+                VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_2_4))
             .build();

         IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);

@@ -19,7 +19,6 @@

 package org.elasticsearch.script.mustache;

-import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.CompositeIndicesRequest;
@@ -120,21 +119,17 @@ public class MultiSearchTemplateRequest extends ActionRequest implements Composi
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
-        if (in.getVersion().onOrAfter(Version.V_5_5_0)) {
-            maxConcurrentSearchRequests = in.readVInt();
-        }
+        maxConcurrentSearchRequests = in.readVInt();
         requests = in.readStreamableList(SearchTemplateRequest::new);
     }

     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        if (out.getVersion().onOrAfter(Version.V_5_5_0)) {
-            out.writeVInt(maxConcurrentSearchRequests);
-        }
+        out.writeVInt(maxConcurrentSearchRequests);
         out.writeStreamableList(requests);
     }


     @Override
     public boolean equals(Object o) {
         if (this == o) return true;
@@ -148,9 +143,9 @@ public class MultiSearchTemplateRequest extends ActionRequest implements Composi
     @Override
     public int hashCode() {
         return Objects.hash(maxConcurrentSearchRequests, requests, indicesOptions);
-    }
-
-    public static byte[] writeMultiLineFormat(MultiSearchTemplateRequest multiSearchTemplateRequest,
+    }
+
+    public static byte[] writeMultiLineFormat(MultiSearchTemplateRequest multiSearchTemplateRequest,
                                               XContent xContent) throws IOException {
         ByteArrayOutputStream output = new ByteArrayOutputStream();
         for (SearchTemplateRequest templateRequest : multiSearchTemplateRequest.requests()) {
@@ -168,5 +163,5 @@ public class MultiSearchTemplateRequest extends ActionRequest implements Composi
         }
         return output.toByteArray();
     }
-
+
 }

@@ -27,7 +27,6 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.join.JoinUtil;
 import org.apache.lucene.search.join.ScoreMode;
 import org.apache.lucene.search.similarities.Similarity;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -125,15 +124,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
         out.writeInt(maxChildren);
         out.writeVInt(scoreMode.ordinal());
         out.writeNamedWriteable(query);
-        if (out.getVersion().before(Version.V_5_5_0)) {
-            final boolean hasInnerHit = innerHitBuilder != null;
-            out.writeBoolean(hasInnerHit);
-            if (hasInnerHit) {
-                innerHitBuilder.writeToParentChildBWC(out, query, type);
-            }
-        } else {
-            out.writeOptionalWriteable(innerHitBuilder);
-        }
+        out.writeOptionalWriteable(innerHitBuilder);
         out.writeBoolean(ignoreUnmapped);
     }

@@ -21,7 +21,6 @@ package org.elasticsearch.join.query;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.join.ScoreMode;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -97,15 +96,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
         out.writeString(type);
         out.writeBoolean(score);
         out.writeNamedWriteable(query);
-        if (out.getVersion().before(Version.V_5_5_0)) {
-            final boolean hasInnerHit = innerHitBuilder != null;
-            out.writeBoolean(hasInnerHit);
-            if (hasInnerHit) {
-                innerHitBuilder.writeToParentChildBWC(out, query, type);
-            }
-        } else {
-            out.writeOptionalWriteable(innerHitBuilder);
-        }
+        out.writeOptionalWriteable(innerHitBuilder);
         out.writeBoolean(ignoreUnmapped);
     }

@@ -196,10 +196,6 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
     public void testSerializationBWC() throws IOException {
         for (Version version : VersionUtils.allReleasedVersions()) {
             HasChildQueryBuilder testQuery = createTestQueryBuilder();
-            if (version.before(Version.V_5_2_0) && testQuery.innerHit() != null) {
-                // ignore unmapped for inner_hits has been added on 5.2
-                testQuery.innerHit().setIgnoreUnmapped(false);
-            }
             assertSerialization(testQuery, version);
         }
     }

@@ -171,10 +171,6 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
     public void testSerializationBWC() throws IOException {
         for (Version version : VersionUtils.allReleasedVersions()) {
             HasParentQueryBuilder testQuery = createTestQueryBuilder();
-            if (version.before(Version.V_5_2_0) && testQuery.innerHit() != null) {
-                // ignore unmapped for inner_hits has been added on 5.2
-                testQuery.innerHit().setIgnoreUnmapped(false);
-            }
             assertSerialization(testQuery, version);
         }
     }

@@ -272,11 +272,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
             documents = document != null ? Collections.singletonList(document) : Collections.emptyList();
         }
         if (documents.isEmpty() == false) {
-            if (in.getVersion().onOrAfter(Version.V_5_3_0)) {
-                documentXContentType = in.readEnum(XContentType.class);
-            } else {
-                documentXContentType = XContentHelper.xContentType(documents.iterator().next());
-            }
+            documentXContentType = in.readEnum(XContentType.class);
         } else {
             documentXContentType = null;
         }
@@ -329,7 +325,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
             BytesReference doc = documents.isEmpty() ? null : documents.iterator().next();
             out.writeOptionalBytesReference(doc);
         }
-        if (documents.isEmpty() == false && out.getVersion().onOrAfter(Version.V_5_3_0)) {
+        if (documents.isEmpty() == false) {
             out.writeEnum(documentXContentType);
         }
     }

@@ -27,7 +27,6 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.ResourceNotFoundException;
-import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.action.get.GetRequest;
 import org.elasticsearch.action.get.GetResponse;
@@ -36,7 +35,6 @@ import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -57,7 +55,6 @@ import java.io.IOException;
 import java.io.UncheckedIOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Base64;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
@@ -294,26 +291,6 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
         assertThat(result.clauses().get(1).getOccur(), equalTo(BooleanClause.Occur.MUST_NOT));
     }

-    public void testSerializationBwc() throws IOException {
-        final byte[] data = Base64.getDecoder().decode("P4AAAAAFZmllbGQEdHlwZQAAAAAAAA57ImZvbyI6ImJhciJ9AAAAAA==");
-        final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
-            Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
-        try (StreamInput in = StreamInput.wrap(data)) {
-            in.setVersion(version);
-            PercolateQueryBuilder queryBuilder = new PercolateQueryBuilder(in);
-            assertEquals("type", queryBuilder.getDocumentType());
-            assertEquals("field", queryBuilder.getField());
-            assertEquals("{\"foo\":\"bar\"}", queryBuilder.getDocuments().iterator().next().utf8ToString());
-            assertEquals(XContentType.JSON, queryBuilder.getXContentType());
-
-            try (BytesStreamOutput out = new BytesStreamOutput()) {
-                out.setVersion(version);
-                queryBuilder.writeTo(out);
-                assertArrayEquals(data, out.bytes().toBytesRef().bytes);
-            }
-        }
-    }
-
     private static BytesReference randomSource(Set<String> usedFields) {
         try {
             // If we create two source that have the same field, but these fields have different kind of values (str vs. lng) then

@@ -74,7 +74,7 @@ public class QueryBuilderStoreTests extends ESTestCase {
         BinaryFieldMapper fieldMapper = PercolatorFieldMapper.Builder.createQueryBuilderFieldBuilder(
             new Mapper.BuilderContext(settings, new ContentPath(0)));

-        Version version = randomBoolean() ? Version.V_5_6_0 : Version.V_6_0_0_beta2;
+        Version version = Version.V_6_0_0_beta2;
         try (IndexWriter indexWriter = new IndexWriter(directory, config)) {
             for (int i = 0; i < queryBuilders.length; i++) {
                 queryBuilders[i] = new TermQueryBuilder(randomAlphaOfLength(4), randomAlphaOfLength(8));

@@ -61,7 +61,8 @@ final class RemoteRequestBuilders {

         if (searchRequest.scroll() != null) {
             TimeValue keepAlive = searchRequest.scroll().keepAlive();
-            if (remoteVersion.before(Version.V_5_0_0)) {
+            // V_5_0_0
+            if (remoteVersion.before(Version.fromId(5000099))) {
                 /* Versions of Elasticsearch before 5.0 couldn't parse nanos or micros
                  * so we toss out that resolution, rounding up because more scroll
                  * timeout seems safer than less. */
@@ -117,7 +118,8 @@ final class RemoteRequestBuilders {
             for (int i = 1; i < searchRequest.source().storedFields().fieldNames().size(); i++) {
                 fields.append(',').append(searchRequest.source().storedFields().fieldNames().get(i));
             }
-            String storedFieldsParamName = remoteVersion.before(Version.V_5_0_0_alpha4) ? "fields" : "stored_fields";
+            // V_5_0_0
+            String storedFieldsParamName = remoteVersion.before(Version.fromId(5000099)) ? "fields" : "stored_fields";
             request.addParameter(storedFieldsParamName, fields.toString());
         }

@@ -186,7 +188,8 @@ final class RemoteRequestBuilders {
     static Request scroll(String scroll, TimeValue keepAlive, Version remoteVersion) {
         Request request = new Request("POST", "/_search/scroll");

-        if (remoteVersion.before(Version.V_5_0_0)) {
+        // V_5_0_0
+        if (remoteVersion.before(Version.fromId(5000099))) {
            /* Versions of Elasticsearch before 5.0 couldn't parse nanos or micros
             * so we toss out that resolution, rounding up so we shouldn't end up
             * with 0s. */

@@ -155,13 +155,8 @@ public class RoundTripTests extends ESTestCase {
         assertEquals(request.getRemoteInfo().getUsername(), tripped.getRemoteInfo().getUsername());
         assertEquals(request.getRemoteInfo().getPassword(), tripped.getRemoteInfo().getPassword());
         assertEquals(request.getRemoteInfo().getHeaders(), tripped.getRemoteInfo().getHeaders());
-        if (version.onOrAfter(Version.V_5_2_0)) {
-            assertEquals(request.getRemoteInfo().getSocketTimeout(), tripped.getRemoteInfo().getSocketTimeout());
-            assertEquals(request.getRemoteInfo().getConnectTimeout(), tripped.getRemoteInfo().getConnectTimeout());
-        } else {
-            assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, tripped.getRemoteInfo().getSocketTimeout());
-            assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, tripped.getRemoteInfo().getConnectTimeout());
-        }
+        assertEquals(request.getRemoteInfo().getSocketTimeout(), tripped.getRemoteInfo().getSocketTimeout());
+        assertEquals(request.getRemoteInfo().getConnectTimeout(), tripped.getRemoteInfo().getConnectTimeout());
         }
     }

@@ -136,13 +136,15 @@ public class RemoteRequestBuildersTests extends ESTestCase {
         // Test stored_fields for versions that support it
         searchRequest = new SearchRequest().source(new SearchSourceBuilder());
         searchRequest.source().storedField("_source").storedField("_id");
-        remoteVersion = Version.fromId(between(Version.V_5_0_0_alpha4_ID, Version.CURRENT.id));
+        // V_5_0_0_alpha4 => current
+        remoteVersion = Version.fromId(between(5000004, Version.CURRENT.id));
         assertThat(initialSearch(searchRequest, query, remoteVersion).getParameters(), hasEntry("stored_fields", "_source,_id"));

         // Test fields for versions that support it
         searchRequest = new SearchRequest().source(new SearchSourceBuilder());
         searchRequest.source().storedField("_source").storedField("_id");
-        remoteVersion = Version.fromId(between(2000099, Version.V_5_0_0_alpha4_ID - 1));
+        // V_2_0_0 => V_5_0_0_alpha3
+        remoteVersion = Version.fromId(between(2000099, 5000003));
         assertThat(initialSearch(searchRequest, query, remoteVersion).getParameters(), hasEntry("fields", "_source,_id"));

         // Test extra fields for versions that need it
@@ -190,7 +192,8 @@ public class RemoteRequestBuildersTests extends ESTestCase {
     }

     private void assertScroll(Version remoteVersion, Map<String, String> params, TimeValue requested) {
-        if (remoteVersion.before(Version.V_5_0_0)) {
+        // V_5_0_0
+        if (remoteVersion.before(Version.fromId(5000099))) {
             // Versions of Elasticsearch prior to 5.0 can't parse nanos or micros in TimeValue.
             assertThat(params.get("scroll"), not(either(endsWith("nanos")).or(endsWith("micros"))));
             if (requested.getStringRep().endsWith("nanos") || requested.getStringRep().endsWith("micros")) {
@@ -242,7 +245,7 @@ public class RemoteRequestBuildersTests extends ESTestCase {

     public void testScrollEntity() throws IOException {
         String scroll = randomAlphaOfLength(30);
-        HttpEntity entity = scroll(scroll, timeValueMillis(between(1, 1000)), Version.V_5_0_0).getEntity();
+        HttpEntity entity = scroll(scroll, timeValueMillis(between(1, 1000)), Version.fromString("5.0.0")).getEntity();
         assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType().getValue());
         assertThat(Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)),
             containsString("\"" + scroll + "\""));
@@ -255,7 +258,7 @@ public class RemoteRequestBuildersTests extends ESTestCase {

     public void testClearScroll() throws IOException {
         String scroll = randomAlphaOfLength(30);
-        Request request = clearScroll(scroll, Version.V_5_0_0);
+        Request request = clearScroll(scroll, Version.fromString("5.0.0"));
         assertEquals(ContentType.APPLICATION_JSON.toString(), request.getEntity().getContentType().getValue());
         assertThat(Streams.copyToString(new InputStreamReader(request.getEntity().getContent(), StandardCharsets.UTF_8)),
             containsString("\"" + scroll + "\""));

@@ -150,13 +150,15 @@ public class RemoteScrollableHitSourceTests extends ESTestCase {
         assertTrue(called.get());
         called.set(false);
         sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, "main/5_0_0_alpha_3.json").lookupRemoteVersion(v -> {
-            assertEquals(Version.V_5_0_0_alpha3, v);
+            // V_5_0_0_alpha3
+            assertEquals(Version.fromId(5000003), v);
             called.set(true);
         });
         assertTrue(called.get());
         called.set(false);
         sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, "main/with_unknown_fields.json").lookupRemoteVersion(v -> {
-            assertEquals(Version.V_5_0_0_alpha3, v);
+            // V_5_0_0_alpha3
+            assertEquals(Version.fromId(5000003), v);
             called.set(true);
         });
         assertTrue(called.get());

@@ -25,7 +25,6 @@ import com.ibm.icu.text.RuleBasedCollator;
 import com.ibm.icu.util.ULocale;

 import org.apache.lucene.document.Field;
-import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexableField;
@@ -35,7 +34,6 @@ import org.apache.lucene.search.MultiTermQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
@@ -56,7 +54,6 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
-import java.util.function.BiFunction;
 import java.util.function.LongSupplier;

 public class ICUCollationKeywordFieldMapper extends FieldMapper {
@@ -571,7 +568,6 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
     private final String variableTop;
     private final boolean hiraganaQuaternaryMode;
     private final Collator collator;
-    private final BiFunction<String, BytesRef, Field> getDVField;

     protected ICUCollationKeywordFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                                              Settings indexSettings, MultiFields multiFields, CopyTo copyTo, String rules, String language,
@@ -593,11 +589,6 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
         this.variableTop = variableTop;
         this.hiraganaQuaternaryMode = hiraganaQuaternaryMode;
         this.collator = collator;
-        if (indexCreatedVersion.onOrAfter(Version.V_5_6_0)) {
-            getDVField = SortedSetDocValuesField::new;
-        } else {
-            getDVField = SortedDocValuesField::new;
-        }
     }

     @Override
@@ -754,7 +745,7 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
         }

         if (fieldType().hasDocValues()) {
-            fields.add(getDVField.apply(fieldType().name(), binaryValue));
+            fields.add(new SortedSetDocValuesField(fieldType().name(), binaryValue));
         } else if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
             createFieldNamesField(context, fields);
         }

@@ -28,11 +28,9 @@ import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.IndexableFieldType;
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.IndexService;
@@ -106,50 +104,6 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
         assertEquals(DocValuesType.SORTED_SET, fieldType.docValuesType());
     }

-    public void testBackCompat() throws Exception {
-        indexService = createIndex("oldindex", Settings.builder().put("index.version.created", Version.V_5_5_0).build());
-        parser = indexService.mapperService().documentMapperParser();
-
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
-            .endObject().endObject());
-
-        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
-
-        assertEquals(mapping, mapper.mappingSource().toString());
-
-        ParsedDocument doc = mapper.parse(SourceToParse.source("oldindex", "type", "1", BytesReference
-            .bytes(XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", "1234")
-                .endObject()),
-            XContentType.JSON));
-
-        IndexableField[] fields = doc.rootDoc().getFields("field");
-        assertEquals(2, fields.length);
-
-        Collator collator = Collator.getInstance(ULocale.ROOT);
-        RawCollationKey key = collator.getRawCollationKey("1234", null);
-        BytesRef expected = new BytesRef(key.bytes, 0, key.size);
-
-        assertEquals(expected, fields[0].binaryValue());
-        IndexableFieldType fieldType = fields[0].fieldType();
-        assertThat(fieldType.omitNorms(), equalTo(true));
-        assertFalse(fieldType.tokenized());
-        assertFalse(fieldType.stored());
-        assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS));
-        assertThat(fieldType.storeTermVectors(), equalTo(false));
-        assertThat(fieldType.storeTermVectorOffsets(), equalTo(false));
-        assertThat(fieldType.storeTermVectorPositions(), equalTo(false));
-        assertThat(fieldType.storeTermVectorPayloads(), equalTo(false));
-        assertEquals(DocValuesType.NONE, fieldType.docValuesType());
-
-        assertEquals(expected, fields[1].binaryValue());
-        fieldType = fields[1].fieldType();
-        assertThat(fieldType.indexOptions(), equalTo(IndexOptions.NONE));
-        assertEquals(DocValuesType.SORTED, fieldType.docValuesType());
-    }
-
     public void testNullValue() throws IOException {
         String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()

@@ -26,7 +26,6 @@ import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.DocValuesFieldExistsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.hash.MurmurHash3;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.fielddata.IndexFieldData;
@@ -93,10 +92,6 @@ public class Murmur3FieldMapper extends FieldMapper {
                 throw new MapperParsingException("Setting [index] cannot be modified for field [" + name + "]");
             }

-            if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha2)) {
-                node.remove("precision_step");
-            }
-
             TypeParsers.parseField(builder, name, node, parserContext);

             return builder;

@@ -82,10 +82,6 @@ public class SizeFieldMapper extends MetadataFieldMapper {
         @Override
         public SizeFieldMapper build(BuilderContext context) {
             setupFieldType(context);
-            if (context.indexCreatedVersion().onOrBefore(Version.V_5_0_0_alpha4)) {
-                // Make sure that the doc_values are disabled on indices created before V_5_0_0_alpha4
-                fieldType.setHasDocValues(false);
-            }
             return new SizeFieldMapper(enabledState, fieldType, context.indexSettings());
         }
     }

@@ -908,9 +908,6 @@ public class FullClusterRestartIT extends ESRestTestCase {
     private void checkSnapshot(String snapshotName, int count, Version tookOnVersion) throws IOException {
         // Check the snapshot metadata, especially the version
         Request listSnapshotRequest = new Request("GET", "/_snapshot/repo/" + snapshotName);
-        if (false == (runningAgainstOldCluster && oldClusterVersion.before(Version.V_5_5_0))) {
-            listSnapshotRequest.addParameter("verbose", "true");
-        }
         Map<String, Object> listSnapshotResponse = entityAsMap(client().performRequest(listSnapshotRequest));
         assertEquals(singletonList(snapshotName), XContentMapValues.extractValue("snapshots.snapshot", listSnapshotResponse));
         assertEquals(singletonList("SUCCESS"), XContentMapValues.extractValue("snapshots.state", listSnapshotResponse));

@@ -44,7 +44,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -137,17 +136,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
         super(in.readOptionalString(), in.readException());
         readStackTrace(this, in);
         headers.putAll(in.readMapOfLists(StreamInput::readString, StreamInput::readString));
-        if (in.getVersion().onOrAfter(Version.V_5_3_0)) {
-            metadata.putAll(in.readMapOfLists(StreamInput::readString, StreamInput::readString));
-        } else {
-            for (Iterator<Map.Entry<String, List<String>>> iterator = headers.entrySet().iterator(); iterator.hasNext(); ) {
-                Map.Entry<String, List<String>> header = iterator.next();
-                if (header.getKey().startsWith("es.")) {
-                    metadata.put(header.getKey(), header.getValue());
-                    iterator.remove();
-                }
-            }
-        }
+        metadata.putAll(in.readMapOfLists(StreamInput::readString, StreamInput::readString));
     }

     /**
@@ -287,15 +276,8 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
         out.writeOptionalString(this.getMessage());
         out.writeException(this.getCause());
         writeStackTraces(this, out);
-        if (out.getVersion().onOrAfter(Version.V_5_3_0)) {
-            out.writeMapOfLists(headers, StreamOutput::writeString, StreamOutput::writeString);
-            out.writeMapOfLists(metadata, StreamOutput::writeString, StreamOutput::writeString);
-        } else {
-            Map<String, List<String>> finalHeaders = new HashMap<>(headers.size() + metadata.size());
-            finalHeaders.putAll(headers);
-            finalHeaders.putAll(metadata);
-            out.writeMapOfLists(finalHeaders, StreamOutput::writeString, StreamOutput::writeString);
-        }
+        out.writeMapOfLists(headers, StreamOutput::writeString, StreamOutput::writeString);
+        out.writeMapOfLists(metadata, StreamOutput::writeString, StreamOutput::writeString);
     }

     public static ElasticsearchException readException(StreamInput input, int id) throws IOException {
@@ -1018,11 +1000,11 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
         STATUS_EXCEPTION(org.elasticsearch.ElasticsearchStatusException.class, org.elasticsearch.ElasticsearchStatusException::new, 145,
             UNKNOWN_VERSION_ADDED),
         TASK_CANCELLED_EXCEPTION(org.elasticsearch.tasks.TaskCancelledException.class,
-            org.elasticsearch.tasks.TaskCancelledException::new, 146, Version.V_5_1_1),
+            org.elasticsearch.tasks.TaskCancelledException::new, 146, UNKNOWN_VERSION_ADDED),
         SHARD_LOCK_OBTAIN_FAILED_EXCEPTION(org.elasticsearch.env.ShardLockObtainFailedException.class,
-            org.elasticsearch.env.ShardLockObtainFailedException::new, 147, Version.V_5_0_2),
+            org.elasticsearch.env.ShardLockObtainFailedException::new, 147, UNKNOWN_VERSION_ADDED),
         UNKNOWN_NAMED_OBJECT_EXCEPTION(org.elasticsearch.common.xcontent.UnknownNamedObjectException.class,
-            org.elasticsearch.common.xcontent.UnknownNamedObjectException::new, 148, Version.V_5_2_0),
+            org.elasticsearch.common.xcontent.UnknownNamedObjectException::new, 148, UNKNOWN_VERSION_ADDED),
         TOO_MANY_BUCKETS_EXCEPTION(MultiBucketConsumerService.TooManyBucketsException.class,
             MultiBucketConsumerService.TooManyBucketsException::new, 149,
             Version.V_7_0_0_alpha1);

@@ -43,87 +43,6 @@ public class Version implements Comparable<Version>, ToXContentFragment {
      * values below 25 are for alpha builder (since 5.0), and above 25 and below 50 are beta builds, and below 99 are RC builds, with 99
      * indicating a release the (internal) format of the id is there so we can easily do after/before checks on the id
      */
-    public static final int V_5_0_0_alpha1_ID = 5000001;
-    public static final Version V_5_0_0_alpha1 = new Version(V_5_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
-    public static final int V_5_0_0_alpha2_ID = 5000002;
-    public static final Version V_5_0_0_alpha2 = new Version(V_5_0_0_alpha2_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
-    public static final int V_5_0_0_alpha3_ID = 5000003;
-    public static final Version V_5_0_0_alpha3 = new Version(V_5_0_0_alpha3_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
-    public static final int V_5_0_0_alpha4_ID = 5000004;
-    public static final Version V_5_0_0_alpha4 = new Version(V_5_0_0_alpha4_ID, org.apache.lucene.util.Version.LUCENE_6_1_0);
-    public static final int V_5_0_0_alpha5_ID = 5000005;
-    public static final Version V_5_0_0_alpha5 = new Version(V_5_0_0_alpha5_ID, org.apache.lucene.util.Version.LUCENE_6_1_0);
-    public static final int V_5_0_0_beta1_ID = 5000026;
-    public static final Version V_5_0_0_beta1 = new Version(V_5_0_0_beta1_ID, org.apache.lucene.util.Version.LUCENE_6_2_0);
-    public static final int V_5_0_0_rc1_ID = 5000051;
-    public static final Version V_5_0_0_rc1 = new Version(V_5_0_0_rc1_ID, org.apache.lucene.util.Version.LUCENE_6_2_0);
-    public static final int V_5_0_0_ID = 5000099;
-    public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_2_0);
-    public static final int V_5_0_1_ID = 5000199;
-    public static final Version V_5_0_1 = new Version(V_5_0_1_ID, org.apache.lucene.util.Version.LUCENE_6_2_1);
-    public static final int V_5_0_2_ID = 5000299;
-    public static final Version V_5_0_2 = new Version(V_5_0_2_ID, org.apache.lucene.util.Version.LUCENE_6_2_1);
-    // no version constant for 5.1.0 due to inadvertent release
-    public static final int V_5_1_1_ID = 5010199;
-    public static final Version V_5_1_1 = new Version(V_5_1_1_ID, org.apache.lucene.util.Version.LUCENE_6_3_0);
-    public static final int V_5_1_2_ID = 5010299;
-    public static final Version V_5_1_2 = new Version(V_5_1_2_ID, org.apache.lucene.util.Version.LUCENE_6_3_0);
-    public static final int V_5_2_0_ID = 5020099;
-    public static final Version V_5_2_0 = new Version(V_5_2_0_ID, org.apache.lucene.util.Version.LUCENE_6_4_0);
-    public static final int V_5_2_1_ID = 5020199;
-    public static final Version V_5_2_1 = new Version(V_5_2_1_ID, org.apache.lucene.util.Version.LUCENE_6_4_1);
-    public static final int V_5_2_2_ID = 5020299;
-    public static final Version V_5_2_2 = new Version(V_5_2_2_ID, org.apache.lucene.util.Version.LUCENE_6_4_1);
-    public static final int V_5_3_0_ID = 5030099;
-    public static final Version V_5_3_0 = new Version(V_5_3_0_ID, org.apache.lucene.util.Version.LUCENE_6_4_1);
-    public static final int V_5_3_1_ID = 5030199;
-    public static final Version V_5_3_1 = new Version(V_5_3_1_ID, org.apache.lucene.util.Version.LUCENE_6_4_2);
-    public static final int V_5_3_2_ID = 5030299;
-    public static final Version V_5_3_2 = new Version(V_5_3_2_ID, org.apache.lucene.util.Version.LUCENE_6_4_2);
-    public static final int V_5_3_3_ID = 5030399;
-    public static final Version V_5_3_3 = new Version(V_5_3_3_ID, org.apache.lucene.util.Version.LUCENE_6_4_2);
-    public static final int V_5_4_0_ID = 5040099;
-    public static final Version V_5_4_0 = new Version(V_5_4_0_ID, org.apache.lucene.util.Version.LUCENE_6_5_0);
-    public static final int V_5_4_1_ID = 5040199;
-    public static final Version V_5_4_1 = new Version(V_5_4_1_ID, org.apache.lucene.util.Version.LUCENE_6_5_1);
-    public static final int V_5_4_2_ID = 5040299;
-    public static final Version V_5_4_2 = new Version(V_5_4_2_ID, org.apache.lucene.util.Version.LUCENE_6_5_1);
-    public static final int V_5_4_3_ID = 5040399;
-    public static final Version V_5_4_3 = new Version(V_5_4_3_ID, org.apache.lucene.util.Version.LUCENE_6_5_1);
-    public static final int V_5_5_0_ID = 5050099;
-    public static final Version V_5_5_0 = new Version(V_5_5_0_ID, org.apache.lucene.util.Version.LUCENE_6_6_0);
-    public static final int V_5_5_1_ID = 5050199;
-    public static final Version V_5_5_1 = new Version(V_5_5_1_ID, org.apache.lucene.util.Version.LUCENE_6_6_0);
-    public static final int V_5_5_2_ID = 5050299;
-    public static final Version V_5_5_2 = new Version(V_5_5_2_ID, org.apache.lucene.util.Version.LUCENE_6_6_0);
-    public static final int V_5_5_3_ID = 5050399;
-    public static final Version V_5_5_3 = new Version(V_5_5_3_ID, org.apache.lucene.util.Version.LUCENE_6_6_0);
-    public static final int V_5_6_0_ID = 5060099;
-    public static final Version V_5_6_0 = new Version(V_5_6_0_ID, org.apache.lucene.util.Version.LUCENE_6_6_0);
-    public static final int V_5_6_1_ID = 5060199;
-    public static final Version V_5_6_1 = new Version(V_5_6_1_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
-    public static final int V_5_6_2_ID = 5060299;
-    public static final Version V_5_6_2 = new Version(V_5_6_2_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
-    public static final int V_5_6_3_ID = 5060399;
-    public static final Version V_5_6_3 = new Version(V_5_6_3_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
-    public static final int V_5_6_4_ID = 5060499;
-    public static final Version V_5_6_4 = new Version(V_5_6_4_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
-    public static final int V_5_6_5_ID = 5060599;
-    public static final Version V_5_6_5 = new Version(V_5_6_5_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
-    public static final int V_5_6_6_ID = 5060699;
-    public static final Version V_5_6_6 = new Version(V_5_6_6_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
-    public static final int V_5_6_7_ID = 5060799;
-    public static final Version V_5_6_7 = new Version(V_5_6_7_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
-    public static final int V_5_6_8_ID = 5060899;
-    public static final Version V_5_6_8 = new Version(V_5_6_8_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
-    public static final int V_5_6_9_ID = 5060999;
-    public static final Version V_5_6_9 = new Version(V_5_6_9_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
-    public static final int V_5_6_10_ID = 5061099;
-    public static final Version V_5_6_10 = new Version(V_5_6_10_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
-    public static final int V_5_6_11_ID = 5061199;
-    public static final Version V_5_6_11 = new Version(V_5_6_11_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
-    public static final int V_5_6_12_ID = 5061299;
-    public static final Version V_5_6_12 = new Version(V_5_6_12_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
     public static final int V_6_0_0_alpha1_ID = 6000001;
     public static final Version V_6_0_0_alpha1 =
         new Version(V_6_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_0_0);
@@ -248,86 +167,6 @@ public class Version implements Comparable<Version>, ToXContentFragment {
                 return V_6_0_0_alpha2;
             case V_6_0_0_alpha1_ID:
                 return V_6_0_0_alpha1;
-            case V_5_6_12_ID:
-                return V_5_6_12;
-            case V_5_6_11_ID:
-                return V_5_6_11;
-            case V_5_6_10_ID:
-                return V_5_6_10;
-            case V_5_6_9_ID:
-                return V_5_6_9;
-            case V_5_6_8_ID:
-                return V_5_6_8;
-            case V_5_6_7_ID:
-                return V_5_6_7;
-            case V_5_6_6_ID:
-                return V_5_6_6;
-            case V_5_6_5_ID:
-                return V_5_6_5;
-            case V_5_6_4_ID:
-                return V_5_6_4;
-            case V_5_6_3_ID:
-                return V_5_6_3;
-            case V_5_6_2_ID:
-                return V_5_6_2;
-            case V_5_6_1_ID:
-                return V_5_6_1;
-            case V_5_6_0_ID:
-                return V_5_6_0;
-            case V_5_5_3_ID:
-                return V_5_5_3;
-            case V_5_5_2_ID:
-                return V_5_5_2;
-            case V_5_5_1_ID:
-                return V_5_5_1;
-            case V_5_5_0_ID:
-                return V_5_5_0;
-            case V_5_4_3_ID:
-                return V_5_4_3;
-            case V_5_4_2_ID:
-                return V_5_4_2;
-            case V_5_4_1_ID:
-                return V_5_4_1;
-            case V_5_4_0_ID:
-                return V_5_4_0;
-            case V_5_3_3_ID:
-                return V_5_3_3;
-            case V_5_3_2_ID:
-                return V_5_3_2;
-            case V_5_3_1_ID:
-                return V_5_3_1;
-            case V_5_3_0_ID:
-                return V_5_3_0;
-            case V_5_2_2_ID:
-                return V_5_2_2;
-            case V_5_2_1_ID:
-                return V_5_2_1;
-            case V_5_2_0_ID:
-                return V_5_2_0;
-            case V_5_1_2_ID:
-                return V_5_1_2;
-            case V_5_1_1_ID:
-                return V_5_1_1;
-            case V_5_0_2_ID:
-                return V_5_0_2;
-            case V_5_0_1_ID:
-                return V_5_0_1;
-            case V_5_0_0_ID:
-                return V_5_0_0;
-            case V_5_0_0_rc1_ID:
-                return V_5_0_0_rc1;
-            case V_5_0_0_beta1_ID:
-                return V_5_0_0_beta1;
-            case V_5_0_0_alpha5_ID:
-                return V_5_0_0_alpha5;
-            case V_5_0_0_alpha4_ID:
-                return V_5_0_0_alpha4;
-            case V_5_0_0_alpha3_ID:
-                return V_5_0_0_alpha3;
-            case V_5_0_0_alpha2_ID:
-                return V_5_0_0_alpha2;
-            case V_5_0_0_alpha1_ID:
-                return V_5_0_0_alpha1;
             default:
                 return new Version(id, org.apache.lucene.util.Version.LATEST);
         }
@@ -477,8 +316,11 @@ public class Version implements Comparable<Version>, ToXContentFragment {
      * is a beta or RC release then the version itself is returned.
      */
     public Version minimumCompatibilityVersion() {
-        if (major >= 6) {
-            // all major versions from 6 onwards are compatible with last minor series of the previous major
+        if (major == 6) {
+            // force the minimum compatibility for version 6 to 5.6 since we don't reference version 5 anymore
+            return Version.fromId(5060099);
+        } else if (major >= 7) {
+            // all major versions from 7 onwards are compatible with last minor series of the previous major
             Version bwcVersion = null;

             for (int i = DeclaredVersionsHolder.DECLARED_VERSIONS.size() - 1; i >= 0; i--) {

@@ -19,7 +19,6 @@

 package org.elasticsearch.action.admin.cluster.allocation;

-import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.master.MasterNodeRequest;
 import org.elasticsearch.common.Nullable;
@@ -69,7 +68,6 @@ public class ClusterAllocationExplainRequest extends MasterNodeRequest<ClusterAl

     public ClusterAllocationExplainRequest(StreamInput in) throws IOException {
         super(in);
-        checkVersion(in.getVersion());
         this.index = in.readOptionalString();
         this.shard = in.readOptionalVInt();
         this.primary = in.readOptionalBoolean();
@@ -94,7 +92,6 @@ public class ClusterAllocationExplainRequest extends MasterNodeRequest<ClusterAl

     @Override
     public void writeTo(StreamOutput out) throws IOException {
-        checkVersion(out.getVersion());
         super.writeTo(out);
         out.writeOptionalString(index);
         out.writeOptionalVInt(shard);
@@ -251,11 +248,4 @@ public class ClusterAllocationExplainRequest extends MasterNodeRequest<ClusterAl
     public void readFrom(StreamInput in) throws IOException {
         throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
     }
-
-    private void checkVersion(Version version) {
-        if (version.before(Version.V_5_2_0)) {
-            throw new IllegalArgumentException("cannot explain shards in a mixed-cluster with pre-" + Version.V_5_2_0 +
-                " nodes, node version [" + version + "]");
-        }
-    }
 }

@@ -19,7 +19,6 @@

 package org.elasticsearch.action.admin.cluster.shards;

-import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.support.IndicesOptions;
@@ -59,10 +58,6 @@ public class ClusterSearchShardsRequest extends MasterNodeReadRequest<ClusterSea
         routing = in.readOptionalString();
         preference = in.readOptionalString();

-        if (in.getVersion().onOrBefore(Version.V_5_1_1)) {
-            //types
-            in.readStringArray();
-        }
         indicesOptions = IndicesOptions.readIndicesOptions(in);
     }

@@ -78,10 +73,6 @@ public class ClusterSearchShardsRequest extends MasterNodeReadRequest<ClusterSea
         out.writeOptionalString(routing);
         out.writeOptionalString(preference);

-        if (out.getVersion().onOrBefore(Version.V_5_1_1)) {
-            //types
-            out.writeStringArray(Strings.EMPTY_ARRAY);
-        }
         indicesOptions.writeIndicesOptions(out);
     }

@@ -19,7 +19,6 @@

 package org.elasticsearch.action.admin.cluster.shards;

-import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -77,14 +76,12 @@ public class ClusterSearchShardsResponse extends ActionResponse implements ToXCo
         for (int i = 0; i < nodes.length; i++) {
             nodes[i] = new DiscoveryNode(in);
         }
-        if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
-            int size = in.readVInt();
-            indicesAndFilters = new HashMap<>();
-            for (int i = 0; i < size; i++) {
-                String index = in.readString();
-                AliasFilter aliasFilter = new AliasFilter(in);
-                indicesAndFilters.put(index, aliasFilter);
-            }
+        int size = in.readVInt();
+        indicesAndFilters = new HashMap<>();
+        for (int i = 0; i < size; i++) {
+            String index = in.readString();
+            AliasFilter aliasFilter = new AliasFilter(in);
+            indicesAndFilters.put(index, aliasFilter);
         }
     }

@@ -99,12 +96,10 @@ public class ClusterSearchShardsResponse extends ActionResponse implements ToXCo
         for (DiscoveryNode node : nodes) {
             node.writeTo(out);
         }
-        if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
-            out.writeVInt(indicesAndFilters.size());
-            for (Map.Entry<String, AliasFilter> entry : indicesAndFilters.entrySet()) {
-                out.writeString(entry.getKey());
-                entry.getValue().writeTo(out);
-            }
+        out.writeVInt(indicesAndFilters.size());
+        for (Map.Entry<String, AliasFilter> entry : indicesAndFilters.entrySet()) {
+            out.writeString(entry.getKey());
+            entry.getValue().writeTo(out);
         }
     }

@@ -28,7 +28,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import java.io.IOException;

 import static org.elasticsearch.action.ValidateActions.addValidationError;
-import static org.elasticsearch.snapshots.SnapshotInfo.VERBOSE_INTRODUCED;

 /**
  * Get snapshot request
@@ -75,9 +74,7 @@ public class GetSnapshotsRequest extends MasterNodeRequest<GetSnapshotsRequest>
         repository = in.readString();
         snapshots = in.readStringArray();
         ignoreUnavailable = in.readBoolean();
-        if (in.getVersion().onOrAfter(VERBOSE_INTRODUCED)) {
-            verbose = in.readBoolean();
-        }
+        verbose = in.readBoolean();
     }

     @Override
@@ -86,9 +83,7 @@ public class GetSnapshotsRequest extends MasterNodeRequest<GetSnapshotsRequest>
         out.writeString(repository);
         out.writeStringArray(snapshots);
         out.writeBoolean(ignoreUnavailable);
-        if (out.getVersion().onOrAfter(VERBOSE_INTRODUCED)) {
-            out.writeBoolean(verbose);
-        }
+        out.writeBoolean(verbose);
     }

     @Override

@@ -121,11 +121,7 @@ public class PutStoredScriptRequest extends AcknowledgedRequest<PutStoredScriptR
         }
         id = in.readOptionalString();
         content = in.readBytesReference();
-        if (in.getVersion().onOrAfter(Version.V_5_3_0)) {
-            xContentType = in.readEnum(XContentType.class);
-        } else {
-            xContentType = XContentHelper.xContentType(content);
-        }
+        xContentType = in.readEnum(XContentType.class);
         if (in.getVersion().onOrAfter(Version.V_6_0_0_alpha2)) {
             context = in.readOptionalString();
             source = new StoredScriptSource(in);
@@ -143,9 +139,7 @@ public class PutStoredScriptRequest extends AcknowledgedRequest<PutStoredScriptR
         }
         out.writeOptionalString(id);
         out.writeBytesReference(content);
-        if (out.getVersion().onOrAfter(Version.V_5_3_0)) {
-            out.writeEnum(xContentType);
-        }
+        out.writeEnum(xContentType);
         if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha2)) {
             out.writeOptionalString(context);
             source.writeTo(out);

@@ -18,7 +18,6 @@
 */
 package org.elasticsearch.action.admin.indices.analyze;

-import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
@@ -191,15 +190,10 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
         startOffset = in.readInt();
         endOffset = in.readInt();
         position = in.readVInt();
-        if (in.getVersion().onOrAfter(Version.V_5_2_0)) {
-            Integer len = in.readOptionalVInt();
-            if (len != null) {
-                positionLength = len;
-            } else {
-                positionLength = 1;
-            }
-        }
-        else {
+        Integer len = in.readOptionalVInt();
+        if (len != null) {
+            positionLength = len;
+        } else {
             positionLength = 1;
         }
         type = in.readOptionalString();
@@ -212,9 +206,7 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
         out.writeInt(startOffset);
         out.writeInt(endOffset);
         out.writeVInt(position);
-        if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
-            out.writeOptionalVInt(positionLength > 1 ? positionLength : null);
-        }
+        out.writeOptionalVInt(positionLength > 1 ? positionLength : null);
         out.writeOptionalString(type);
         out.writeMapWithConsistentOrder(attributes);
     }

@@ -19,7 +19,6 @@

 package org.elasticsearch.action.admin.indices.create;

-import org.elasticsearch.Version;
 import org.elasticsearch.action.support.master.ShardsAcknowledgedResponse;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -66,18 +65,14 @@ public class CreateIndexResponse extends ShardsAcknowledgedResponse {
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
         readShardsAcknowledged(in);
-        if (in.getVersion().onOrAfter(Version.V_5_6_0)) {
-            index = in.readString();
-        }
+        index = in.readString();
     }

     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
         writeShardsAcknowledged(out);
-        if (out.getVersion().onOrAfter(Version.V_5_6_0)) {
-            out.writeString(index);
-        }
+        out.writeString(index);
     }

     public String index() {

@@ -297,10 +297,6 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
         indicesOptions = IndicesOptions.readIndicesOptions(in);
         type = in.readOptionalString();
         source = in.readString();
-        if (in.getVersion().before(Version.V_5_3_0)) {
-            // we do not know the format from earlier versions so convert if necessary
-            source = XContentHelper.convertToJson(new BytesArray(source), false, false, XContentFactory.xContentType(source));
-        }
         if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
             in.readBoolean(); // updateAllTypes
         }

@@ -492,11 +492,6 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
         for (int i = 0; i < size; i++) {
             final String type = in.readString();
             String mappingSource = in.readString();
-            if (in.getVersion().before(Version.V_5_3_0)) {
-                // we do not know the incoming type so convert it if needed
-                mappingSource =
-                    XContentHelper.convertToJson(new BytesArray(mappingSource), false, false, XContentFactory.xContentType(mappingSource));
-            }
             mappings.put(type, mappingSource);
         }
         int customSize = in.readVInt();

@@ -120,11 +120,7 @@ public class QueryExplanation implements Streamable, ToXContentFragment {
         } else {
             index = in.readString();
         }
-        if (in.getVersion().onOrAfter(Version.V_5_4_0)) {
-            shard = in.readInt();
-        } else {
-            shard = RANDOM_SHARD;
-        }
+        shard = in.readInt();
         valid = in.readBoolean();
         explanation = in.readOptionalString();
         error = in.readOptionalString();
@@ -137,9 +133,7 @@ public class QueryExplanation implements Streamable, ToXContentFragment {
         } else {
             out.writeString(index);
         }
-        if (out.getVersion().onOrAfter(Version.V_5_4_0)) {
-            out.writeInt(shard);
-        }
+        out.writeInt(shard);
         out.writeBoolean(valid);
         out.writeOptionalString(explanation);
         out.writeOptionalString(error);

@@ -19,7 +19,6 @@

 package org.elasticsearch.action.admin.indices.validate.query;

-import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ValidateActions;
 import org.elasticsearch.action.support.IndicesOptions;
@@ -156,9 +155,7 @@ public class ValidateQueryRequest extends BroadcastRequest<ValidateQueryRequest>
         }
         explain = in.readBoolean();
         rewrite = in.readBoolean();
-        if (in.getVersion().onOrAfter(Version.V_5_4_0)) {
-            allShards = in.readBoolean();
-        }
+        allShards = in.readBoolean();
     }

     @Override
@@ -171,9 +168,7 @@ public class ValidateQueryRequest extends BroadcastRequest<ValidateQueryRequest>
         }
         out.writeBoolean(explain);
         out.writeBoolean(rewrite);
-        if (out.getVersion().onOrAfter(Version.V_5_4_0)) {
-            out.writeBoolean(allShards);
-        }
+        out.writeBoolean(allShards);
     }

     @Override

@@ -244,8 +244,8 @@ public class BulkItemResponse implements Streamable, StatusToXContentObject {
     }

     private static boolean supportsAbortedFlag(Version version) {
-        // The "aborted" flag was added for 5.5.3 and 5.6.0, but was not in 6.0.0-beta2
-        return version.after(Version.V_6_0_0_beta2) || (version.major == 5 && version.onOrAfter(Version.V_5_5_3));
+        // The "aborted" flag was not in 6.0.0-beta2
+        return version.after(Version.V_6_0_0_beta2);
     }

     /**
@@ -447,11 +447,7 @@ public class BulkItemResponse implements Streamable, StatusToXContentObject {
     @Override
     public void readFrom(StreamInput in) throws IOException {
         id = in.readVInt();
-        if (in.getVersion().onOrAfter(Version.V_5_3_0)) {
-            opType = OpType.fromId(in.readByte());
-        } else {
-            opType = OpType.fromString(in.readString());
-        }
+        opType = OpType.fromId(in.readByte());

         byte type = in.readByte();
         if (type == 0) {
@@ -474,11 +470,7 @@ public class BulkItemResponse implements Streamable, StatusToXContentObject {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeVInt(id);
-        if (out.getVersion().onOrAfter(Version.V_5_3_0)) {
-            out.writeByte(opType.getId());
-        } else {
-            out.writeString(opType.getLowercase());
-        }
+        out.writeByte(opType.getId());

         if (response == null) {
             out.writeByte((byte) 2);

@ -19,7 +19,6 @@
|
|||
|
||||
package org.elasticsearch.action.fieldcaps;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.IndicesRequest;
|
||||
|
@ -81,24 +80,18 @@ public final class FieldCapabilitiesRequest extends ActionRequest implements Ind
|
|||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
fields = in.readStringArray();
|
||||
if (in.getVersion().onOrAfter(Version.V_5_5_0)) {
|
||||
indices = in.readStringArray();
|
||||
indicesOptions = IndicesOptions.readIndicesOptions(in);
|
||||
mergeResults = in.readBoolean();
|
||||
} else {
|
||||
mergeResults = true;
|
||||
}
|
||||
indices = in.readStringArray();
|
||||
indicesOptions = IndicesOptions.readIndicesOptions(in);
|
||||
mergeResults = in.readBoolean();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeStringArray(fields);
|
||||
if (out.getVersion().onOrAfter(Version.V_5_5_0)) {
|
||||
out.writeStringArray(indices);
|
||||
indicesOptions.writeIndicesOptions(out);
|
||||
out.writeBoolean(mergeResults);
|
||||
}
|
||||
out.writeStringArray(indices);
|
||||
indicesOptions.writeIndicesOptions(out);
|
||||
out.writeBoolean(mergeResults);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
|
||||
package org.elasticsearch.action.fieldcaps;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
|
@ -95,11 +94,7 @@ public class FieldCapabilitiesResponse extends ActionResponse implements ToXCont
|
|||
super.readFrom(in);
|
||||
this.responseMap =
|
||||
in.readMap(StreamInput::readString, FieldCapabilitiesResponse::readField);
|
||||
if (in.getVersion().onOrAfter(Version.V_5_5_0)) {
|
||||
indexResponses = in.readList(FieldCapabilitiesIndexResponse::new);
|
||||
} else {
|
||||
indexResponses = Collections.emptyList();
|
||||
}
|
||||
indexResponses = in.readList(FieldCapabilitiesIndexResponse::new);
|
||||
}
|
||||
|
||||
private static Map<String, FieldCapabilities> readField(StreamInput in) throws IOException {
|
||||
|
@ -110,10 +105,7 @@ public class FieldCapabilitiesResponse extends ActionResponse implements ToXCont
|
|||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeMap(responseMap, StreamOutput::writeString, FieldCapabilitiesResponse::writeField);
|
||||
if (out.getVersion().onOrAfter(Version.V_5_5_0)) {
|
||||
out.writeList(indexResponses);
|
||||
}
|
||||
|
||||
out.writeList(indexResponses);
|
||||
}
|
||||
|
||||
private static void writeField(StreamOutput out,
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
|
||||
package org.elasticsearch.action.ingest;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.support.master.AcknowledgedRequest;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
|
@ -82,11 +81,7 @@ public class PutPipelineRequest extends AcknowledgedRequest<PutPipelineRequest>
|
|||
super.readFrom(in);
|
||||
id = in.readString();
|
||||
source = in.readBytesReference();
|
||||
if (in.getVersion().onOrAfter(Version.V_5_3_0)) {
|
||||
xContentType = in.readEnum(XContentType.class);
|
||||
} else {
|
||||
xContentType = XContentHelper.xContentType(source);
|
||||
}
|
||||
xContentType = in.readEnum(XContentType.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -94,9 +89,7 @@ public class PutPipelineRequest extends AcknowledgedRequest<PutPipelineRequest>
|
|||
super.writeTo(out);
|
||||
out.writeString(id);
|
||||
out.writeBytesReference(source);
|
||||
if (out.getVersion().onOrAfter(Version.V_5_3_0)) {
|
||||
out.writeEnum(xContentType);
|
||||
}
|
||||
out.writeEnum(xContentType);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
|
||||
package org.elasticsearch.action.ingest;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
|
@ -76,11 +75,7 @@ public class SimulatePipelineRequest extends ActionRequest implements ToXContent
|
|||
id = in.readOptionalString();
|
||||
verbose = in.readBoolean();
|
||||
source = in.readBytesReference();
|
||||
if (in.getVersion().onOrAfter(Version.V_5_3_0)) {
|
||||
xContentType = in.readEnum(XContentType.class);
|
||||
} else {
|
||||
xContentType = XContentHelper.xContentType(source);
|
||||
}
|
||||
xContentType = in.readEnum(XContentType.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -123,9 +118,7 @@ public class SimulatePipelineRequest extends ActionRequest implements ToXContent
|
|||
out.writeOptionalString(id);
|
||||
out.writeBoolean(verbose);
|
||||
out.writeBytesReference(source);
|
||||
if (out.getVersion().onOrAfter(Version.V_5_3_0)) {
|
||||
out.writeEnum(xContentType);
|
||||
}
|
||||
out.writeEnum(xContentType);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -135,10 +135,8 @@ public final class SearchRequest extends ActionRequest implements IndicesRequest
|
|||
indicesOptions = IndicesOptions.readIndicesOptions(in);
|
||||
requestCache = in.readOptionalBoolean();
|
||||
batchedReduceSize = in.readVInt();
|
||||
if (in.getVersion().onOrAfter(Version.V_5_6_0)) {
|
||||
maxConcurrentShardRequests = in.readVInt();
|
||||
preFilterShardSize = in.readVInt();
|
||||
}
|
||||
maxConcurrentShardRequests = in.readVInt();
|
||||
preFilterShardSize = in.readVInt();
|
||||
if (in.getVersion().onOrAfter(Version.V_6_3_0)) {
|
||||
allowPartialSearchResults = in.readOptionalBoolean();
|
||||
}
|
||||
|
@ -160,10 +158,8 @@ public final class SearchRequest extends ActionRequest implements IndicesRequest
|
|||
indicesOptions.writeIndicesOptions(out);
|
||||
out.writeOptionalBoolean(requestCache);
|
||||
out.writeVInt(batchedReduceSize);
|
||||
if (out.getVersion().onOrAfter(Version.V_5_6_0)) {
|
||||
out.writeVInt(maxConcurrentShardRequests);
|
||||
out.writeVInt(preFilterShardSize);
|
||||
}
|
||||
out.writeVInt(maxConcurrentShardRequests);
|
||||
out.writeVInt(preFilterShardSize);
|
||||
if (out.getVersion().onOrAfter(Version.V_6_3_0)) {
|
||||
out.writeOptionalBoolean(allowPartialSearchResults);
|
||||
}
|
||||
|
|
|
@ -374,9 +374,7 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb
|
|||
}
|
||||
scrollId = in.readOptionalString();
|
||||
tookInMillis = in.readVLong();
|
||||
if (in.getVersion().onOrAfter(Version.V_5_6_0)) {
|
||||
skippedShards = in.readVInt();
|
||||
}
|
||||
skippedShards = in.readVInt();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -395,9 +393,7 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb
|
|||
}
|
||||
out.writeOptionalString(scrollId);
|
||||
out.writeVLong(tookInMillis);
|
||||
if(out.getVersion().onOrAfter(Version.V_5_6_0)) {
|
||||
out.writeVInt(skippedShards);
|
||||
}
|
||||
out.writeVInt(skippedShards);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
|
||||
package org.elasticsearch.action.search;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ActionListenerResponseHandler;
|
||||
import org.elasticsearch.action.IndicesRequest;
|
||||
|
@ -113,17 +112,8 @@ public class SearchTransportService extends AbstractComponent {
|
|||
|
||||
public void sendCanMatch(Transport.Connection connection, final ShardSearchTransportRequest request, SearchTask task, final
|
||||
ActionListener<CanMatchResponse> listener) {
|
||||
if (connection.getNode().getVersion().onOrAfter(Version.V_5_6_0)) {
|
||||
transportService.sendChildRequest(connection, QUERY_CAN_MATCH_NAME, request, task,
|
||||
TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, CanMatchResponse::new));
|
||||
} else {
|
||||
// this might look weird but if we are in a CrossClusterSearch environment we can get a connection
|
||||
// to a pre 5.latest node which is proxied by a 5.latest node under the hood since we are only compatible with 5.latest
|
||||
// instead of sending the request we shortcut it here and let the caller deal with this -- see #25704
|
||||
// also failing the request instead of returning a fake answer might trigger a retry on a replica which might be on a
|
||||
// compatible node
|
||||
throw new IllegalArgumentException("can_match is not supported on pre 5.6 nodes");
|
||||
}
|
||||
transportService.sendChildRequest(connection, QUERY_CAN_MATCH_NAME, request, task,
|
||||
TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, CanMatchResponse::new));
|
||||
}
|
||||
|
||||
public void sendClearAllScrollContexts(Transport.Connection connection, final ActionListener<TransportResponse> listener) {
|
||||
|
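With the pre-5.6 guard gone, sendCanMatch no longer throws for old cross-cluster connections; every wire-compatible peer now understands the can_match action. A hedged sketch of a call site after this change (the listener body and logger are illustrative, not code from this commit):

    // Hypothetical caller: no fallback path is needed any more, because a
    // pre-5.6 peer can no longer appear behind a wire-compatible connection.
    searchTransportService.sendCanMatch(connection, request, task,
        ActionListener.wrap(
            response -> logger.debug("can_match returned"),
            e -> logger.warn("can_match failed", e)));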
@@ -498,14 +498,10 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i

if (in.readBoolean()) {
    doc = in.readBytesReference();
    if (in.getVersion().onOrAfter(Version.V_5_3_0)) {
        xContentType = in.readEnum(XContentType.class);
    } else {
        xContentType = XContentHelper.xContentType(doc);
    }
    xContentType = in.readEnum(XContentType.class);
}
routing = in.readOptionalString();

if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
    in.readOptionalString(); // _parent
}

@@ -546,9 +542,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
out.writeBoolean(doc != null);
if (doc != null) {
    out.writeBytesReference(doc);
    if (out.getVersion().onOrAfter(Version.V_5_3_0)) {
        out.writeEnum(xContentType);
    }
    out.writeEnum(xContentType);
}
out.writeOptionalString(routing);
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {

@@ -40,8 +40,6 @@ import java.util.Objects;
public class SnapshotDeletionsInProgress extends AbstractNamedDiffable<Custom> implements Custom {

    public static final String TYPE = "snapshot_deletions";
    // the version where SnapshotDeletionsInProgress was introduced
    public static final Version VERSION_INTRODUCED = Version.V_5_2_0;

    // the list of snapshot deletion request entries
    private final List<Entry> entries;

@@ -135,7 +133,7 @@ public class SnapshotDeletionsInProgress extends AbstractNamedDiffable<Custom> i

@Override
public Version getMinimalSupportedVersion() {
    return VERSION_INTRODUCED;
    return Version.CURRENT.minimumCompatibilityVersion();
}

@Override
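Both cluster-state customs touched by this commit (here and PersistentTasksCustomMetaData further down) switch from a hard-coded introduction version to Version.CURRENT.minimumCompatibilityVersion(), which tracks the wire-compatibility floor automatically instead of going stale. An illustrative sketch of the relationship, assuming Elasticsearch's convention that major N speaks the wire format of the last minor of major N-1 (concrete numbers depend on the release being built):

    Version current = Version.CURRENT;                      // e.g. 7.0.0
    Version floor = current.minimumCompatibilityVersion();  // e.g. the last 6.x minor
    assert floor.major == current.major - 1;                // holds for an x.0.0 release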
@@ -48,12 +48,6 @@ import java.util.Map;
public class SnapshotsInProgress extends AbstractNamedDiffable<Custom> implements Custom {
    public static final String TYPE = "snapshots";

    // denotes an undefined repository state id, which will happen when receiving a cluster state with
    // a snapshot in progress from a pre 5.2.x node
    public static final long UNDEFINED_REPOSITORY_STATE_ID = -2L;
    // the version where repository state ids were introduced
    private static final Version REPOSITORY_ID_INTRODUCED_VERSION = Version.V_5_2_0;

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;

@@ -432,10 +426,7 @@ public class SnapshotsInProgress extends AbstractNamedDiffable<Custom> implement
        builder.put(shardId, new ShardSnapshotStatus(nodeId, shardState, reason));
    }
}
long repositoryStateId = UNDEFINED_REPOSITORY_STATE_ID;
if (in.getVersion().onOrAfter(REPOSITORY_ID_INTRODUCED_VERSION)) {
    repositoryStateId = in.readLong();
}
long repositoryStateId = in.readLong();
entries[i] = new Entry(snapshot,
    includeGlobalState,
    partial,

@@ -471,9 +462,7 @@ public class SnapshotsInProgress extends AbstractNamedDiffable<Custom> implement
        out.writeByte(shardEntry.value.state().value());
    }
}
if (out.getVersion().onOrAfter(REPOSITORY_ID_INTRODUCED_VERSION)) {
    out.writeLong(entry.repositoryStateId);
}
out.writeLong(entry.repositoryStateId);
}
}

@@ -19,7 +19,6 @@

package org.elasticsearch.cluster.block;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;

@@ -138,11 +137,7 @@ public class ClusterBlock implements Streamable, ToXContentFragment {
retryable = in.readBoolean();
disableStatePersistence = in.readBoolean();
status = RestStatus.readFrom(in);
if (in.getVersion().onOrAfter(Version.V_5_5_0)) {
    allowReleaseResources = in.readBoolean();
} else {
    allowReleaseResources = false;
}
allowReleaseResources = in.readBoolean();
}

@Override

@@ -156,9 +151,7 @@ public class ClusterBlock implements Streamable, ToXContentFragment {
out.writeBoolean(retryable);
out.writeBoolean(disableStatePersistence);
RestStatus.writeTo(out, status);
if (out.getVersion().onOrAfter(Version.V_5_5_0)) {
    out.writeBoolean(allowReleaseResources);
}
out.writeBoolean(allowReleaseResources);
}

@Override

@@ -20,7 +20,6 @@
package org.elasticsearch.cluster.routing.allocation;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.Nullable;

@@ -82,11 +81,7 @@ public class NodeAllocationResult implements ToXContentObject, Writeable, Compar
public NodeAllocationResult(StreamInput in) throws IOException {
    node = new DiscoveryNode(in);
    shardStoreInfo = in.readOptionalWriteable(ShardStoreInfo::new);
    if (in.getVersion().before(Version.V_5_2_1)) {
        canAllocateDecision = Decision.readFrom(in);
    } else {
        canAllocateDecision = in.readOptionalWriteable(Decision::readFrom);
    }
    canAllocateDecision = in.readOptionalWriteable(Decision::readFrom);
    nodeDecision = AllocationDecision.readFrom(in);
    weightRanking = in.readVInt();
}

@@ -95,15 +90,7 @@ public class NodeAllocationResult implements ToXContentObject, Writeable, Compar
public void writeTo(StreamOutput out) throws IOException {
    node.writeTo(out);
    out.writeOptionalWriteable(shardStoreInfo);
    if (out.getVersion().before(Version.V_5_2_1)) {
        if (canAllocateDecision == null) {
            Decision.NO.writeTo(out);
        } else {
            canAllocateDecision.writeTo(out);
        }
    } else {
        out.writeOptionalWriteable(canAllocateDecision);
    }
    out.writeOptionalWriteable(canAllocateDecision);
    nodeDecision.writeTo(out);
    out.writeVInt(weightRanking);
}

@@ -150,13 +150,7 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
 */
public InnerHitBuilder(StreamInput in) throws IOException {
    name = in.readOptionalString();
    if (in.getVersion().before(Version.V_5_5_0)) {
        in.readOptionalString();
        in.readOptionalString();
    }
    if (in.getVersion().onOrAfter(Version.V_5_2_0)) {
        ignoreUnmapped = in.readBoolean();
    }
    ignoreUnmapped = in.readBoolean();
    from = in.readVInt();
    size = in.readVInt();
    explain = in.readBoolean();

@@ -191,14 +185,6 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
        }
    }
    highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new);
    if (in.getVersion().before(Version.V_5_5_0)) {
        /**
         * this is needed for BWC with nodes pre 5.5
         */
        in.readNamedWriteable(QueryBuilder.class);
        boolean hasChildren = in.readBoolean();
        assert hasChildren == false;
    }
    if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
        this.innerCollapseBuilder = in.readOptionalWriteable(CollapseBuilder::new);
    }

@@ -206,9 +192,6 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {

@Override
public void writeTo(StreamOutput out) throws IOException {
    if (out.getVersion().before(Version.V_5_5_0)) {
        throw new IOException("Invalid output version, must >= " + Version.V_5_5_0.toString());
    }
    out.writeOptionalString(name);
    out.writeBoolean(ignoreUnmapped);
    out.writeVInt(from);

@@ -252,84 +235,6 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject {
    }
}

/**
 * BWC serialization for nested {@link InnerHitBuilder}.
 * Should only be used to send nested inner hits to nodes pre 5.5.
 */
protected void writeToNestedBWC(StreamOutput out, QueryBuilder query, String nestedPath) throws IOException {
    assert out.getVersion().before(Version.V_5_5_0) :
        "invalid output version, must be < " + Version.V_5_5_0.toString();
    writeToBWC(out, query, nestedPath, null);
}

/**
 * BWC serialization for collapsing {@link InnerHitBuilder}.
 * Should only be used to send collapsing inner hits to nodes pre 5.5.
 */
public void writeToCollapseBWC(StreamOutput out) throws IOException {
    assert out.getVersion().before(Version.V_5_5_0) :
        "invalid output version, must be < " + Version.V_5_5_0.toString();
    writeToBWC(out, new MatchAllQueryBuilder(), null, null);
}

/**
 * BWC serialization for parent/child {@link InnerHitBuilder}.
 * Should only be used to send hasParent or hasChild inner hits to nodes pre 5.5.
 */
public void writeToParentChildBWC(StreamOutput out, QueryBuilder query, String parentChildPath) throws IOException {
    assert(out.getVersion().before(Version.V_5_5_0)) :
        "invalid output version, must be < " + Version.V_5_5_0.toString();
    writeToBWC(out, query, null, parentChildPath);
}

private void writeToBWC(StreamOutput out,
                        QueryBuilder query,
                        String nestedPath,
                        String parentChildPath) throws IOException {
    out.writeOptionalString(name);
    if (nestedPath != null) {
        out.writeOptionalString(nestedPath);
        out.writeOptionalString(null);
    } else {
        out.writeOptionalString(null);
        out.writeOptionalString(parentChildPath);
    }
    if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
        out.writeBoolean(ignoreUnmapped);
    }
    out.writeVInt(from);
    out.writeVInt(size);
    out.writeBoolean(explain);
    out.writeBoolean(version);
    out.writeBoolean(trackScores);
    out.writeOptionalWriteable(storedFieldsContext);
    out.writeGenericValue(docValueFields == null
        ? null
        : docValueFields.stream().map(ff -> ff.field).collect(Collectors.toList()));
    boolean hasScriptFields = scriptFields != null;
    out.writeBoolean(hasScriptFields);
    if (hasScriptFields) {
        out.writeVInt(scriptFields.size());
        Iterator<ScriptField> iterator = scriptFields.stream()
            .sorted(Comparator.comparing(ScriptField::fieldName)).iterator();
        while (iterator.hasNext()) {
            iterator.next().writeTo(out);
        }
    }
    out.writeOptionalWriteable(fetchSourceContext);
    boolean hasSorts = sorts != null;
    out.writeBoolean(hasSorts);
    if (hasSorts) {
        out.writeVInt(sorts.size());
        for (SortBuilder<?> sort : sorts) {
            out.writeNamedWriteable(sort);
        }
    }
    out.writeOptionalWriteable(highlightBuilder);
    out.writeNamedWriteable(query);
    out.writeBoolean(false);
}

public String getName() {
    return name;
}
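With the asymmetric pre-5.5 writers (writeToNestedBWC, writeToCollapseBWC, writeToParentChildBWC and the private writeToBWC) deleted, InnerHitBuilder has a single serialization path again, so a plain round-trip exercises all of it. A minimal test-style sketch, assuming no named writeables such as sorts are set (those would need a NamedWriteableRegistry-aware stream):

    InnerHitBuilder original = new InnerHitBuilder("hits");
    original.setFrom(0).setSize(3);
    try (BytesStreamOutput out = new BytesStreamOutput()) {
        original.writeTo(out);                                      // the one remaining writer
        InnerHitBuilder copy = new InnerHitBuilder(out.bytes().streamInput());
        assert original.equals(copy);
    }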
@@ -26,7 +26,6 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.action.termvectors.MultiTermVectorsItemResponse;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;

@@ -47,7 +46,6 @@ import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;

@@ -220,11 +218,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
type = in.readOptionalString();
if (in.readBoolean()) {
    doc = (BytesReference) in.readGenericValue();
    if (in.getVersion().onOrAfter(Version.V_5_3_0)) {
        xContentType = in.readEnum(XContentType.class);
    } else {
        xContentType = XContentHelper.xContentType(doc);
    }
    xContentType = in.readEnum(XContentType.class);
} else {
    id = in.readString();
}

@@ -242,9 +236,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
out.writeBoolean(doc != null);
if (doc != null) {
    out.writeGenericValue(doc);
    if (out.getVersion().onOrAfter(Version.V_5_3_0)) {
        out.writeEnum(xContentType);
    }
    out.writeEnum(xContentType);
} else {
    out.writeString(id);
}

@@ -32,7 +32,6 @@ import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.join.ParentChildrenBlockJoinQuery;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;

@@ -103,15 +102,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
out.writeString(path);
out.writeVInt(scoreMode.ordinal());
out.writeNamedWriteable(query);
if (out.getVersion().before(Version.V_5_5_0)) {
    final boolean hasInnerHit = innerHitBuilder != null;
    out.writeBoolean(hasInnerHit);
    if (hasInnerHit) {
        innerHitBuilder.writeToNestedBWC(out, query, path);
    }
} else {
    out.writeOptionalWriteable(innerHitBuilder);
}
out.writeOptionalWriteable(innerHitBuilder);
out.writeBoolean(ignoreUnmapped);
}

@@ -175,9 +175,6 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
analyzer = in.readOptionalString();
quoteAnalyzer = in.readOptionalString();
quoteFieldSuffix = in.readOptionalString();
if (in.getVersion().before(Version.V_6_0_0_beta1)) {
    in.readBoolean(); // auto_generate_phrase_query
}
allowLeadingWildcard = in.readOptionalBoolean();
analyzeWildcard = in.readOptionalBoolean();
enablePositionIncrements = in.readBoolean();

@@ -186,27 +183,15 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
fuzzyMaxExpansions = in.readVInt();
fuzzyRewrite = in.readOptionalString();
phraseSlop = in.readVInt();
if (in.getVersion().before(Version.V_6_0_0_beta1)) {
    in.readBoolean(); // use_dismax
    tieBreaker = in.readFloat();
    type = DEFAULT_TYPE;
} else {
    type = MultiMatchQueryBuilder.Type.readFromStream(in);
    tieBreaker = in.readOptionalFloat();
}
type = MultiMatchQueryBuilder.Type.readFromStream(in);
tieBreaker = in.readOptionalFloat();

rewrite = in.readOptionalString();
minimumShouldMatch = in.readOptionalString();
lenient = in.readOptionalBoolean();
timeZone = in.readOptionalTimeZone();
escape = in.readBoolean();
maxDeterminizedStates = in.readVInt();
if (in.getVersion().onOrAfter(Version.V_5_1_1) && in.getVersion().before(Version.V_6_0_0_beta1)) {
    in.readBoolean(); // split_on_whitespace
    Boolean useAllField = in.readOptionalBoolean();
    if (useAllField != null && useAllField) {
        defaultField = "*";
    }
}
if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
    autoGenerateSynonymsPhraseQuery = in.readBoolean();
    fuzzyTranspositions = in.readBoolean();

@@ -226,9 +211,6 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
out.writeOptionalString(this.analyzer);
out.writeOptionalString(this.quoteAnalyzer);
out.writeOptionalString(this.quoteFieldSuffix);
if (out.getVersion().before(Version.V_6_0_0_beta1)) {
    out.writeBoolean(false); // auto_generate_phrase_query
}
out.writeOptionalBoolean(this.allowLeadingWildcard);
out.writeOptionalBoolean(this.analyzeWildcard);
out.writeBoolean(this.enablePositionIncrements);

@@ -237,24 +219,14 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
out.writeVInt(this.fuzzyMaxExpansions);
out.writeOptionalString(this.fuzzyRewrite);
out.writeVInt(this.phraseSlop);
if (out.getVersion().before(Version.V_6_0_0_beta1)) {
    out.writeBoolean(true); // use_dismax
    out.writeFloat(tieBreaker != null ? tieBreaker : 0.0f);
} else {
    type.writeTo(out);
    out.writeOptionalFloat(tieBreaker);
}
type.writeTo(out);
out.writeOptionalFloat(tieBreaker);
out.writeOptionalString(this.rewrite);
out.writeOptionalString(this.minimumShouldMatch);
out.writeOptionalBoolean(this.lenient);
out.writeOptionalTimeZone(timeZone);
out.writeBoolean(this.escape);
out.writeVInt(this.maxDeterminizedStates);
if (out.getVersion().onOrAfter(Version.V_5_1_1) && out.getVersion().before(Version.V_6_0_0_beta1)) {
    out.writeBoolean(false); // split_on_whitespace
    Boolean useAllFields = defaultField == null ? null : Regex.isMatchAllPattern(defaultField);
    out.writeOptionalBoolean(useAllFields);
}
if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
    out.writeBoolean(autoGenerateSynonymsPhraseQuery);
    out.writeBoolean(fuzzyTranspositions);

@@ -23,7 +23,6 @@ import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;

@@ -108,14 +107,12 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
if (formatString != null) {
    format = Joda.forPattern(formatString);
}
if (in.getVersion().onOrAfter(Version.V_5_2_0)) {
    String relationString = in.readOptionalString();
    if (relationString != null) {
        relation = ShapeRelation.getRelationByName(relationString);
        if (relation != null && !isRelationAllowed(relation)) {
            throw new IllegalArgumentException(
                "[range] query does not support relation [" + relationString + "]");
        }
String relationString = in.readOptionalString();
if (relationString != null) {
    relation = ShapeRelation.getRelationByName(relationString);
    if (relation != null && !isRelationAllowed(relation)) {
        throw new IllegalArgumentException(
            "[range] query does not support relation [" + relationString + "]");
    }
}
}

@@ -139,13 +136,11 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
    formatString = this.format.format();
}
out.writeOptionalString(formatString);
if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
    String relationString = null;
    if (this.relation != null) {
        relationString = this.relation.getRelationName();
    }
    out.writeOptionalString(relationString);
String relationString = null;
if (this.relation != null) {
    relationString = this.relation.getRelationName();
}
out.writeOptionalString(relationString);
}

/**

@@ -168,27 +168,11 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
flags = in.readInt();
analyzer = in.readOptionalString();
defaultOperator = Operator.readFromStream(in);
if (in.getVersion().before(Version.V_5_1_1)) {
    in.readBoolean(); // lowercase_expanded_terms
}
settings.lenient(in.readBoolean());
if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
    this.lenientSet = in.readBoolean();
}
this.lenientSet = in.readBoolean();
settings.analyzeWildcard(in.readBoolean());
if (in.getVersion().before(Version.V_5_1_1)) {
    in.readString(); // locale
}
minimumShouldMatch = in.readOptionalString();
if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
    settings.quoteFieldSuffix(in.readOptionalString());
    if (in.getVersion().before(Version.V_6_0_0_beta2)) {
        Boolean useAllFields = in.readOptionalBoolean();
        if (useAllFields != null && useAllFields) {
            useAllFields(true);
        }
    }
}
settings.quoteFieldSuffix(in.readOptionalString());
if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
    settings.autoGenerateSynonymsPhraseQuery(in.readBoolean());
    settings.fuzzyPrefixLength(in.readVInt());

@@ -208,28 +192,11 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
out.writeInt(flags);
out.writeOptionalString(analyzer);
defaultOperator.writeTo(out);
if (out.getVersion().before(Version.V_5_1_1)) {
    out.writeBoolean(true); // lowercase_expanded_terms
}
out.writeBoolean(settings.lenient());
if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
    out.writeBoolean(lenientSet);
}
out.writeBoolean(lenientSet);
out.writeBoolean(settings.analyzeWildcard());
if (out.getVersion().before(Version.V_5_1_1)) {
    out.writeString(Locale.ROOT.toLanguageTag()); // locale
}
out.writeOptionalString(minimumShouldMatch);
if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
    out.writeOptionalString(settings.quoteFieldSuffix());
    if (out.getVersion().before(Version.V_6_0_0_beta2)) {
        if (useAllFields()) {
            out.writeOptionalBoolean(true);
        } else {
            out.writeOptionalBoolean(null);
        }
    }
}
out.writeOptionalString(settings.quoteFieldSuffix());
if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
    out.writeBoolean(settings.autoGenerateSynonymsPhraseQuery());
    out.writeVInt(settings.fuzzyPrefixLength());

@@ -20,7 +20,6 @@
package org.elasticsearch.index.reindex;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

@@ -303,11 +302,7 @@ public class BulkByScrollTask extends CancellableTask {
}

public Status(StreamInput in) throws IOException {
    if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
        sliceId = in.readOptionalVInt();
    } else {
        sliceId = null;
    }
    sliceId = in.readOptionalVInt();
    total = in.readVLong();
    updated = in.readVLong();
    created = in.readVLong();

@@ -321,18 +316,12 @@ public class BulkByScrollTask extends CancellableTask {
requestsPerSecond = in.readFloat();
reasonCancelled = in.readOptionalString();
throttledUntil = in.readTimeValue();
if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
    sliceStatuses = in.readList(stream -> stream.readOptionalWriteable(StatusOrException::new));
} else {
    sliceStatuses = emptyList();
}
sliceStatuses = in.readList(stream -> stream.readOptionalWriteable(StatusOrException::new));
}

@Override
public void writeTo(StreamOutput out) throws IOException {
    if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
        out.writeOptionalVInt(sliceId);
    }
    out.writeOptionalVInt(sliceId);
    out.writeVLong(total);
    out.writeVLong(updated);
    out.writeVLong(created);

@@ -346,11 +335,9 @@ public class BulkByScrollTask extends CancellableTask {
out.writeFloat(requestsPerSecond);
out.writeOptionalString(reasonCancelled);
out.writeTimeValue(throttledUntil);
if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
    out.writeVInt(sliceStatuses.size());
    for (StatusOrException sliceStatus : sliceStatuses) {
        out.writeOptionalWriteable(sliceStatus);
    }
out.writeVInt(sliceStatuses.size());
for (StatusOrException sliceStatus : sliceStatuses) {
    out.writeOptionalWriteable(sliceStatus);
}
}

@@ -92,13 +92,8 @@ public class RemoteInfo implements Writeable {
    headers.put(in.readString(), in.readString());
}
this.headers = unmodifiableMap(headers);
if (in.getVersion().onOrAfter(Version.V_5_2_0)) {
    socketTimeout = in.readTimeValue();
    connectTimeout = in.readTimeValue();
} else {
    socketTimeout = DEFAULT_SOCKET_TIMEOUT;
    connectTimeout = DEFAULT_CONNECT_TIMEOUT;
}
socketTimeout = in.readTimeValue();
connectTimeout = in.readTimeValue();
if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
    pathPrefix = in.readOptionalString();
} else {

@@ -119,10 +114,8 @@ public class RemoteInfo implements Writeable {
    out.writeString(header.getKey());
    out.writeString(header.getValue());
}
if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
    out.writeTimeValue(socketTimeout);
    out.writeTimeValue(connectTimeout);
}
out.writeTimeValue(socketTimeout);
out.writeTimeValue(connectTimeout);
if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
    out.writeOptionalString(pathPrefix);
}

@@ -560,9 +560,6 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL
}

boolean includeNumDocs(Version version) {
    if (version.major == Version.V_5_6_8.major) {
        return version.onOrAfter(Version.V_5_6_8);
    }
    return version.onOrAfter(Version.V_6_2_2);
}

@@ -19,7 +19,6 @@

package org.elasticsearch.ingest;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.common.ParseField;

@@ -117,13 +116,7 @@ public final class PipelineConfiguration extends AbstractDiffable<PipelineConfig
}

public static PipelineConfiguration readFrom(StreamInput in) throws IOException {
    if (in.getVersion().onOrAfter(Version.V_5_3_0)) {
        return new PipelineConfiguration(in.readString(), in.readBytesReference(), in.readEnum(XContentType.class));
    } else {
        final String id = in.readString();
        final BytesReference config = in.readBytesReference();
        return new PipelineConfiguration(id, config, XContentHelper.xContentType(config));
    }
    return new PipelineConfiguration(in.readString(), in.readBytesReference(), in.readEnum(XContentType.class));
}

public static Diff<PipelineConfiguration> readDiffFrom(StreamInput in) throws IOException {

@@ -134,9 +127,7 @@ public final class PipelineConfiguration extends AbstractDiffable<PipelineConfig
public void writeTo(StreamOutput out) throws IOException {
    out.writeString(id);
    out.writeBytesReference(config);
    if (out.getVersion().onOrAfter(Version.V_5_3_0)) {
        out.writeEnum(xContentType);
    }
    out.writeEnum(xContentType);
}

@Override
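PipelineConfiguration now always carries the XContentType explicitly on the wire instead of re-sniffing it from the config bytes for old streams. A hedged round-trip sketch (the pipeline id and body are made up):

    BytesReference config = new BytesArray("{\"processors\": []}");
    PipelineConfiguration original = new PipelineConfiguration("my-pipeline", config, XContentType.JSON);
    try (BytesStreamOutput out = new BytesStreamOutput()) {
        original.writeTo(out);   // id, config bytes, then the explicit content type
        PipelineConfiguration copy = PipelineConfiguration.readFrom(out.bytes().streamInput());
    }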
@@ -52,11 +52,7 @@ public class OsStats implements Writeable, ToXContentFragment {
this.cpu = new Cpu(in);
this.mem = new Mem(in);
this.swap = new Swap(in);
if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
    this.cgroup = in.readOptionalWriteable(Cgroup::new);
} else {
    this.cgroup = null;
}
this.cgroup = in.readOptionalWriteable(Cgroup::new);
}

@Override

@@ -65,9 +61,7 @@ public class OsStats implements Writeable, ToXContentFragment {
cpu.writeTo(out);
mem.writeTo(out);
swap.writeTo(out);
if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
    out.writeOptionalWriteable(cgroup);
}
out.writeOptionalWriteable(cgroup);
}

public long getTimestamp() {

@@ -188,7 +188,7 @@ public final class PersistentTasksCustomMetaData extends AbstractNamedDiffable<M

@Override
public Version getMinimalSupportedVersion() {
    return Version.V_5_4_0;
    return Version.CURRENT.minimumCompatibilityVersion();
}

@Override

@@ -107,11 +107,7 @@ public class PluginInfo implements Writeable, ToXContentObject {
} else {
    extendedPlugins = Collections.emptyList();
}
if (in.getVersion().onOrAfter(Version.V_5_4_0)) {
    hasNativeController = in.readBoolean();
} else {
    hasNativeController = false;
}
hasNativeController = in.readBoolean();
if (in.getVersion().onOrAfter(Version.V_6_0_0_beta2) && in.getVersion().before(Version.V_6_3_0)) {
    /*
     * Elasticsearch versions in [6.0.0-beta2, 6.3.0) allowed plugins to specify that they require the keystore and this was

@@ -134,9 +130,7 @@ public class PluginInfo implements Writeable, ToXContentObject {
if (out.getVersion().onOrAfter(Version.V_6_2_0)) {
    out.writeStringList(extendedPlugins);
}
if (out.getVersion().onOrAfter(Version.V_5_4_0)) {
    out.writeBoolean(hasNativeController);
}
out.writeBoolean(hasNativeController);
if (out.getVersion().onOrAfter(Version.V_6_0_0_beta2) && out.getVersion().before(Version.V_6_3_0)) {
    /*
     * Elasticsearch versions in [6.0.0-beta2, 6.3.0) allowed plugins to specify that they require the keystore and this was

@@ -39,7 +39,6 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.SnapshotsInProgress;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;

@@ -719,7 +718,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
protected void writeIndexGen(final RepositoryData repositoryData, final long repositoryStateId) throws IOException {
    assert isReadOnly() == false; // can not write to a read only repository
    final long currentGen = latestIndexBlobId();
    if (repositoryStateId != SnapshotsInProgress.UNDEFINED_REPOSITORY_STATE_ID && currentGen != repositoryStateId) {
    if (currentGen != repositoryStateId) {
        // the index file was updated by a concurrent operation, so we were operating on stale
        // repository data
        throw new RepositoryException(metadata.name(), "concurrent modification of the index-N file, expected current generation [" +

@@ -19,7 +19,6 @@

package org.elasticsearch.script;

import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;

@@ -451,133 +450,24 @@ public final class Script implements ToXContentObject, Writeable {
 * Creates a {@link Script} read from an input stream.
 */
public Script(StreamInput in) throws IOException {
    // Version 5.3 allows lang to be an optional parameter for stored scripts and expects
    // options to be null for stored and file scripts.
    if (in.getVersion().onOrAfter(Version.V_5_3_0)) {
        this.type = ScriptType.readFrom(in);
        this.lang = in.readOptionalString();
        this.idOrCode = in.readString();
        @SuppressWarnings("unchecked")
        Map<String, String> options = (Map<String, String>)(Map)in.readMap();
        this.options = options;
        this.params = in.readMap();
    // Version 5.1 to 5.3 (exclusive) requires all Script members to be non-null and supports the potential
    // for more options than just XContentType. Reorders the read in contents to be in
    // same order as the constructor.
    } else if (in.getVersion().onOrAfter(Version.V_5_1_1)) {
        this.type = ScriptType.readFrom(in);
        String lang = in.readString();
        this.lang = this.type == ScriptType.STORED ? null : lang;

        this.idOrCode = in.readString();
        @SuppressWarnings("unchecked")
        Map<String, String> options = (Map<String, String>)(Map)in.readMap();

        if (this.type != ScriptType.INLINE && options.isEmpty()) {
            this.options = null;
        } else {
            this.options = options;
        }

        this.params = in.readMap();
    // Prior to version 5.1 the script members are read in certain cases as optional and given
    // default values when necessary. Also the only option supported is for XContentType.
    } else {
        this.idOrCode = in.readString();

        if (in.readBoolean()) {
            this.type = ScriptType.readFrom(in);
        } else {
            this.type = DEFAULT_SCRIPT_TYPE;
        }

        String lang = in.readOptionalString();

        if (lang == null) {
            this.lang = this.type == ScriptType.STORED ? null : DEFAULT_SCRIPT_LANG;
        } else {
            this.lang = lang;
        }

        Map<String, Object> params = in.readMap();

        if (params == null) {
            this.params = new HashMap<>();
        } else {
            this.params = params;
        }

        if (in.readBoolean()) {
            this.options = new HashMap<>();
            XContentType contentType = in.readEnum(XContentType.class);
            this.options.put(CONTENT_TYPE_OPTION, contentType.mediaType());
        } else if (type == ScriptType.INLINE) {
            options = new HashMap<>();
        } else {
            this.options = null;
        }
    }
    this.type = ScriptType.readFrom(in);
    this.lang = in.readOptionalString();
    this.idOrCode = in.readString();
    @SuppressWarnings("unchecked")
    Map<String, String> options = (Map<String, String>)(Map)in.readMap();
    this.options = options;
    this.params = in.readMap();
}

@Override
public void writeTo(StreamOutput out) throws IOException {
    // Version 5.3+ allows lang to be an optional parameter for stored scripts and expects
    // options to be null for stored and file scripts.
    if (out.getVersion().onOrAfter(Version.V_5_3_0)) {
        type.writeTo(out);
        out.writeOptionalString(lang);
        out.writeString(idOrCode);
        @SuppressWarnings("unchecked")
        Map<String, Object> options = (Map<String, Object>)(Map)this.options;
        out.writeMap(options);
        out.writeMap(params);
    // Version 5.1 to 5.3 (exclusive) requires all Script members to be non-null and supports the potential
    // for more options than just XContentType. Reorders the written out contents to be in
    // same order as the constructor.
    } else if (out.getVersion().onOrAfter(Version.V_5_1_1)) {
        type.writeTo(out);

        if (lang == null) {
            out.writeString("");
        } else {
            out.writeString(lang);
        }

        out.writeString(idOrCode);
        @SuppressWarnings("unchecked")
        Map<String, Object> options = (Map<String, Object>)(Map)this.options;

        if (options == null) {
            out.writeMap(new HashMap<>());
        } else {
            out.writeMap(options);
        }

        out.writeMap(params);
    // Prior to version 5.1 the Script members were possibly written as optional or null, though there is no case where a null
    // value wasn't equivalent to it's default value when actually compiling/executing a script. Meaning, there are no
    // backwards compatibility issues, and now there's enforced consistency. Also the only supported compiler
    // option was XContentType.
    } else {
        out.writeString(idOrCode);
        out.writeBoolean(true);
        type.writeTo(out);
        out.writeOptionalString(lang);

        if (params.isEmpty()) {
            out.writeMap(null);
        } else {
            out.writeMap(params);
        }

        if (options != null && options.containsKey(CONTENT_TYPE_OPTION)) {
            XContentType contentType = XContentType.fromMediaTypeOrFormat(options.get(CONTENT_TYPE_OPTION));
            out.writeBoolean(true);
            out.writeEnum(contentType);
        } else {
            out.writeBoolean(false);
        }
    }
    type.writeTo(out);
    out.writeOptionalString(lang);
    out.writeString(idOrCode);
    @SuppressWarnings("unchecked")
    Map<String, Object> options = (Map<String, Object>) (Map) this.options;
    out.writeMap(options);
    out.writeMap(params);
}

/**
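After this collapse there is exactly one wire layout for Script regardless of stream version: type, optional lang, idOrCode, options map, params map, in that order. A minimal round-trip sketch (the script source is illustrative):

    Script script = new Script(ScriptType.INLINE, "painless", "doc['field'].value",
        Collections.emptyMap());
    try (BytesStreamOutput out = new BytesStreamOutput()) {
        script.writeTo(out);
        Script copy = new Script(out.bytes().streamInput());
        assert script.equals(copy);
    }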
@@ -292,25 +292,7 @@ public final class ScriptMetaData implements MetaData.Custom, Writeable, ToXCont

for (int i = 0; i < size; i++) {
    String id = in.readString();

    // Prior to version 5.3 all scripts were stored using the deprecated namespace.
    // Split the id to find the language then use StoredScriptSource to parse the
    // expected BytesReference after which a new StoredScriptSource is created
    // with the appropriate language and options.
    if (in.getVersion().before(Version.V_5_3_0)) {
        int split = id.indexOf('#');

        if (split == -1) {
            throw new IllegalArgumentException("illegal stored script id [" + id + "], does not contain lang");
        } else {
            source = new StoredScriptSource(in);
            source = new StoredScriptSource(id.substring(0, split), source.getSource(), Collections.emptyMap());
        }
    // Version 5.3+ can just be parsed normally using StoredScriptSource.
    } else {
        source = new StoredScriptSource(in);
    }

    source = new StoredScriptSource(in);
    scripts.put(id, source);
}

@@ -319,34 +301,11 @@ public final class ScriptMetaData implements MetaData.Custom, Writeable, ToXCont

@Override
public void writeTo(StreamOutput out) throws IOException {
    // Version 5.3+ will output the contents of the scripts' Map using
    // StoredScriptSource to stored the language, code, and options.
    if (out.getVersion().onOrAfter(Version.V_5_3_0)) {
        out.writeVInt(scripts.size());
    out.writeVInt(scripts.size());

        for (Map.Entry<String, StoredScriptSource> entry : scripts.entrySet()) {
            out.writeString(entry.getKey());
            entry.getValue().writeTo(out);
        }
    // Prior to Version 5.3, stored scripts can only be read using the deprecated
    // namespace. Scripts using the deprecated namespace are first isolated in a
    // temporary Map, then written out. Since all scripts will be stored using the
    // deprecated namespace, no scripts will be lost.
    } else {
        Map<String, StoredScriptSource> filtered = new HashMap<>();

        for (Map.Entry<String, StoredScriptSource> entry : scripts.entrySet()) {
            if (entry.getKey().contains("#")) {
                filtered.put(entry.getKey(), entry.getValue());
            }
        }

        out.writeVInt(filtered.size());

        for (Map.Entry<String, StoredScriptSource> entry : filtered.entrySet()) {
            out.writeString(entry.getKey());
            entry.getValue().writeTo(out);
        }
    for (Map.Entry<String, StoredScriptSource> entry : scripts.entrySet()) {
        out.writeString(entry.getKey());
        entry.getValue().writeTo(out);
    }
}
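The deleted branches handled the deprecated pre-5.3 stored-script namespace, where the language was folded into the id as lang#id and recovered by splitting on '#'. For context, a tiny sketch of that legacy encoding (the id is made up):

    // Pre-5.3 stored-script ids carried the language inline: "painless#my-script".
    String legacyId = "painless#my-script";
    int split = legacyId.indexOf('#');
    String lang = legacyId.substring(0, split);      // "painless"
    String name = legacyId.substring(split + 1);     // "my-script"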
|
|
|
@ -19,7 +19,6 @@
|
|||
|
||||
package org.elasticsearch.search;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.OriginalIndices;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
|
@ -52,11 +51,7 @@ public final class SearchShardTarget implements Writeable, Comparable<SearchShar
|
|||
}
|
||||
shardId = ShardId.readShardId(in);
|
||||
this.originalIndices = null;
|
||||
if (in.getVersion().onOrAfter(Version.V_5_6_0)) {
|
||||
clusterAlias = in.readOptionalString();
|
||||
} else {
|
||||
clusterAlias = null;
|
||||
}
|
||||
clusterAlias = in.readOptionalString();
|
||||
}
|
||||
|
||||
public SearchShardTarget(String nodeId, ShardId shardId, String clusterAlias, OriginalIndices originalIndices) {
|
||||
|
@ -121,9 +116,7 @@ public final class SearchShardTarget implements Writeable, Comparable<SearchShar
|
|||
out.writeText(nodeId);
|
||||
}
|
||||
shardId.writeTo(out);
|
||||
if (out.getVersion().onOrAfter(Version.V_5_6_0)) {
|
||||
out.writeOptionalString(clusterAlias);
|
||||
}
|
||||
out.writeOptionalString(clusterAlias);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -36,7 +36,6 @@ import org.apache.lucene.util.automaton.CompiledAutomaton;
|
|||
import org.apache.lucene.util.automaton.Operations;
|
||||
import org.apache.lucene.util.automaton.RegExp;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -424,13 +423,8 @@ public class IncludeExclude implements Writeable, ToXContentFragment {
|
|||
} else {
|
||||
excludeValues = null;
|
||||
}
|
||||
if (in.getVersion().onOrAfter(Version.V_5_2_0)) {
|
||||
incNumPartitions = in.readVInt();
|
||||
incZeroBasedPartition = in.readVInt();
|
||||
} else {
|
||||
incNumPartitions = 0;
|
||||
incZeroBasedPartition = 0;
|
||||
}
|
||||
incNumPartitions = in.readVInt();
|
||||
incZeroBasedPartition = in.readVInt();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -457,10 +451,8 @@ public class IncludeExclude implements Writeable, ToXContentFragment {
|
|||
out.writeBytesRef(value);
|
||||
}
|
||||
}
|
||||
if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
|
||||
out.writeVInt(incNumPartitions);
|
||||
out.writeVInt(incZeroBasedPartition);
|
||||
}
|
||||
out.writeVInt(incNumPartitions);
|
||||
out.writeVInt(incZeroBasedPartition);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -248,9 +248,7 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
|
|||
profile = in.readBoolean();
|
||||
searchAfterBuilder = in.readOptionalWriteable(SearchAfterBuilder::new);
|
||||
sliceBuilder = in.readOptionalWriteable(SliceBuilder::new);
|
||||
if (in.getVersion().onOrAfter(Version.V_5_3_0)) {
|
||||
collapse = in.readOptionalWriteable(CollapseBuilder::new);
|
||||
}
|
||||
collapse = in.readOptionalWriteable(CollapseBuilder::new);
|
||||
if (in.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
|
||||
trackTotalHits = in.readBoolean();
|
||||
} else {
|
||||
|
@ -313,9 +311,7 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
|
|||
out.writeBoolean(profile);
|
||||
out.writeOptionalWriteable(searchAfterBuilder);
|
||||
out.writeOptionalWriteable(sliceBuilder);
|
||||
if (out.getVersion().onOrAfter(Version.V_5_3_0)) {
|
||||
out.writeOptionalWriteable(collapse);
|
||||
}
|
||||
out.writeOptionalWriteable(collapse);
|
||||
if (out.getVersion().onOrAfter(Version.V_6_0_0_beta1)) {
|
||||
out.writeBoolean(trackTotalHits);
|
||||
}
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
package org.elasticsearch.search.collapse;
|
||||
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
|
@ -94,31 +93,14 @@ public class CollapseBuilder implements Writeable, ToXContentObject {
|
|||
public CollapseBuilder(StreamInput in) throws IOException {
|
||||
this.field = in.readString();
|
||||
this.maxConcurrentGroupRequests = in.readVInt();
|
||||
if (in.getVersion().onOrAfter(Version.V_5_5_0)) {
|
||||
this.innerHits = in.readList(InnerHitBuilder::new);
|
||||
} else {
|
||||
InnerHitBuilder innerHitBuilder = in.readOptionalWriteable(InnerHitBuilder::new);
|
||||
if (innerHitBuilder != null) {
|
||||
this.innerHits = Collections.singletonList(innerHitBuilder);
|
||||
} else {
|
||||
this.innerHits = Collections.emptyList();
|
||||
}
|
||||
}
|
||||
this.innerHits = in.readList(InnerHitBuilder::new);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(field);
|
||||
out.writeVInt(maxConcurrentGroupRequests);
|
||||
if (out.getVersion().onOrAfter(Version.V_5_5_0)) {
|
||||
out.writeList(innerHits);
|
||||
} else {
|
||||
boolean hasInnerHit = innerHits.isEmpty() == false;
|
||||
out.writeBoolean(hasInnerHit);
|
||||
if (hasInnerHit) {
|
||||
innerHits.get(0).writeToCollapseBWC(out);
|
||||
}
|
||||
}
|
||||
out.writeList(innerHits);
|
||||
}
|
||||
|
||||
public static CollapseBuilder fromXContent(XContentParser parser) {
|
||||
|
|
|
@@ -21,7 +21,6 @@ package org.elasticsearch.search.fetch.subphase.highlight;
 
 import org.apache.lucene.search.highlight.SimpleFragmenter;
 import org.apache.lucene.search.highlight.SimpleSpanFragmenter;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;

@@ -152,17 +151,13 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB
         order(in.readOptionalWriteable(Order::readFromStream));
         highlightFilter(in.readOptionalBoolean());
         forceSource(in.readOptionalBoolean());
-        if (in.getVersion().onOrAfter(Version.V_5_4_0)) {
-            boundaryScannerType(in.readOptionalWriteable(BoundaryScannerType::readFromStream));
-        }
+        boundaryScannerType(in.readOptionalWriteable(BoundaryScannerType::readFromStream));
         boundaryMaxScan(in.readOptionalVInt());
         if (in.readBoolean()) {
             boundaryChars(in.readString().toCharArray());
         }
-        if (in.getVersion().onOrAfter(Version.V_5_4_0)) {
-            if (in.readBoolean()) {
-                boundaryScannerLocale(in.readString());
-            }
+        if (in.readBoolean()) {
+            boundaryScannerLocale(in.readString());
         }
         noMatchSize(in.readOptionalVInt());
         phraseLimit(in.readOptionalVInt());

@@ -191,21 +186,17 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB
         out.writeOptionalWriteable(order);
         out.writeOptionalBoolean(highlightFilter);
         out.writeOptionalBoolean(forceSource);
-        if (out.getVersion().onOrAfter(Version.V_5_4_0)) {
-            out.writeOptionalWriteable(boundaryScannerType);
-        }
+        out.writeOptionalWriteable(boundaryScannerType);
         out.writeOptionalVInt(boundaryMaxScan);
         boolean hasBounaryChars = boundaryChars != null;
         out.writeBoolean(hasBounaryChars);
         if (hasBounaryChars) {
             out.writeString(String.valueOf(boundaryChars));
         }
-        if (out.getVersion().onOrAfter(Version.V_5_4_0)) {
-            boolean hasBoundaryScannerLocale = boundaryScannerLocale != null;
-            out.writeBoolean(hasBoundaryScannerLocale);
-            if (hasBoundaryScannerLocale) {
-                out.writeString(boundaryScannerLocale.toLanguageTag());
-            }
+        boolean hasBoundaryScannerLocale = boundaryScannerLocale != null;
+        out.writeBoolean(hasBoundaryScannerLocale);
+        if (hasBoundaryScannerLocale) {
+            out.writeString(boundaryScannerLocale.toLanguageTag());
         }
         out.writeOptionalVInt(noMatchSize);
         out.writeOptionalVInt(phraseLimit);

@@ -35,7 +35,6 @@ import org.elasticsearch.search.Scroll;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 
 import java.io.IOException;
-import java.util.Optional;
 
 /**
  * Shard level search request that gets created and consumed on the local node.

@@ -213,25 +212,10 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
         source = in.readOptionalWriteable(SearchSourceBuilder::new);
         types = in.readStringArray();
         aliasFilter = new AliasFilter(in);
-        if (in.getVersion().onOrAfter(Version.V_5_2_0)) {
-            indexBoost = in.readFloat();
-        } else {
-            // Nodes < 5.2.0 doesn't send index boost. Read it from source.
-            if (source != null) {
-                Optional<SearchSourceBuilder.IndexBoost> boost = source.indexBoosts()
-                    .stream()
-                    .filter(ib -> ib.getIndex().equals(shardId.getIndexName()))
-                    .findFirst();
-                indexBoost = boost.isPresent() ? boost.get().getBoost() : 1.0f;
-            } else {
-                indexBoost = 1.0f;
-            }
-        }
+        indexBoost = in.readFloat();
         nowInMillis = in.readVLong();
         requestCache = in.readOptionalBoolean();
-        if (in.getVersion().onOrAfter(Version.V_5_6_0)) {
-            clusterAlias = in.readOptionalString();
-        }
+        clusterAlias = in.readOptionalString();
         if (in.getVersion().onOrAfter(Version.V_6_3_0)) {
             allowPartialSearchResults = in.readOptionalBoolean();
         }

@@ -254,16 +238,12 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
         out.writeOptionalWriteable(source);
         out.writeStringArray(types);
         aliasFilter.writeTo(out);
-        if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
-            out.writeFloat(indexBoost);
-        }
+        out.writeFloat(indexBoost);
         if (asKey == false) {
             out.writeVLong(nowInMillis);
         }
         out.writeOptionalBoolean(requestCache);
-        if (out.getVersion().onOrAfter(Version.V_5_6_0)) {
-            out.writeOptionalString(clusterAlias);
-        }
+        out.writeOptionalString(clusterAlias);
         if (out.getVersion().onOrAfter(Version.V_6_3_0)) {
             out.writeOptionalBoolean(allowPartialSearchResults);
         }

@@ -76,9 +76,7 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
     private static final String SUCCESSFUL_SHARDS = "successful_shards";
     private static final String INCLUDE_GLOBAL_STATE = "include_global_state";
 
-    private static final Version VERSION_INCOMPATIBLE_INTRODUCED = Version.V_5_2_0;
     private static final Version INCLUDE_GLOBAL_STATE_INTRODUCED = Version.V_6_2_0;
-    public static final Version VERBOSE_INTRODUCED = Version.V_5_5_0;
 
     private static final Comparator<SnapshotInfo> COMPARATOR =
         Comparator.comparing(SnapshotInfo::startTime).thenComparing(SnapshotInfo::snapshotId);

@@ -275,11 +273,7 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
             indicesListBuilder.add(in.readString());
         }
         indices = Collections.unmodifiableList(indicesListBuilder);
-        if (in.getVersion().onOrAfter(VERBOSE_INTRODUCED)) {
-            state = in.readBoolean() ? SnapshotState.fromValue(in.readByte()) : null;
-        } else {
-            state = SnapshotState.fromValue(in.readByte());
-        }
+        state = in.readBoolean() ? SnapshotState.fromValue(in.readByte()) : null;
         reason = in.readOptionalString();
         startTime = in.readVLong();
         endTime = in.readVLong();

@@ -295,11 +289,7 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
         } else {
             shardFailures = Collections.emptyList();
         }
-        if (in.getVersion().before(VERSION_INCOMPATIBLE_INTRODUCED)) {
-            version = Version.readVersion(in);
-        } else {
-            version = in.readBoolean() ? Version.readVersion(in) : null;
-        }
+        version = in.readBoolean() ? Version.readVersion(in) : null;
         if (in.getVersion().onOrAfter(INCLUDE_GLOBAL_STATE_INTRODUCED)) {
             includeGlobalState = in.readOptionalBoolean();
         }

@@ -681,19 +671,11 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
         for (String index : indices) {
             out.writeString(index);
         }
-        if (out.getVersion().onOrAfter(VERBOSE_INTRODUCED)) {
-            if (state != null) {
-                out.writeBoolean(true);
-                out.writeByte(state.value());
-            } else {
-                out.writeBoolean(false);
-            }
+        if (state != null) {
+            out.writeBoolean(true);
+            out.writeByte(state.value());
         } else {
-            if (out.getVersion().before(VERSION_INCOMPATIBLE_INTRODUCED) && state == SnapshotState.INCOMPATIBLE) {
-                out.writeByte(SnapshotState.FAILED.value());
-            } else {
-                out.writeByte(state.value());
-            }
+            out.writeBoolean(false);
         }
         out.writeOptionalString(reason);
         out.writeVLong(startTime);

@@ -704,19 +686,11 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
         for (SnapshotShardFailure failure : shardFailures) {
             failure.writeTo(out);
         }
-        if (out.getVersion().before(VERSION_INCOMPATIBLE_INTRODUCED)) {
-            Version versionToWrite = version;
-            if (versionToWrite == null) {
-                versionToWrite = Version.CURRENT;
-            }
-            Version.writeVersion(versionToWrite, out);
+        if (version != null) {
+            out.writeBoolean(true);
+            Version.writeVersion(version, out);
         } else {
-            if (version != null) {
-                out.writeBoolean(true);
-                Version.writeVersion(version, out);
-            } else {
-                out.writeBoolean(false);
-            }
+            out.writeBoolean(false);
         }
         if (out.getVersion().onOrAfter(INCLUDE_GLOBAL_STATE_INTRODUCED)) {
             out.writeOptionalBoolean(includeGlobalState);

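With the 5.x gates gone, SnapshotInfo reads and writes its optional `state` and `version` fields with the presence-flag encoding unconditionally. For reference, that flag-then-payload idiom as a self-contained sketch (plain java.io in place of StreamInput/StreamOutput; the int payload is a stand-in for the real types):

```java
import java.io.*;

// The "presence flag" idiom SnapshotInfo now uses unconditionally:
// one boolean says whether the optional payload follows it on the wire.
public class OptionalFieldSketch {
    static void writeOptionalInt(DataOutputStream out, Integer value) throws IOException {
        if (value != null) {
            out.writeBoolean(true);
            out.writeInt(value);
        } else {
            out.writeBoolean(false);
        }
    }

    static Integer readOptionalInt(DataInputStream in) throws IOException {
        // Mirrors the write side: flag first, payload only if present.
        return in.readBoolean() ? in.readInt() : null;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buf);
        writeOptionalInt(out, null);
        writeOptionalInt(out, 6020099);
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
        System.out.println(readOptionalInt(in)); // null
        System.out.println(readOptionalInt(in)); // 6020099
    }
}
```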
@@ -41,8 +41,6 @@ import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.breaker.CircuitBreakingException;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;

@@ -104,7 +102,6 @@ import java.nio.file.NotDirectoryException;
 import java.nio.file.Path;
 import java.nio.file.attribute.BasicFileAttributes;
 import java.util.Arrays;
-import java.util.Base64;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;

@@ -116,7 +113,6 @@ import static java.util.Collections.emptyMap;
 import static java.util.Collections.emptySet;
 import static java.util.Collections.singleton;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
-import static org.hamcrest.Matchers.instanceOf;
 
 public class ExceptionSerializationTests extends ESTestCase {
 

@@ -872,89 +868,12 @@ public class ExceptionSerializationTests extends ESTestCase {
     public void testShardLockObtainFailedException() throws IOException {
         ShardId shardId = new ShardId("foo", "_na_", 1);
         ShardLockObtainFailedException orig = new ShardLockObtainFailedException(shardId, "boom");
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT);
-        if (version.before(Version.V_5_0_2)) {
-            version = Version.V_5_0_2;
-        }
+        Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT);
         ShardLockObtainFailedException ex = serialize(orig, version);
         assertEquals(orig.getMessage(), ex.getMessage());
         assertEquals(orig.getShardId(), ex.getShardId());
     }
 
-    public void testBWCShardLockObtainFailedException() throws IOException {
-        ShardId shardId = new ShardId("foo", "_na_", 1);
-        ShardLockObtainFailedException orig = new ShardLockObtainFailedException(shardId, "boom");
-        Exception ex = serialize((Exception)orig, randomFrom(Version.V_5_0_0, Version.V_5_0_1));
-        assertThat(ex, instanceOf(NotSerializableExceptionWrapper.class));
-        assertEquals("shard_lock_obtain_failed_exception: [foo][1]: boom", ex.getMessage());
-    }
-
-    public void testBWCHeadersAndMetadata() throws IOException {
-        //this is a request serialized with headers only, no metadata as they were added in 5.3.0
-        BytesReference decoded = new BytesArray(Base64.getDecoder().decode
-            ("AQ10ZXN0ICBtZXNzYWdlACYtb3JnLmVsYXN0aWNzZWFyY2guRXhjZXB0aW9uU2VyaWFsaXphdGlvblRlc3RzASBFeGNlcHRpb25TZXJpYWxpemF0aW9uVG" +
-                "VzdHMuamF2YQR0ZXN03wYkc3VuLnJlZmxlY3QuTmF0aXZlTWV0aG9kQWNjZXNzb3JJbXBsAR1OYXRpdmVNZXRob2RBY2Nlc3NvckltcGwuamF2Y" +
-                "QdpbnZva2Uw/v///w8kc3VuLnJlZmxlY3QuTmF0aXZlTWV0aG9kQWNjZXNzb3JJbXBsAR1OYXRpdmVNZXRob2RBY2Nlc3NvckltcGwuamF2YQZp" +
-                "bnZva2U+KHN1bi5yZWZsZWN0LkRlbGVnYXRpbmdNZXRob2RBY2Nlc3NvckltcGwBIURlbGVnYXRpbmdNZXRob2RBY2Nlc3NvckltcGwuamF2YQZ" +
-                "pbnZva2UrGGphdmEubGFuZy5yZWZsZWN0Lk1ldGhvZAELTWV0aG9kLmphdmEGaW52b2tl8QMzY29tLmNhcnJvdHNlYXJjaC5yYW5kb21pemVkdG" +
-                "VzdGluZy5SYW5kb21pemVkUnVubmVyARVSYW5kb21pemVkUnVubmVyLmphdmEGaW52b2tlsQ01Y29tLmNhcnJvdHNlYXJjaC5yYW5kb21pemVkd" +
-                "GVzdGluZy5SYW5kb21pemVkUnVubmVyJDgBFVJhbmRvbWl6ZWRSdW5uZXIuamF2YQhldmFsdWF0ZYsHNWNvbS5jYXJyb3RzZWFyY2gucmFuZG9t" +
-                "aXplZHRlc3RpbmcuUmFuZG9taXplZFJ1bm5lciQ5ARVSYW5kb21pemVkUnVubmVyLmphdmEIZXZhbHVhdGWvBzZjb20uY2Fycm90c2VhcmNoLnJ" +
-                "hbmRvbWl6ZWR0ZXN0aW5nLlJhbmRvbWl6ZWRSdW5uZXIkMTABFVJhbmRvbWl6ZWRSdW5uZXIuamF2YQhldmFsdWF0Zb0HOWNvbS5jYXJyb3RzZW" +
-                "FyY2gucmFuZG9taXplZHRlc3RpbmcucnVsZXMuU3RhdGVtZW50QWRhcHRlcgEVU3RhdGVtZW50QWRhcHRlci5qYXZhCGV2YWx1YXRlJDVvcmcuY" +
-                "XBhY2hlLmx1Y2VuZS51dGlsLlRlc3RSdWxlU2V0dXBUZWFyZG93bkNoYWluZWQkMQEhVGVzdFJ1bGVTZXR1cFRlYXJkb3duQ2hhaW5lZC5qYXZh" +
-                "CGV2YWx1YXRlMTBvcmcuYXBhY2hlLmx1Y2VuZS51dGlsLkFic3RyYWN0QmVmb3JlQWZ0ZXJSdWxlJDEBHEFic3RyYWN0QmVmb3JlQWZ0ZXJSdWx" +
-                "lLmphdmEIZXZhbHVhdGUtMm9yZy5hcGFjaGUubHVjZW5lLnV0aWwuVGVzdFJ1bGVUaHJlYWRBbmRUZXN0TmFtZSQxAR5UZXN0UnVsZVRocmVhZE" +
-                "FuZFRlc3ROYW1lLmphdmEIZXZhbHVhdGUwN29yZy5hcGFjaGUubHVjZW5lLnV0aWwuVGVzdFJ1bGVJZ25vcmVBZnRlck1heEZhaWx1cmVzJDEBI" +
-                "1Rlc3RSdWxlSWdub3JlQWZ0ZXJNYXhGYWlsdXJlcy5qYXZhCGV2YWx1YXRlQCxvcmcuYXBhY2hlLmx1Y2VuZS51dGlsLlRlc3RSdWxlTWFya0Zh" +
-                "aWx1cmUkMQEYVGVzdFJ1bGVNYXJrRmFpbHVyZS5qYXZhCGV2YWx1YXRlLzljb20uY2Fycm90c2VhcmNoLnJhbmRvbWl6ZWR0ZXN0aW5nLnJ1bGV" +
-                "zLlN0YXRlbWVudEFkYXB0ZXIBFVN0YXRlbWVudEFkYXB0ZXIuamF2YQhldmFsdWF0ZSREY29tLmNhcnJvdHNlYXJjaC5yYW5kb21pemVkdGVzdG" +
-                "luZy5UaHJlYWRMZWFrQ29udHJvbCRTdGF0ZW1lbnRSdW5uZXIBFlRocmVhZExlYWtDb250cm9sLmphdmEDcnVu7wI0Y29tLmNhcnJvdHNlYXJja" +
-                "C5yYW5kb21pemVkdGVzdGluZy5UaHJlYWRMZWFrQ29udHJvbAEWVGhyZWFkTGVha0NvbnRyb2wuamF2YRJmb3JrVGltZW91dGluZ1Rhc2urBjZj" +
-                "b20uY2Fycm90c2VhcmNoLnJhbmRvbWl6ZWR0ZXN0aW5nLlRocmVhZExlYWtDb250cm9sJDMBFlRocmVhZExlYWtDb250cm9sLmphdmEIZXZhbHV" +
-                "hdGXOAzNjb20uY2Fycm90c2VhcmNoLnJhbmRvbWl6ZWR0ZXN0aW5nLlJhbmRvbWl6ZWRSdW5uZXIBFVJhbmRvbWl6ZWRSdW5uZXIuamF2YQ1ydW" +
-                "5TaW5nbGVUZXN0lAc1Y29tLmNhcnJvdHNlYXJjaC5yYW5kb21pemVkdGVzdGluZy5SYW5kb21pemVkUnVubmVyJDUBFVJhbmRvbWl6ZWRSdW5uZ" +
-                "XIuamF2YQhldmFsdWF0ZaIGNWNvbS5jYXJyb3RzZWFyY2gucmFuZG9taXplZHRlc3RpbmcuUmFuZG9taXplZFJ1bm5lciQ2ARVSYW5kb21pemVk" +
-                "UnVubmVyLmphdmEIZXZhbHVhdGXUBjVjb20uY2Fycm90c2VhcmNoLnJhbmRvbWl6ZWR0ZXN0aW5nLlJhbmRvbWl6ZWRSdW5uZXIkNwEVUmFuZG9" +
-                "taXplZFJ1bm5lci5qYXZhCGV2YWx1YXRl3wYwb3JnLmFwYWNoZS5sdWNlbmUudXRpbC5BYnN0cmFjdEJlZm9yZUFmdGVyUnVsZSQxARxBYnN0cm" +
-                "FjdEJlZm9yZUFmdGVyUnVsZS5qYXZhCGV2YWx1YXRlLTljb20uY2Fycm90c2VhcmNoLnJhbmRvbWl6ZWR0ZXN0aW5nLnJ1bGVzLlN0YXRlbWVud" +
-                "EFkYXB0ZXIBFVN0YXRlbWVudEFkYXB0ZXIuamF2YQhldmFsdWF0ZSQvb3JnLmFwYWNoZS5sdWNlbmUudXRpbC5UZXN0UnVsZVN0b3JlQ2xhc3NO" +
-                "YW1lJDEBG1Rlc3RSdWxlU3RvcmVDbGFzc05hbWUuamF2YQhldmFsdWF0ZSlOY29tLmNhcnJvdHNlYXJjaC5yYW5kb21pemVkdGVzdGluZy5ydWx" +
-                "lcy5Ob1NoYWRvd2luZ09yT3ZlcnJpZGVzT25NZXRob2RzUnVsZSQxAShOb1NoYWRvd2luZ09yT3ZlcnJpZGVzT25NZXRob2RzUnVsZS5qYXZhCG" +
-                "V2YWx1YXRlKE5jb20uY2Fycm90c2VhcmNoLnJhbmRvbWl6ZWR0ZXN0aW5nLnJ1bGVzLk5vU2hhZG93aW5nT3JPdmVycmlkZXNPbk1ldGhvZHNSd" +
-                "WxlJDEBKE5vU2hhZG93aW5nT3JPdmVycmlkZXNPbk1ldGhvZHNSdWxlLmphdmEIZXZhbHVhdGUoOWNvbS5jYXJyb3RzZWFyY2gucmFuZG9taXpl" +
-                "ZHRlc3RpbmcucnVsZXMuU3RhdGVtZW50QWRhcHRlcgEVU3RhdGVtZW50QWRhcHRlci5qYXZhCGV2YWx1YXRlJDljb20uY2Fycm90c2VhcmNoLnJ" +
-                "hbmRvbWl6ZWR0ZXN0aW5nLnJ1bGVzLlN0YXRlbWVudEFkYXB0ZXIBFVN0YXRlbWVudEFkYXB0ZXIuamF2YQhldmFsdWF0ZSQ5Y29tLmNhcnJvdH" +
-                "NlYXJjaC5yYW5kb21pemVkdGVzdGluZy5ydWxlcy5TdGF0ZW1lbnRBZGFwdGVyARVTdGF0ZW1lbnRBZGFwdGVyLmphdmEIZXZhbHVhdGUkM29yZ" +
-                "y5hcGFjaGUubHVjZW5lLnV0aWwuVGVzdFJ1bGVBc3NlcnRpb25zUmVxdWlyZWQkMQEfVGVzdFJ1bGVBc3NlcnRpb25zUmVxdWlyZWQuamF2YQhl" +
-                "dmFsdWF0ZTUsb3JnLmFwYWNoZS5sdWNlbmUudXRpbC5UZXN0UnVsZU1hcmtGYWlsdXJlJDEBGFRlc3RSdWxlTWFya0ZhaWx1cmUuamF2YQhldmF" +
-                "sdWF0ZS83b3JnLmFwYWNoZS5sdWNlbmUudXRpbC5UZXN0UnVsZUlnbm9yZUFmdGVyTWF4RmFpbHVyZXMkMQEjVGVzdFJ1bGVJZ25vcmVBZnRlck" +
-                "1heEZhaWx1cmVzLmphdmEIZXZhbHVhdGVAMW9yZy5hcGFjaGUubHVjZW5lLnV0aWwuVGVzdFJ1bGVJZ25vcmVUZXN0U3VpdGVzJDEBHVRlc3RSd" +
-                "WxlSWdub3JlVGVzdFN1aXRlcy5qYXZhCGV2YWx1YXRlNjljb20uY2Fycm90c2VhcmNoLnJhbmRvbWl6ZWR0ZXN0aW5nLnJ1bGVzLlN0YXRlbWVu" +
-                "dEFkYXB0ZXIBFVN0YXRlbWVudEFkYXB0ZXIuamF2YQhldmFsdWF0ZSREY29tLmNhcnJvdHNlYXJjaC5yYW5kb21pemVkdGVzdGluZy5UaHJlYWR" +
-                "MZWFrQ29udHJvbCRTdGF0ZW1lbnRSdW5uZXIBFlRocmVhZExlYWtDb250cm9sLmphdmEDcnVu7wIQamF2YS5sYW5nLlRocmVhZAELVGhyZWFkLm" +
-                "phdmEDcnVu6QUABAdoZWFkZXIyAQZ2YWx1ZTIKZXMuaGVhZGVyMwEGdmFsdWUzB2hlYWRlcjEBBnZhbHVlMQplcy5oZWFkZXI0AQZ2YWx1ZTQAA" +
-                "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +
-                "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +
-                "AAAAA"));
-
-        try (StreamInput in = decoded.streamInput()) {
-            //randomize the version across released and unreleased ones
-            Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
-                Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
-            in.setVersion(version);
-            ElasticsearchException exception = new ElasticsearchException(in);
-            assertEquals("test message", exception.getMessage());
-            //the headers received as part of a single set get split based on their prefix
-            assertEquals(2, exception.getHeaderKeys().size());
-            assertEquals("value1", exception.getHeader("header1").get(0));
-            assertEquals("value2", exception.getHeader("header2").get(0));
-            assertEquals(2, exception.getMetadataKeys().size());
-            assertEquals("value3", exception.getMetadata("es.header3").get(0));
-            assertEquals("value4", exception.getMetadata("es.header4").get(0));
-        }
-    }
-
     private static class UnknownException extends Exception {
         UnknownException(final String message, final Exception cause) {
             super(message, cause);

@@ -36,8 +36,8 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
-import static org.elasticsearch.Version.V_5_3_0;
-import static org.elasticsearch.Version.V_6_0_0_beta1;
+import static org.elasticsearch.Version.V_6_3_0;
+import static org.elasticsearch.Version.V_7_0_0_alpha1;
 import static org.elasticsearch.test.VersionUtils.allVersions;
 import static org.elasticsearch.test.VersionUtils.randomVersion;
 import static org.hamcrest.CoreMatchers.equalTo;

@@ -50,30 +50,30 @@ import static org.hamcrest.Matchers.sameInstance;
 public class VersionTests extends ESTestCase {
 
     public void testVersionComparison() throws Exception {
-        assertThat(V_5_3_0.before(V_6_0_0_beta1), is(true));
-        assertThat(V_5_3_0.before(V_5_3_0), is(false));
-        assertThat(V_6_0_0_beta1.before(V_5_3_0), is(false));
+        assertThat(V_6_3_0.before(V_7_0_0_alpha1), is(true));
+        assertThat(V_6_3_0.before(V_6_3_0), is(false));
+        assertThat(V_7_0_0_alpha1.before(V_6_3_0), is(false));
 
-        assertThat(V_5_3_0.onOrBefore(V_6_0_0_beta1), is(true));
-        assertThat(V_5_3_0.onOrBefore(V_5_3_0), is(true));
-        assertThat(V_6_0_0_beta1.onOrBefore(V_5_3_0), is(false));
+        assertThat(V_6_3_0.onOrBefore(V_7_0_0_alpha1), is(true));
+        assertThat(V_6_3_0.onOrBefore(V_6_3_0), is(true));
+        assertThat(V_7_0_0_alpha1.onOrBefore(V_6_3_0), is(false));
 
-        assertThat(V_5_3_0.after(V_6_0_0_beta1), is(false));
-        assertThat(V_5_3_0.after(V_5_3_0), is(false));
-        assertThat(V_6_0_0_beta1.after(V_5_3_0), is(true));
+        assertThat(V_6_3_0.after(V_7_0_0_alpha1), is(false));
+        assertThat(V_6_3_0.after(V_6_3_0), is(false));
+        assertThat(V_7_0_0_alpha1.after(V_6_3_0), is(true));
 
-        assertThat(V_5_3_0.onOrAfter(V_6_0_0_beta1), is(false));
-        assertThat(V_5_3_0.onOrAfter(V_5_3_0), is(true));
-        assertThat(V_6_0_0_beta1.onOrAfter(V_5_3_0), is(true));
+        assertThat(V_6_3_0.onOrAfter(V_7_0_0_alpha1), is(false));
+        assertThat(V_6_3_0.onOrAfter(V_6_3_0), is(true));
+        assertThat(V_7_0_0_alpha1.onOrAfter(V_6_3_0), is(true));
 
         assertTrue(Version.fromString("5.0.0-alpha2").onOrAfter(Version.fromString("5.0.0-alpha1")));
         assertTrue(Version.fromString("5.0.0").onOrAfter(Version.fromString("5.0.0-beta2")));
         assertTrue(Version.fromString("5.0.0-rc1").onOrAfter(Version.fromString("5.0.0-beta24")));
         assertTrue(Version.fromString("5.0.0-alpha24").before(Version.fromString("5.0.0-beta0")));
 
-        assertThat(V_5_3_0, is(lessThan(V_6_0_0_beta1)));
-        assertThat(V_5_3_0.compareTo(V_5_3_0), is(0));
-        assertThat(V_6_0_0_beta1, is(greaterThan(V_5_3_0)));
+        assertThat(V_6_3_0, is(lessThan(V_7_0_0_alpha1)));
+        assertThat(V_6_3_0.compareTo(V_6_3_0), is(0));
+        assertThat(V_7_0_0_alpha1, is(greaterThan(V_6_3_0)));
     }
 
     public void testMin() {

@@ -101,12 +101,12 @@ public class VersionTests extends ESTestCase {
     }
 
     public void testMinimumIndexCompatibilityVersion() {
-        assertEquals(Version.V_5_0_0, Version.V_6_0_0_beta1.minimumIndexCompatibilityVersion());
-        assertEquals(Version.fromId(2000099), Version.V_5_0_0.minimumIndexCompatibilityVersion());
+        assertEquals(Version.fromId(5000099), Version.V_6_0_0_beta1.minimumIndexCompatibilityVersion());
+        assertEquals(Version.fromId(2000099), Version.fromId(5000099).minimumIndexCompatibilityVersion());
         assertEquals(Version.fromId(2000099),
-            Version.V_5_1_1.minimumIndexCompatibilityVersion());
+            Version.fromId(5010000).minimumIndexCompatibilityVersion());
         assertEquals(Version.fromId(2000099),
-            Version.V_5_0_0_alpha1.minimumIndexCompatibilityVersion());
+            Version.fromId(5000001).minimumIndexCompatibilityVersion());
     }
 
     public void testVersionConstantPresent() {

@@ -160,31 +160,38 @@ public class VersionTests extends ESTestCase {
 
     public void testIndexCreatedVersion() {
         // an actual index has a IndexMetaData.SETTING_INDEX_UUID
-        final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_2,
-            Version.V_5_2_0, Version.V_6_0_0_beta1);
+        final Version version = Version.V_6_0_0_beta1;
         assertEquals(version, Version.indexCreated(Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "foo").put(IndexMetaData.SETTING_VERSION_CREATED, version).build()));
     }
 
     public void testMinCompatVersion() {
         Version prerelease = VersionUtils.getFirstVersion();
         assertThat(prerelease.minimumCompatibilityVersion(), equalTo(prerelease));
         Version major = Version.fromString("2.0.0");
         assertThat(Version.fromString("2.0.0").minimumCompatibilityVersion(), equalTo(major));
         assertThat(Version.fromString("2.2.0").minimumCompatibilityVersion(), equalTo(major));
         assertThat(Version.fromString("2.3.0").minimumCompatibilityVersion(), equalTo(major));
-        // from 6.0 on we are supporting the latest minor of the previous major... this might fail once we add a new version ie. 5.x is
+
+        Version major5x = Version.fromString("5.0.0");
+        assertThat(Version.fromString("5.0.0").minimumCompatibilityVersion(), equalTo(major5x));
+        assertThat(Version.fromString("5.2.0").minimumCompatibilityVersion(), equalTo(major5x));
+        assertThat(Version.fromString("5.3.0").minimumCompatibilityVersion(), equalTo(major5x));
+
+        Version major56x = Version.fromString("5.6.0");
+        assertThat(Version.V_6_5_0.minimumCompatibilityVersion(), equalTo(major56x));
+        assertThat(Version.V_6_3_1.minimumCompatibilityVersion(), equalTo(major56x));
+
+        // from 7.0 on we are supporting the latest minor of the previous major... this might fail once we add a new version ie. 5.x is
         // released since we need to bump the supported minor in Version#minimumCompatibilityVersion()
-        Version lastVersion = Version.V_5_6_0; // TODO: remove this once min compat version is a constant instead of method
-        assertEquals(lastVersion.major, Version.V_6_0_0_beta1.minimumCompatibilityVersion().major);
+        Version lastVersion = Version.V_6_5_0; // TODO: remove this once min compat version is a constant instead of method
+        assertEquals(lastVersion.major, Version.V_7_0_0_alpha1.minimumCompatibilityVersion().major);
         assertEquals("did you miss to bump the minor in Version#minimumCompatibilityVersion()",
-            lastVersion.minor, Version.V_6_0_0_beta1.minimumCompatibilityVersion().minor);
-        assertEquals(0, Version.V_6_0_0_beta1.minimumCompatibilityVersion().revision);
+            lastVersion.minor, Version.V_7_0_0_alpha1.minimumCompatibilityVersion().minor);
+        assertEquals(0, Version.V_7_0_0_alpha1.minimumCompatibilityVersion().revision);
     }
 
     public void testToString() {
         // with 2.0.beta we lowercase
         assertEquals("2.0.0-beta1", Version.fromString("2.0.0-beta1").toString());
-        assertEquals("5.0.0-alpha1", Version.V_5_0_0_alpha1.toString());
+        assertEquals("5.0.0-alpha1", Version.fromId(5000001).toString());
         assertEquals("2.3.0", Version.fromString("2.3.0").toString());
         assertEquals("0.90.0.Beta1", Version.fromString("0.90.0.Beta1").toString());
         assertEquals("1.0.0.Beta1", Version.fromString("1.0.0.Beta1").toString());

@@ -334,11 +341,11 @@ public class VersionTests extends ESTestCase {
 
     public void testIsCompatible() {
         assertTrue(isCompatible(Version.CURRENT, Version.CURRENT.minimumCompatibilityVersion()));
-        assertTrue(isCompatible(Version.V_5_6_0, Version.V_6_0_0_alpha2));
-        assertFalse(isCompatible(Version.fromId(2000099), Version.V_6_0_0_alpha2));
-        assertFalse(isCompatible(Version.fromId(2000099), Version.V_5_0_0));
-        assertFalse(isCompatible(Version.fromString("6.0.0"), Version.fromString("7.0.0")));
-        assertFalse(isCompatible(Version.fromString("6.0.0-alpha1"), Version.fromString("7.0.0")));
+        assertTrue(isCompatible(Version.V_6_5_0, Version.V_7_0_0_alpha1));
+        assertFalse(isCompatible(Version.fromId(2000099), Version.V_7_0_0_alpha1));
+        assertFalse(isCompatible(Version.fromId(2000099), Version.V_6_5_0));
+        assertFalse(isCompatible(Version.fromString("7.0.0"), Version.fromString("8.0.0")));
+        assertFalse(isCompatible(Version.fromString("7.0.0-alpha1"), Version.fromString("8.0.0")));
 
         final Version currentMajorVersion = Version.fromId(Version.CURRENT.major * 1000000 + 99);
         final Version currentOrNextMajorVersion;

@@ -373,8 +380,8 @@ public class VersionTests extends ESTestCase {
             isCompatible(VersionUtils.getPreviousMinorVersion(), currentOrNextMajorVersion),
             equalTo(isCompatible));
 
-        assertFalse(isCompatible(Version.V_5_0_0, Version.fromString("6.0.0")));
-        assertFalse(isCompatible(Version.V_5_0_0, Version.fromString("7.0.0")));
+        assertFalse(isCompatible(Version.fromId(5000099), Version.fromString("6.0.0")));
+        assertFalse(isCompatible(Version.fromId(5000099), Version.fromString("7.0.0")));
 
         Version a = randomVersion(random());
         Version b = randomVersion(random());

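Several assertions above trade removed constants for raw ids (`Version.fromId(5060099)`, `fromId(2000099)`, `fromId(5000001)`). As far as I can tell from the values this diff itself confirms (5060099 prints as 5.6.0, 5000001 as 5.0.0-alpha1), the id scheme is major * 1,000,000 + minor * 10,000 + revision * 100 + build, with build 99 for a GA release and low build numbers for prereleases. A quick self-contained check of that arithmetic:

```java
// Sketch of the numeric scheme behind Version.fromId(...): the trailing
// two digits distinguish prereleases (low values) from GA releases (99).
public class VersionIdSketch {
    static int id(int major, int minor, int revision, int build) {
        return major * 1_000_000 + minor * 10_000 + revision * 100 + build;
    }

    public static void main(String[] args) {
        System.out.println(id(5, 6, 0, 99)); // 5060099 -> 5.6.0, as used above
        System.out.println(id(2, 0, 0, 99)); // 2000099 -> 2.0.0
        System.out.println(id(5, 0, 0, 1));  // 5000001 -> 5.0.0-alpha1
    }
}
```

Encoding removed versions as bare ids keeps these tests compiling once the `V_5_*` constants are gone, without changing what they assert.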
@@ -49,7 +49,6 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import static com.carrotsearch.randomizedtesting.RandomizedTest.randomLongBetween;
 import static java.util.Collections.emptyMap;
 import static java.util.Collections.emptySet;
 

@@ -54,7 +54,7 @@ public class ClusterSearchShardsRequestTests extends ESTestCase {
             request.routing(routings);
         }
 
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT);
+        Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT);
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.setVersion(version);
             request.writeTo(out);

@@ -77,7 +77,7 @@ public class ClusterSearchShardsResponseTests extends ESTestCase {
         List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
         entries.addAll(searchModule.getNamedWriteables());
         NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(entries);
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT);
+        Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT);
         try(BytesStreamOutput out = new BytesStreamOutput()) {
             out.setVersion(version);
             clusterSearchShardsResponse.writeTo(out);

@@ -93,11 +93,7 @@ public class ClusterSearchShardsResponseTests extends ESTestCase {
                 assertEquals(clusterSearchShardsGroup.getShardId(), deserializedGroup.getShardId());
                 assertArrayEquals(clusterSearchShardsGroup.getShards(), deserializedGroup.getShards());
             }
-            if (version.onOrAfter(Version.V_5_1_1)) {
-                assertEquals(clusterSearchShardsResponse.getIndicesAndFilters(), deserialized.getIndicesAndFilters());
-            } else {
-                assertNull(deserialized.getIndicesAndFilters());
-            }
+            assertEquals(clusterSearchShardsResponse.getIndicesAndFilters(), deserialized.getIndicesAndFilters());
         }
     }
 }

@@ -19,10 +19,7 @@
 
 package org.elasticsearch.action.admin.indices.create;
 
-import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.test.AbstractStreamableXContentTestCase;

@@ -67,25 +64,6 @@ public class CreateIndexResponseTests extends AbstractStreamableXContentTestCase
         return CreateIndexResponse.fromXContent(parser);
     }
 
-    public void testSerializationWithOldVersion() throws IOException {
-        Version oldVersion = Version.V_5_4_0;
-        CreateIndexResponse response = new CreateIndexResponse(true, true, "foo");
-
-        try (BytesStreamOutput output = new BytesStreamOutput()) {
-            output.setVersion(oldVersion);
-            response.writeTo(output);
-
-            try (StreamInput in = output.bytes().streamInput()) {
-                in.setVersion(oldVersion);
-                CreateIndexResponse serialized = new CreateIndexResponse();
-                serialized.readFrom(in);
-                assertEquals(response.isShardsAcknowledged(), serialized.isShardsAcknowledged());
-                assertEquals(response.isAcknowledged(), serialized.isAcknowledged());
-                assertNull(serialized.index());
-            }
-        }
-    }
-
     public void testToXContent() {
         CreateIndexResponse response = new CreateIndexResponse(true, false, "index_name");
         String output = Strings.toString(response);

@@ -19,20 +19,14 @@
 
 package org.elasticsearch.action.admin.indices.mapping.put;
 
-import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.common.xcontent.json.JsonXContent;
-import org.elasticsearch.common.xcontent.yaml.YamlXContent;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.RandomCreateIndexGenerator;
 import org.elasticsearch.test.ESTestCase;

@@ -87,27 +81,6 @@ public class PutMappingRequestTests extends ESTestCase {
         assertEquals("mapping source must be pairs of fieldnames and properties definition.", e.getMessage());
     }
 
-    public void testPutMappingRequestSerialization() throws IOException {
-        PutMappingRequest request = new PutMappingRequest("foo");
-        String mapping = Strings.toString(YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject());
-        request.source(mapping, XContentType.YAML);
-        assertEquals(XContentHelper.convertToJson(new BytesArray(mapping), false, XContentType.YAML), request.source());
-
-        final Version version = randomFrom(Version.CURRENT, Version.V_5_3_0, Version.V_5_3_1, Version.V_5_3_2, Version.V_5_4_0);
-        try (BytesStreamOutput bytesStreamOutput = new BytesStreamOutput()) {
-            bytesStreamOutput.setVersion(version);
-            request.writeTo(bytesStreamOutput);
-            try (StreamInput in = StreamInput.wrap(bytesStreamOutput.bytes().toBytesRef().bytes)) {
-                in.setVersion(version);
-                PutMappingRequest serialized = new PutMappingRequest();
-                serialized.readFrom(in);
-
-                String source = serialized.source();
-                assertEquals(XContentHelper.convertToJson(new BytesArray(mapping), false, XContentType.YAML), source);
-            }
-        }
-    }
-
     public void testToXContent() throws IOException {
         PutMappingRequest request = new PutMappingRequest("foo");
         request.type("my_type");

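The serialization tests deleted in this file and in several files below all follow one round-trip recipe: pin both stream ends to a single wire version, write, read back, compare. A sketch of that recipe assembled from the same calls the deleted code used (assumes the Elasticsearch 6.x test classpath; the concrete request type and source value are placeholders):

```java
import java.io.IOException;

import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;

// Sketch of the shared round-trip recipe, restricted to still-supported
// wire versions now that the V_5_* constants are gone.
public class RoundTripSketchTests extends ESTestCase {
    public void testRoundTrip() throws IOException {
        PutMappingRequest request = new PutMappingRequest("foo");
        request.source("{\"properties\":{}}", XContentType.JSON);
        Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT);
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.setVersion(version);                 // write as that wire version would
            request.writeTo(out);
            try (StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes)) {
                in.setVersion(version);              // read with the matching version
                PutMappingRequest serialized = new PutMappingRequest();
                serialized.readFrom(in);
                assertEquals(request.source(), serialized.source());
            }
        }
    }
}
```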
@@ -18,25 +18,16 @@
 */
 package org.elasticsearch.action.admin.indices.template.put;
 
-import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.admin.indices.alias.Alias;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.common.xcontent.yaml.YamlXContent;
 import org.elasticsearch.test.AbstractXContentTestCase;
 
 import java.io.IOException;
 import java.io.UncheckedIOException;
-import java.util.Arrays;
-import java.util.Base64;
 import java.util.Collections;
 
 import static org.hamcrest.Matchers.containsString;

@@ -46,81 +37,6 @@ import static org.hamcrest.Matchers.nullValue;
 import static org.hamcrest.core.Is.is;
 
 public class PutIndexTemplateRequestTests extends AbstractXContentTestCase<PutIndexTemplateRequest> {
 
-    // bwc for #21009
-    public void testPutIndexTemplateRequest510() throws IOException {
-        PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest("test");
-        putRequest.patterns(Collections.singletonList("test*"));
-        putRequest.order(5);
-
-        PutIndexTemplateRequest multiPatternRequest = new PutIndexTemplateRequest("test");
-        multiPatternRequest.patterns(Arrays.asList("test*", "*test2", "*test3*"));
-        multiPatternRequest.order(5);
-
-        // These bytes were retrieved by Base64 encoding the result of the above with 5_0_0 code.
-        // Note: Instead of a list for the template, in 5_0_0 the element was provided as a string.
-        String putRequestBytes = "ADwDAAR0ZXN0BXRlc3QqAAAABQAAAAAAAA==";
-        BytesArray bytes = new BytesArray(Base64.getDecoder().decode(putRequestBytes));
-
-        try (StreamInput in = bytes.streamInput()) {
-            in.setVersion(Version.V_5_0_0);
-            PutIndexTemplateRequest readRequest = new PutIndexTemplateRequest();
-            readRequest.readFrom(in);
-            assertEquals(putRequest.patterns(), readRequest.patterns());
-            assertEquals(putRequest.order(), readRequest.order());
-
-            BytesStreamOutput output = new BytesStreamOutput();
-            output.setVersion(Version.V_5_0_0);
-            readRequest.writeTo(output);
-            assertEquals(bytes.toBytesRef(), output.bytes().toBytesRef());
-
-            // test that multi templates are reverse-compatible.
-            // for the bwc case, if multiple patterns, use only the first pattern seen.
-            output.reset();
-            multiPatternRequest.writeTo(output);
-            assertEquals(bytes.toBytesRef(), output.bytes().toBytesRef());
-        }
-    }
-
-    public void testPutIndexTemplateRequestSerializationXContent() throws IOException {
-        PutIndexTemplateRequest request = new PutIndexTemplateRequest("foo");
-        String mapping = Strings.toString(YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject());
-        request.patterns(Collections.singletonList("foo"));
-        request.mapping("bar", mapping, XContentType.YAML);
-        assertNotEquals(mapping, request.mappings().get("bar"));
-        assertEquals(XContentHelper.convertToJson(new BytesArray(mapping), false, XContentType.YAML), request.mappings().get("bar"));
-
-        final Version version = randomFrom(Version.CURRENT, Version.V_5_3_0, Version.V_5_3_1, Version.V_5_3_2, Version.V_5_4_0);
-        try (BytesStreamOutput out = new BytesStreamOutput()) {
-            out.setVersion(version);
-            request.writeTo(out);
-
-            try (StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes)) {
-                in.setVersion(version);
-                PutIndexTemplateRequest serialized = new PutIndexTemplateRequest();
-                serialized.readFrom(in);
-                assertEquals(XContentHelper.convertToJson(new BytesArray(mapping), false, XContentType.YAML),
-                    serialized.mappings().get("bar"));
-            }
-        }
-    }
-
-    public void testPutIndexTemplateRequestSerializationXContentBwc() throws IOException {
-        final byte[] data = Base64.getDecoder().decode("ADwDAANmb28IdGVtcGxhdGUAAAAAAAABA2Jhcg8tLS0KZm9vOiAiYmFyIgoAAAAAAAAAAAAAAAA=");
-        final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
-            Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
-        try (StreamInput in = StreamInput.wrap(data)) {
-            in.setVersion(version);
-            PutIndexTemplateRequest request = new PutIndexTemplateRequest();
-            request.readFrom(in);
-            String mapping = Strings.toString(YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject());
-            assertNotEquals(mapping, request.mappings().get("bar"));
-            assertEquals(XContentHelper.convertToJson(new BytesArray(mapping), false, XContentType.YAML), request.mappings().get("bar"));
-            assertEquals("foo", request.name());
-            assertEquals("template", request.patterns().get(0));
-        }
-    }
-
     public void testValidateErrorMessage() throws Exception {
         PutIndexTemplateRequest request = new PutIndexTemplateRequest();
         ActionRequestValidationException withoutNameAndPattern = request.validate();

@@ -19,7 +19,6 @@
 
 package org.elasticsearch.action.ingest;
 
-import org.elasticsearch.Version;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;

@@ -28,7 +27,6 @@ import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
-import java.util.Base64;
 
 import static org.hamcrest.CoreMatchers.equalTo;
 

@@ -68,22 +66,4 @@ public class SimulatePipelineRequestTests extends ESTestCase {
         assertEquals(XContentType.JSON, serialized.getXContentType());
         assertEquals("{}", serialized.getSource().utf8ToString());
     }
-
-    public void testSerializationWithXContentBwc() throws IOException {
-        final byte[] data = Base64.getDecoder().decode("AAAAAnt9AAA=");
-        final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
-            Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
-        try (StreamInput in = StreamInput.wrap(data)) {
-            in.setVersion(version);
-            SimulatePipelineRequest request = new SimulatePipelineRequest(in);
-            assertEquals(XContentType.JSON, request.getXContentType());
-            assertEquals("{}", request.getSource().utf8ToString());
-
-            try (BytesStreamOutput out = new BytesStreamOutput()) {
-                out.setVersion(version);
-                request.writeTo(out);
-                assertArrayEquals(data, out.bytes().toBytesRef().bytes);
-            }
-        }
-    }
 }

@@ -33,7 +33,6 @@ import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.internal.AliasFilter;
 import org.elasticsearch.search.internal.ShardSearchTransportRequest;
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.VersionUtils;
 import org.elasticsearch.transport.Transport;
 
 import java.io.IOException;

@@ -110,17 +109,6 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase {
         }
     }
 
-    public void testOldNodesTriggerException() {
-        SearchTransportService searchTransportService = new SearchTransportService(
-            Settings.builder().put("search.remote.connect", false).build(), null, null);
-        DiscoveryNode node = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), VersionUtils.randomVersionBetween(random(),
-            VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_6_0)));
-        SearchAsyncActionTests.MockConnection mockConnection = new SearchAsyncActionTests.MockConnection(node);
-        IllegalArgumentException illegalArgumentException = expectThrows(IllegalArgumentException.class,
-            () -> searchTransportService.sendCanMatch(mockConnection, null, null, null));
-        assertEquals("can_match is not supported on pre 5.6 nodes", illegalArgumentException.getMessage());
-    }
-
     public void testFilterWithFailure() throws InterruptedException {
         final TransportSearchAction.SearchTimeProvider timeProvider = new TransportSearchAction.SearchTimeProvider(0, System.nanoTime(),
             System::nanoTime);

@@ -19,7 +19,6 @@
 
 package org.elasticsearch.action.search;
 
-import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;

@@ -47,13 +46,11 @@ import org.elasticsearch.search.suggest.Suggest;
 import org.elasticsearch.search.suggest.SuggestTests;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.InternalAggregationTestCase;
-import org.elasticsearch.test.VersionUtils;
 import org.junit.After;
 import org.junit.Before;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Base64;
 import java.util.Collections;
 import java.util.List;
 

@@ -290,27 +287,4 @@ public class SearchResponseTests extends ESTestCase {
             assertEquals(searchResponse.getClusters(), serialized.getClusters());
         }
     }
-
-    public void testSerializationBwc() throws IOException {
-        final byte[] data = Base64.getDecoder().decode("AAAAAAAAAAAAAgABBQUAAAoAAAAAAAAA");
-        final Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_6_5, Version.V_6_0_0);
-        try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(data), namedWriteableRegistry)) {
-            in.setVersion(version);
-            SearchResponse deserialized = new SearchResponse();
-            deserialized.readFrom(in);
-            assertSame(SearchResponse.Clusters.EMPTY, deserialized.getClusters());
-
-            try (BytesStreamOutput out = new BytesStreamOutput()) {
-                out.setVersion(version);
-                deserialized.writeTo(out);
-                try (StreamInput in2 = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes().toBytesRef().bytes),
-                    namedWriteableRegistry)) {
-                    in2.setVersion(version);
-                    SearchResponse deserialized2 = new SearchResponse();
-                    deserialized2.readFrom(in2);
-                    assertSame(SearchResponse.Clusters.EMPTY, deserialized2.getClusters());
-                }
-            }
-        }
-    }
 }

@@ -36,14 +36,11 @@ import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
-import org.elasticsearch.Version;
 import org.elasticsearch.action.termvectors.TermVectorsRequest.Flag;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.InputStreamStreamInput;
 import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;

@@ -60,7 +57,6 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.Arrays;
-import java.util.Base64;
 import java.util.EnumSet;
 import java.util.HashSet;
 import java.util.Set;

@@ -264,34 +260,6 @@ public class TermVectorsUnitTests extends ESTestCase {
         }
     }
 
-    public void testStreamRequestWithXContentBwc() throws IOException {
-        final byte[] data = Base64.getDecoder().decode("AAABBWluZGV4BHR5cGUCaWQBAnt9AAABDnNvbWVQcmVmZXJlbmNlFgAAAAEA//////////0AAAA=");
-        final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
-            Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
-        try (StreamInput in = StreamInput.wrap(data)) {
-            in.setVersion(version);
-            TermVectorsRequest request = new TermVectorsRequest();
-            request.readFrom(in);
-            assertEquals("index", request.index());
-            assertEquals("type", request.type());
-            assertEquals("id", request.id());
-            assertTrue(request.offsets());
-            assertFalse(request.fieldStatistics());
-            assertTrue(request.payloads());
-            assertFalse(request.positions());
-            assertTrue(request.termStatistics());
-            assertEquals("somePreference", request.preference());
-            assertEquals("{}", request.doc().utf8ToString());
-            assertEquals(XContentType.JSON, request.xContentType());
-
-            try (BytesStreamOutput out = new BytesStreamOutput()) {
-                out.setVersion(version);
-                request.writeTo(out);
-                assertArrayEquals(data, out.bytes().toBytesRef().bytes);
-            }
-        }
-    }
-
     public void testFieldTypeToTermVectorString() throws Exception {
         FieldType ft = new FieldType();
         ft.setStoreTermVectorOffsets(false);

@@ -18,12 +18,9 @@
 */
 package org.elasticsearch.cluster.metadata;
 
-import org.elasticsearch.Version;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.DeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;

@@ -35,62 +32,15 @@ import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
-import java.util.Arrays;
-import java.util.Base64;
 import java.util.Collections;
 
 import static java.util.Collections.singletonMap;
 import static org.elasticsearch.cluster.metadata.AliasMetaData.newAliasMetaDataBuilder;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.Matchers.contains;
 
 public class IndexTemplateMetaDataTests extends ESTestCase {
 
-    // bwc for #21009
-    public void testIndexTemplateMetaData510() throws IOException {
-        IndexTemplateMetaData metaData = IndexTemplateMetaData.builder("foo")
-                .patterns(Collections.singletonList("bar"))
-                .order(1)
-                .settings(Settings.builder()
-                    .put("setting1", "value1")
-                    .put("setting2", "value2"))
-                .putAlias(newAliasMetaDataBuilder("alias-bar1")).build();
-
-        IndexTemplateMetaData multiMetaData = IndexTemplateMetaData.builder("foo")
-                .patterns(Arrays.asList("bar", "foo"))
-                .order(1)
-                .settings(Settings.builder()
-                    .put("setting1", "value1")
-                    .put("setting2", "value2"))
-                .putAlias(newAliasMetaDataBuilder("alias-bar1")).build();
-
-        // These bytes were retrieved by Base64 encoding the result of the above with 5_0_0 code
-        String templateBytes = "A2ZvbwAAAAEDYmFyAghzZXR0aW5nMQEGdmFsdWUxCHNldHRpbmcyAQZ2YWx1ZTIAAQphbGlhcy1iYXIxAAAAAAA=";
-        BytesArray bytes = new BytesArray(Base64.getDecoder().decode(templateBytes));
-
-        try (StreamInput in = bytes.streamInput()) {
-            in.setVersion(Version.V_5_0_0);
-            IndexTemplateMetaData readMetaData = IndexTemplateMetaData.readFrom(in);
-            assertEquals(0, in.available());
-            assertEquals(metaData.getName(), readMetaData.getName());
-            assertEquals(metaData.getPatterns(), readMetaData.getPatterns());
-            assertTrue(metaData.aliases().containsKey("alias-bar1"));
-            assertEquals(1, metaData.aliases().size());
-
-            BytesStreamOutput output = new BytesStreamOutput();
-            output.setVersion(Version.V_5_0_0);
-            readMetaData.writeTo(output);
-            assertEquals(bytes.toBytesRef(), output.bytes().toBytesRef());
-
-            // test that multi templates are reverse-compatible.
-            // for the bwc case, if multiple patterns, use only the first pattern seen.
-            output.reset();
-            multiMetaData.writeTo(output);
-            assertEquals(bytes.toBytesRef(), output.bytes().toBytesRef());
-        }
-    }
-
     public void testIndexTemplateMetaDataXContentRoundTrip() throws Exception {
         ToXContent.Params params = new ToXContent.MapParams(singletonMap("reduce_mappings", "true"));

@@ -147,7 +147,7 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase {
             .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
             .put(IndexMetaData.SETTING_CREATION_DATE, 1)
             .put(IndexMetaData.SETTING_INDEX_UUID, "BOOM")
-            .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_5_0_0_beta1)
+            .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_6_0_0_alpha1)
             .put(indexSettings)
             .build();
         return IndexMetaData.builder(name).settings(build).build();

@@ -228,7 +228,7 @@ public class FailedNodeRoutingTests extends ESAllocationTestCase {
         }
         final String id = String.format(Locale.ROOT, "node_%03d", nodeIdGenerator.incrementAndGet());
         return new DiscoveryNode(id, id, buildNewFakeTransportAddress(), Collections.emptyMap(), roles,
-            VersionUtils.randomVersionBetween(random(), Version.V_5_6_0, null));
+            VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_alpha1, null));
     }
 
 }

@@ -576,7 +576,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase {
         // add a single node
         clusterState = ClusterState.builder(clusterState).nodes(
                 DiscoveryNodes.builder()
-                        .add(newNode("node1-5.x", Version.V_5_6_0)))
+                        .add(newNode("node1-5.x", Version.fromId(5060099))))
                 .build();
         clusterState = ClusterState.builder(clusterState).routingTable(allocation.reroute(clusterState, "reroute").routingTable()).build();
         assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(1));

@@ -590,7 +590,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase {
         // add another 5.6 node
         clusterState = ClusterState.builder(clusterState).nodes(
                 DiscoveryNodes.builder(clusterState.nodes())
-                        .add(newNode("node2-5.x", Version.V_5_6_0)))
+                        .add(newNode("node2-5.x", Version.fromId(5060099))))
                 .build();
 
         // start the shards, should have 1 primary and 1 replica available

@@ -19,7 +19,6 @@
 package org.elasticsearch.cluster.routing.allocation;
 
 import org.elasticsearch.Version;
-import org.elasticsearch.action.admin.indices.shrink.ResizeAction;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ESAllocationTestCase;

@@ -39,7 +38,6 @@ import org.elasticsearch.cluster.routing.allocation.decider.ResizeAllocationDeci
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.test.VersionUtils;
 import org.elasticsearch.test.gateway.TestGatewayAllocator;
 
 import java.util.Arrays;

@@ -243,46 +241,4 @@ public class ResizeAllocationDeciderTests extends ESAllocationTestCase {
                 routingAllocation).getExplanation());
         }
     }
-
-    public void testAllocateOnOldNode() {
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
-            VersionUtils.getPreviousVersion(ResizeAction.COMPATIBILITY_VERSION));
-        ClusterState clusterState = createInitialClusterState(true, version);
-        MetaData.Builder metaBuilder = MetaData.builder(clusterState.metaData());
-        metaBuilder.put(IndexMetaData.builder("target").settings(settings(Version.CURRENT)
-            .put(IndexMetaData.INDEX_RESIZE_SOURCE_NAME.getKey(), "source")
-            .put(IndexMetaData.INDEX_RESIZE_SOURCE_UUID_KEY, IndexMetaData.INDEX_UUID_NA_VALUE))
-            .numberOfShards(4).numberOfReplicas(0));
-        MetaData metaData = metaBuilder.build();
-        RoutingTable.Builder routingTableBuilder = RoutingTable.builder(clusterState.routingTable());
-        routingTableBuilder.addAsNew(metaData.index("target"));
-
-        clusterState = ClusterState.builder(clusterState)
-            .routingTable(routingTableBuilder.build())
-            .metaData(metaData).build();
-        Index idx = clusterState.metaData().index("target").getIndex();
-
-
-        ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider(Settings.EMPTY);
-        RoutingAllocation routingAllocation = new RoutingAllocation(null, clusterState.getRoutingNodes(), clusterState, null, 0);
-        int shardId = randomIntBetween(0, 3);
-        int sourceShardId = IndexMetaData.selectSplitShard(shardId, clusterState.metaData().index("source"), 4).id();
-        ShardRouting shardRouting = TestShardRouting.newShardRouting(new ShardId(idx, shardId), null, true, RecoverySource
-            .LocalShardsRecoverySource.INSTANCE, ShardRoutingState.UNASSIGNED);
-        assertEquals(Decision.YES, resizeAllocationDecider.canAllocate(shardRouting, routingAllocation));
-
-        assertEquals(Decision.NO, resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node1"),
-            routingAllocation));
-        assertEquals(Decision.NO, resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node2"),
-            routingAllocation));
-
-        routingAllocation.debugDecision(true);
-        assertEquals("source primary is active", resizeAllocationDecider.canAllocate(shardRouting, routingAllocation).getExplanation());
-        assertEquals("node [node1] is too old to split a shard",
-            resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node1"),
-                routingAllocation).getExplanation());
-        assertEquals("node [node2] is too old to split a shard",
-            resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node2"),
-                routingAllocation).getExplanation());
-    }
 }

@@ -20,7 +20,6 @@
 package org.elasticsearch.common.unit;
 
 import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.test.AbstractWireSerializingTestCase;
 import org.hamcrest.MatcherAssert;

@@ -319,9 +318,4 @@ public class ByteSizeValueTests extends AbstractWireSerializingTestCase<ByteSize
             }
         }
     }
-
-    public void testOldSerialisation() throws IOException {
-        ByteSizeValue original = createTestInstance();
-        assertSerialization(original, randomFrom(Version.V_5_6_4, Version.V_5_6_5, Version.V_6_0_0, Version.V_6_0_1, Version.V_6_1_0));
-    }
 }

@@ -63,7 +63,7 @@ public class IndexFolderUpgraderTests extends ESTestCase {
         Settings settings = Settings.builder()
             .put(nodeSettings)
             .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
-            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0)
+            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_0_0)
            .put(IndexMetaData.SETTING_DATA_PATH, customPath.toAbsolutePath().toString())
             .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5))
             .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)

@@ -91,7 +91,7 @@ public class IndexFolderUpgraderTests extends ESTestCase {
         Settings settings = Settings.builder()
             .put(nodeSettings)
             .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
-            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0)
+            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_0_0)
             .put(IndexMetaData.SETTING_DATA_PATH, customPath.toAbsolutePath().toString())
             .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5))
             .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)

@@ -129,7 +129,7 @@ public class IndexFolderUpgraderTests extends ESTestCase {
         Settings settings = Settings.builder()
             .put(nodeSettings)
             .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
-            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0)
+            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_0_0)
             .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5))
             .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
             .build();

@@ -153,7 +153,7 @@ public class IndexFolderUpgraderTests extends ESTestCase {
         Settings settings = Settings.builder()
             .put(nodeSettings)
             .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
-            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0)
+            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_0_0)
             .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5))
             .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
             .build();

@@ -80,7 +80,7 @@ public class MembershipActionTests extends ESTestCase {

        final Version maxNodeVersion = nodes.getMaxNodeVersion();
        final Version minNodeVersion = nodes.getMinNodeVersion();
        if (maxNodeVersion.onOrAfter(Version.V_6_0_0_alpha1)) {
        if (maxNodeVersion.onOrAfter(Version.V_7_0_0_alpha1)) {
            final Version tooLow = getPreviousVersion(maxNodeVersion.minimumCompatibilityVersion());
            expectThrows(IllegalStateException.class, () -> {
                if (randomBoolean()) {

@@ -91,7 +91,7 @@ public class MembershipActionTests extends ESTestCase {
            });
        }

        if (minNodeVersion.before(Version.V_5_5_0)) {
        if (minNodeVersion.before(Version.V_6_0_0)) {
            Version tooHigh = incompatibleFutureVersion(minNodeVersion);
            expectThrows(IllegalStateException.class, () -> {
                if (randomBoolean()) {

@@ -102,7 +102,7 @@ public class MembershipActionTests extends ESTestCase {
            });
        }

        if (minNodeVersion.onOrAfter(Version.V_6_0_0_alpha1)) {
        if (minNodeVersion.onOrAfter(Version.V_7_0_0_alpha1)) {
            Version oldMajor = randomFrom(allVersions().stream().filter(v -> v.major < 6).collect(Collectors.toList()));
            expectThrows(IllegalStateException.class, () -> MembershipAction.ensureMajorVersionBarrier(oldMajor, minNodeVersion));
        }
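Note: the flipped bounds above describe the join barrier from both directions. A hedged sketch of the rule (illustrative, not MembershipAction's actual code): a joining node is rejected if it sits below the wire-compatibility floor of the newest cluster node, or if it is so new that the oldest cluster node falls below the joiner's own floor.

import org.elasticsearch.Version;

final class JoinBarrierSketch {
    static void ensureVersionCompatibility(Version joining, Version minNode, Version maxNode) {
        if (joining.before(maxNode.minimumCompatibilityVersion())) {
            throw new IllegalStateException("joining node version [" + joining + "] is too old");
        }
        if (minNode.before(joining.minimumCompatibilityVersion())) {
            throw new IllegalStateException("joining node version [" + joining + "] is too new");
        }
    }
}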
@@ -528,7 +528,7 @@ public class GetActionIT extends ESIntegTestCase {
        assertAcked(prepareCreate("test")
            .addMapping("_doc", "field1", "type=keyword,store=true")
            .addAlias(new Alias("alias"))
            .setSettings(Settings.builder().put("index.refresh_interval", -1).put("index.version.created", Version.V_5_6_0.id)));
            .setSettings(Settings.builder().put("index.refresh_interval", -1).put("index.version.created", Version.V_6_0_0.id)));
        // multi types in 5.6

        client().prepareIndex("test", "_doc", "1")
@@ -146,15 +146,4 @@ public class IndexSortSettingsTests extends ESTestCase {
        assertThat(exc.getMessage(), containsString("Illegal missing value:[default]," +
            " must be one of [_last, _first]"));
    }

    public void testInvalidVersion() throws IOException {
        final Settings settings = Settings.builder()
            .put("index.sort.field", "field1")
            .build();
        IllegalArgumentException exc =
            expectThrows(IllegalArgumentException.class, () -> indexSettings(settings, Version.V_5_4_0));
        assertThat(exc.getMessage(),
            containsString("unsupported index.version.created:5.4.0, " +
                "can't set index.sort on versions prior to 6.0.0-alpha1"));
    }
}
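Note: testInvalidVersion could only trigger the guard by creating an index on 5.4.0, which is no longer representable, so the whole test goes rather than being rewritten. The guard it exercised, reconstructed from the removed error message (a sketch, not the exact IndexSortSettings code):

import org.elasticsearch.Version;

final class IndexSortGuardSketch {
    static void checkIndexSortSupported(Version indexCreated) {
        // index sorting only exists for indices created on or after 6.0.0-alpha1
        if (indexCreated.before(Version.V_6_0_0_alpha1)) {
            throw new IllegalArgumentException("unsupported index.version.created:" + indexCreated
                + ", can't set index.sort on versions prior to 6.0.0-alpha1");
        }
    }
}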
@@ -103,7 +103,7 @@ public class AnalysisRegistryTests extends ESTestCase {
    }

    public void testOverrideDefaultIndexAnalyzerIsUnsupported() {
        Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0_alpha1, Version.CURRENT);
        Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_alpha1, Version.CURRENT);
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
        AnalyzerProvider<?> defaultIndex = new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer());
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
@@ -56,21 +56,21 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase {

    public void testThatInstancesAreTheSameAlwaysForKeywordAnalyzer() {
        assertThat(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.CURRENT),
            is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.V_5_0_0)));
            is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.V_6_0_0)));
    }

    public void testThatInstancesAreCachedAndReused() {
        assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT),
            PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT));
        // same es version should be cached
        assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_2_1),
            PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_2_1));
        assertNotSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_0_0),
            PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_0_1));
        assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_2_1),
            PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_2_1));
        assertNotSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_0_0),
            PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_0_1));

        // Same Lucene version should be cached:
        assertSame(PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_5_2_1),
            PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_5_2_2));
        assertSame(PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_6_2_1),
            PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_6_2_2));
    }

    public void testThatAnalyzersAreUsedInMapping() throws IOException {
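Note: the assertSame/assertNotSame pairs pin down the caching contract: analyzer instances are shared exactly when the bundled Lucene version matches (6.2.1/6.2.2 share one above; 6.0.0/6.0.1 do not). A minimal sketch of a cache with that property (illustrative, not the PreBuiltAnalyzers implementation):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import org.elasticsearch.Version;

final class PerLuceneVersionCache<T> {
    private final Map<org.apache.lucene.util.Version, T> cache = new ConcurrentHashMap<>();

    T get(Version esVersion, Function<org.apache.lucene.util.Version, T> factory) {
        // key on the Lucene version bundled with the ES version, so two ES releases
        // shipping the same Lucene version resolve to one shared instance
        return cache.computeIfAbsent(esVersion.luceneVersion, factory);
    }
}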
@@ -40,18 +40,11 @@ public class DynamicTemplateTests extends ESTestCase {
        templateDef.put("random_param", "random_value");

        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1));
            () -> DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1));
        assertEquals("Illegal dynamic template parameter: [random_param]", e.getMessage());
    }

    public void testParseUnknownMatchType() {
        Map<String, Object> templateDef = new HashMap<>();
        templateDef.put("match_mapping_type", "short");
        templateDef.put("mapping", Collections.singletonMap("store", true));
        // if a wrong match type is specified, we ignore the template
        assertNull(DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha5));
        assertWarnings("match_mapping_type [short] is invalid and will be ignored: No field type matched on [short], " +
            "possible values are [object, string, long, double, boolean, date, binary]");
        Map<String, Object> templateDef2 = new HashMap<>();
        templateDef2.put("match_mapping_type", "text");
        templateDef2.put("mapping", Collections.singletonMap("store", true));

@@ -79,7 +72,7 @@ public class DynamicTemplateTests extends ESTestCase {
        Map<String, Object> templateDef = new HashMap<>();
        templateDef.put("match_mapping_type", "*");
        templateDef.put("mapping", Collections.singletonMap("store", true));
        DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha5);
        DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1);
        assertTrue(template.match("a.b", "b", randomFrom(XContentFieldType.values())));
    }

@@ -87,7 +80,7 @@ public class DynamicTemplateTests extends ESTestCase {
        Map<String, Object> templateDef = new HashMap<>();
        templateDef.put("match_mapping_type", "string");
        templateDef.put("mapping", Collections.singletonMap("store", true));
        DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha5);
        DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1);
        assertTrue(template.match("a.b", "b", XContentFieldType.STRING));
        assertFalse(template.match("a.b", "b", XContentFieldType.BOOLEAN));
    }

@@ -97,7 +90,7 @@ public class DynamicTemplateTests extends ESTestCase {
        Map<String, Object> templateDef = new HashMap<>();
        templateDef.put("match_mapping_type", "string");
        templateDef.put("mapping", Collections.singletonMap("store", true));
        DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1);
        DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1);
        XContentBuilder builder = JsonXContent.contentBuilder();
        template.toXContent(builder, ToXContent.EMPTY_PARAMS);
        assertEquals("{\"match_mapping_type\":\"string\",\"mapping\":{\"store\":true}}", Strings.toString(builder));

@@ -107,7 +100,7 @@ public class DynamicTemplateTests extends ESTestCase {
        templateDef.put("match", "*name");
        templateDef.put("unmatch", "first_name");
        templateDef.put("mapping", Collections.singletonMap("store", true));
        template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1);
        template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1);
        builder = JsonXContent.contentBuilder();
        template.toXContent(builder, ToXContent.EMPTY_PARAMS);
        assertEquals("{\"match\":\"*name\",\"unmatch\":\"first_name\",\"mapping\":{\"store\":true}}", Strings.toString(builder));

@@ -117,7 +110,7 @@ public class DynamicTemplateTests extends ESTestCase {
        templateDef.put("path_match", "*name");
        templateDef.put("path_unmatch", "first_name");
        templateDef.put("mapping", Collections.singletonMap("store", true));
        template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1);
        template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1);
        builder = JsonXContent.contentBuilder();
        template.toXContent(builder, ToXContent.EMPTY_PARAMS);
        assertEquals("{\"path_match\":\"*name\",\"path_unmatch\":\"first_name\",\"mapping\":{\"store\":true}}",

@@ -128,7 +121,7 @@ public class DynamicTemplateTests extends ESTestCase {
        templateDef.put("match", "^a$");
        templateDef.put("match_pattern", "regex");
        templateDef.put("mapping", Collections.singletonMap("store", true));
        template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1);
        template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1);
        builder = JsonXContent.contentBuilder();
        template.toXContent(builder, ToXContent.EMPTY_PARAMS);
        assertEquals("{\"match\":\"^a$\",\"match_pattern\":\"regex\",\"mapping\":{\"store\":true}}", Strings.toString(builder));
@@ -57,7 +57,7 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase {
    }

    public void testExternalValues() throws Exception {
        Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
        Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
            Version.CURRENT);
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
        IndexService indexService = createIndex("test", settings);

@@ -107,7 +107,7 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase {
    }

    public void testExternalValuesWithMultifield() throws Exception {
        Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
        Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
            Version.CURRENT);
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
        IndexService indexService = createIndex("test", settings);

@@ -173,7 +173,7 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase {
    }

    public void testExternalValuesWithMultifieldTwoLevels() throws Exception {
        Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
        Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
            Version.CURRENT);
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
        IndexService indexService = createIndex("test", settings);
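Note: this is the commit's recurring mechanical edit, shown once in isolation; the lower bound of every randomized created-version range moves up to the oldest major that can still create indices (random() comes from the test infrastructure):

// before: VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT)
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT);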
@@ -61,7 +61,7 @@ public class TypeFieldMapperTests extends ESSingleNodeTestCase {

    public void testDocValues(boolean singleType) throws IOException {
        Settings indexSettings = singleType ? Settings.EMPTY : Settings.builder()
            .put("index.version.created", Version.V_5_6_0)
            .put("index.version.created", Version.V_6_0_0)
            .build();
        MapperService mapperService = createIndex("test", indexSettings).mapperService();
        DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE);
@@ -30,7 +30,6 @@ import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;

@@ -366,9 +365,6 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuilder> {

    public void testMatchPhrasePrefixWithBoost() throws Exception {
        QueryShardContext context = createShardContext();
        assumeTrue("test runs only when the index version is on or after V_5_0_0_alpha1",
            context.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha1));

        {
            // field boost is applied on a single term query
            MatchPhrasePrefixQueryBuilder builder = new MatchPhrasePrefixQueryBuilder("string_boost", "foo");
@@ -27,7 +27,6 @@ import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.termvectors.MultiTermVectorsItemResponse;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;

@@ -36,13 +35,11 @@ import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;

@@ -52,7 +49,6 @@ import org.junit.Before;

import java.io.IOException;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;

@@ -333,26 +329,6 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLikeThisQueryBuilder> {
        assertEquals(expectedItem, newItem);
    }

    public void testItemSerializationBwc() throws IOException {
        final byte[] data = Base64.getDecoder().decode("AQVpbmRleAEEdHlwZQEODXsiZm9vIjoiYmFyIn0A/wD//////////QAAAAAAAAAA");
        final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
            Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0);
        try (StreamInput in = StreamInput.wrap(data)) {
            in.setVersion(version);
            Item item = new Item(in);
            assertEquals(XContentType.JSON, item.xContentType());
            assertEquals("{\"foo\":\"bar\"}", item.doc().utf8ToString());
            assertEquals("index", item.index());
            assertEquals("type", item.type());

            try (BytesStreamOutput out = new BytesStreamOutput()) {
                out.setVersion(version);
                item.writeTo(out);
                assertArrayEquals(data, out.bytes().toBytesRef().bytes);
            }
        }
    }

    @Override
    protected boolean isCachable(MoreLikeThisQueryBuilder queryBuilder) {
        return queryBuilder.likeItems().length == 0; // items are always fetched
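Note: the removed testItemSerializationBwc used the "golden bytes" pattern: replay a Base64-captured 5.x payload and assert that re-serialising reproduces it byte-for-byte. With the 5.x constants gone the capture has no version to bind to. The pattern itself, as a sketch for future captures (payload and version are placeholders):

import java.util.Arrays;
import java.util.Base64;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;

final class GoldenBytesSketch {
    static void assertWireStable(String base64Payload, Version wireVersion) throws Exception {
        byte[] data = Base64.getDecoder().decode(base64Payload);
        try (StreamInput in = StreamInput.wrap(data)) {
            in.setVersion(wireVersion);          // read exactly as the old version wrote it
            Item item = new Item(in);
            try (BytesStreamOutput out = new BytesStreamOutput()) {
                out.setVersion(wireVersion);     // write back at the same version
                item.writeTo(out);
                if (!Arrays.equals(data, out.bytes().toBytesRef().bytes)) {
                    throw new AssertionError("wire format changed for " + wireVersion);
                }
            }
        }
    }
}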
@@ -124,10 +124,6 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBuilder> {
    public void testSerializationBWC() throws IOException {
        for (Version version : VersionUtils.allReleasedVersions()) {
            NestedQueryBuilder testQuery = createTestQueryBuilder();
            if (version.before(Version.V_5_2_0) && testQuery.innerHit() != null) {
                // ignore unmapped for inner_hits has been added on 5.2
                testQuery.innerHit().setIgnoreUnmapped(false);
            }
            assertSerialization(testQuery, version);
        }
    }
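Note: for clarity, the method after this hunk. Once every remaining version in allReleasedVersions() understands ignore_unmapped on inner_hits (it was added in 5.2, and nothing older survives this commit), the guard is dead code and only the plain loop remains:

public void testSerializationBWC() throws IOException {
    for (Version version : VersionUtils.allReleasedVersions()) {
        NestedQueryBuilder testQuery = createTestQueryBuilder();
        assertSerialization(testQuery, version);
    }
}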