Remove build qualifier from server's Version (#35172)
With this change, `Version` no longer carries information about the qualifier. We still need a way to show the "display version" that has both the qualifier and the snapshot marker; this is now stored by the build and read from `META-INF`.
This commit is contained in:
parent
a467a816bc
commit
8a85b2eada
|
@ -699,7 +699,7 @@ class BuildPlugin implements Plugin<Project> {
|
|||
// this doFirst is added before the info plugin, therefore it will run
|
||||
// after the doFirst added by the info plugin, and we can override attributes
|
||||
jarTask.manifest.attributes(
|
||||
'X-Compile-Elasticsearch-Version': VersionProperties.elasticsearch.replace("-SNAPSHOT", ""),
|
||||
'X-Compile-Elasticsearch-Version': VersionProperties.elasticsearch,
|
||||
'X-Compile-Lucene-Version': VersionProperties.lucene,
|
||||
'X-Compile-Elasticsearch-Snapshot': VersionProperties.isElasticsearchSnapshot(),
|
||||
'Build-Date': ZonedDateTime.now(ZoneOffset.UTC),
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.gradle.doc
|
||||
|
||||
import org.elasticsearch.gradle.Version
|
||||
import org.elasticsearch.gradle.VersionProperties
|
||||
import org.elasticsearch.gradle.test.RestTestPlugin
|
||||
import org.gradle.api.Project
|
||||
|
@ -41,8 +42,8 @@ public class DocsTestPlugin extends RestTestPlugin {
|
|||
* the values may differ. In particular {version} needs to resolve
|
||||
* to the version being built for testing but needs to resolve to
|
||||
* the last released version for docs. */
|
||||
'\\{version\\}':
|
||||
VersionProperties.elasticsearch.replace('-SNAPSHOT', ''),
|
||||
'\\{version\\}': Version.fromString(VersionProperties.elasticsearch).toString(),
|
||||
'\\{qualified_version\\}': VersionProperties.elasticsearch,
|
||||
'\\{lucene_version\\}' : VersionProperties.lucene.replaceAll('-snapshot-\\w+$', ''),
|
||||
'\\{build_flavor\\}' :
|
||||
project.integTestCluster.distribution.startsWith('oss-') ? 'oss' : 'default',
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.gradle.plugin
|
||||
|
||||
import org.elasticsearch.gradle.Version
|
||||
import org.elasticsearch.gradle.VersionProperties
|
||||
import org.gradle.api.InvalidUserDataException
|
||||
import org.gradle.api.Task
|
||||
|
@ -66,17 +67,11 @@ class PluginPropertiesTask extends Copy {
|
|||
}
|
||||
|
||||
Map<String, String> generateSubstitutions() {
|
||||
def stringSnap = { version ->
|
||||
if (version.endsWith("-SNAPSHOT")) {
|
||||
return version.substring(0, version.length() - 9)
|
||||
}
|
||||
return version
|
||||
}
|
||||
return [
|
||||
'name': extension.name,
|
||||
'description': extension.description,
|
||||
'version': stringSnap(extension.version),
|
||||
'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch),
|
||||
'version': extension.version,
|
||||
'elasticsearchVersion': Version.fromString(VersionProperties.elasticsearch).toString(),
|
||||
'javaVersion': project.targetCompatibility as String,
|
||||
'classname': extension.classname,
|
||||
'extendedPlugins': extension.extendedPlugins.join(','),
|
||||
|
|
|
@ -278,9 +278,13 @@ class VagrantTestPlugin implements Plugin<Project> {
|
|||
}
|
||||
|
||||
Task createUpgradeFromFile = project.tasks.create('createUpgradeFromFile', FileContentsTask) {
|
||||
String version = project.extensions.esvagrant.upgradeFromVersion
|
||||
if (project.bwcVersions.unreleased.contains(project.extensions.esvagrant.upgradeFromVersion)) {
|
||||
version += "-SNAPSHOT"
|
||||
}
|
||||
dependsOn copyPackagingArchives
|
||||
file "${archivesDir}/upgrade_from_version"
|
||||
contents project.extensions.esvagrant.upgradeFromVersion.toString()
|
||||
contents version
|
||||
}
|
||||
|
||||
Task createUpgradeIsOssFile = project.tasks.create('createUpgradeIsOssFile', FileContentsTask) {
|
||||
|
|
|
@ -84,8 +84,7 @@ public class PingAndInfoIT extends ESRestHighLevelClientTestCase {
|
|||
assertNotNull(ml.description());
|
||||
assertTrue(ml.available());
|
||||
assertTrue(ml.enabled());
|
||||
assertEquals(mainResponse.getVersion().toString(),
|
||||
ml.nativeCodeInfo().get("version").toString().replace("-SNAPSHOT", ""));
|
||||
assertEquals(mainResponse.getBuild().getQualifiedVersion(), ml.nativeCodeInfo().get("version").toString());
|
||||
}
|
||||
|
||||
public void testXPackInfoEmptyRequest() throws IOException {
|
||||
|
|
|
@ -310,11 +310,11 @@ class InstallPluginCommand extends EnvironmentAwareCommand {
|
|||
baseUrl = String.format(Locale.ROOT, "https://artifacts.elastic.co/downloads/elasticsearch-plugins/%s", pluginId);
|
||||
}
|
||||
final String platformUrl =
|
||||
String.format(Locale.ROOT, "%s/%s-%s-%s.zip", baseUrl, pluginId, platform, Version.displayVersion(version, isSnapshot));
|
||||
String.format(Locale.ROOT, "%s/%s-%s-%s.zip", baseUrl, pluginId, platform, Build.CURRENT.getQualifiedVersion());
|
||||
if (urlExists(terminal, platformUrl)) {
|
||||
return platformUrl;
|
||||
}
|
||||
return String.format(Locale.ROOT, "%s/%s-%s.zip", baseUrl, pluginId, Version.displayVersion(version, isSnapshot));
|
||||
return String.format(Locale.ROOT, "%s/%s-%s.zip", baseUrl, pluginId, Build.CURRENT.getQualifiedVersion());
|
||||
}
|
||||
|
||||
private String nonReleaseUrl(final String hostname, final Version version, final String stagingHash, final String pluginId) {
|
||||
|
|
|
@ -936,7 +936,8 @@ public class InstallPluginCommandTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testOfficialPlugin() throws Exception {
|
||||
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Version.CURRENT + ".zip";
|
||||
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" +
|
||||
Build.CURRENT.getQualifiedVersion() + ".zip";
|
||||
assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null, false);
|
||||
}
|
||||
|
||||
|
@ -945,7 +946,7 @@ public class InstallPluginCommandTests extends ESTestCase {
|
|||
Locale.ROOT,
|
||||
"https://snapshots.elastic.co/%s-abc123/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-%s.zip",
|
||||
Version.CURRENT,
|
||||
Version.displayVersion(Version.CURRENT, true));
|
||||
Build.CURRENT.getQualifiedVersion());
|
||||
assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, "abc123", true);
|
||||
}
|
||||
|
||||
|
@ -954,7 +955,7 @@ public class InstallPluginCommandTests extends ESTestCase {
|
|||
Locale.ROOT,
|
||||
"https://snapshots.elastic.co/%s-abc123/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-%s.zip",
|
||||
Version.CURRENT,
|
||||
Version.displayVersion(Version.CURRENT, true));
|
||||
Build.CURRENT.getQualifiedVersion());
|
||||
// attempting to install a release build of a plugin (no staging ID) on a snapshot build should throw a user exception
|
||||
final UserException e =
|
||||
expectThrows(UserException.class, () -> assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null, true));
|
||||
|
@ -965,13 +966,13 @@ public class InstallPluginCommandTests extends ESTestCase {
|
|||
|
||||
public void testOfficialPluginStaging() throws Exception {
|
||||
String url = "https://staging.elastic.co/" + Version.CURRENT + "-abc123/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
|
||||
+ Version.CURRENT + ".zip";
|
||||
+ Build.CURRENT.getQualifiedVersion() + ".zip";
|
||||
assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, "abc123", false);
|
||||
}
|
||||
|
||||
public void testOfficialPlatformPlugin() throws Exception {
|
||||
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Platforms.PLATFORM_NAME +
|
||||
"-" + Version.CURRENT + ".zip";
|
||||
"-" + Build.CURRENT.getQualifiedVersion() + ".zip";
|
||||
assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null, false);
|
||||
}
|
||||
|
||||
|
@ -981,13 +982,13 @@ public class InstallPluginCommandTests extends ESTestCase {
|
|||
"https://snapshots.elastic.co/%s-abc123/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-%s-%s.zip",
|
||||
Version.CURRENT,
|
||||
Platforms.PLATFORM_NAME,
|
||||
Version.displayVersion(Version.CURRENT, true));
|
||||
Build.CURRENT.getQualifiedVersion());
|
||||
assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, "abc123", true);
|
||||
}
|
||||
|
||||
public void testOfficialPlatformPluginStaging() throws Exception {
|
||||
String url = "https://staging.elastic.co/" + Version.CURRENT + "-abc123/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
|
||||
+ Platforms.PLATFORM_NAME + "-"+ Version.CURRENT + ".zip";
|
||||
+ Platforms.PLATFORM_NAME + "-"+ Build.CURRENT.getQualifiedVersion() + ".zip";
|
||||
assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, "abc123", false);
|
||||
}
|
||||
|
||||
|
@ -1009,10 +1010,13 @@ public class InstallPluginCommandTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testOfficialShaMissing() throws Exception {
|
||||
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Version.CURRENT + ".zip";
|
||||
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" +
|
||||
Build.CURRENT.getQualifiedVersion() + ".zip";
|
||||
MessageDigest digest = MessageDigest.getInstance("SHA-1");
|
||||
UserException e = expectThrows(UserException.class, () ->
|
||||
assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null, false, ".sha1", checksum(digest), null, (b, p) -> null));
|
||||
assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null, false,
|
||||
".sha1", checksum(digest), null, (b, p) -> null)
|
||||
);
|
||||
assertEquals(ExitCodes.IO_ERROR, e.exitCode);
|
||||
assertEquals("Plugin checksum missing: " + url + ".sha512", e.getMessage());
|
||||
}
|
||||
|
@ -1027,7 +1031,8 @@ public class InstallPluginCommandTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testInvalidShaFileMissingFilename() throws Exception {
|
||||
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Version.CURRENT + ".zip";
|
||||
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" +
|
||||
Build.CURRENT.getQualifiedVersion() + ".zip";
|
||||
MessageDigest digest = MessageDigest.getInstance("SHA-512");
|
||||
UserException e = expectThrows(UserException.class,
|
||||
() -> assertInstallPluginFromUrl(
|
||||
|
@ -1037,7 +1042,8 @@ public class InstallPluginCommandTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testInvalidShaFileMismatchFilename() throws Exception {
|
||||
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Version.CURRENT + ".zip";
|
||||
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" +
|
||||
Build.CURRENT.getQualifiedVersion()+ ".zip";
|
||||
MessageDigest digest = MessageDigest.getInstance("SHA-512");
|
||||
UserException e = expectThrows(UserException.class, () ->
|
||||
assertInstallPluginFromUrl(
|
||||
|
@ -1047,7 +1053,7 @@ public class InstallPluginCommandTests extends ESTestCase {
|
|||
null,
|
||||
false,
|
||||
".sha512",
|
||||
checksumAndString(digest, " repository-s3-" + Version.CURRENT + ".zip"),
|
||||
checksumAndString(digest, " repository-s3-" + Build.CURRENT.getQualifiedVersion() + ".zip"),
|
||||
null,
|
||||
(b, p) -> null));
|
||||
assertEquals(ExitCodes.IO_ERROR, e.exitCode);
|
||||
|
@ -1055,7 +1061,8 @@ public class InstallPluginCommandTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testInvalidShaFileContainingExtraLine() throws Exception {
|
||||
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Version.CURRENT + ".zip";
|
||||
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" +
|
||||
Build.CURRENT.getQualifiedVersion() + ".zip";
|
||||
MessageDigest digest = MessageDigest.getInstance("SHA-512");
|
||||
UserException e = expectThrows(UserException.class, () ->
|
||||
assertInstallPluginFromUrl(
|
||||
|
@ -1065,7 +1072,7 @@ public class InstallPluginCommandTests extends ESTestCase {
|
|||
null,
|
||||
false,
|
||||
".sha512",
|
||||
checksumAndString(digest, " analysis-icu-" + Version.CURRENT + ".zip\nfoobar"),
|
||||
checksumAndString(digest, " analysis-icu-" + Build.CURRENT.getQualifiedVersion() + ".zip\nfoobar"),
|
||||
null,
|
||||
(b, p) -> null));
|
||||
assertEquals(ExitCodes.IO_ERROR, e.exitCode);
|
||||
|
@ -1073,7 +1080,8 @@ public class InstallPluginCommandTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testSha512Mismatch() throws Exception {
|
||||
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Version.CURRENT + ".zip";
|
||||
String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" +
|
||||
Build.CURRENT.getQualifiedVersion() + ".zip";
|
||||
UserException e = expectThrows(UserException.class, () ->
|
||||
assertInstallPluginFromUrl(
|
||||
"analysis-icu",
|
||||
|
@ -1082,7 +1090,7 @@ public class InstallPluginCommandTests extends ESTestCase {
|
|||
null,
|
||||
false,
|
||||
".sha512",
|
||||
bytes -> "foobar analysis-icu-" + Version.CURRENT + ".zip",
|
||||
bytes -> "foobar analysis-icu-" + Build.CURRENT.getQualifiedVersion() + ".zip",
|
||||
null,
|
||||
(b, p) -> null));
|
||||
assertEquals(ExitCodes.IO_ERROR, e.exitCode);
|
||||
|
@ -1101,7 +1109,8 @@ public class InstallPluginCommandTests extends ESTestCase {
|
|||
public void testPublicKeyIdMismatchToExpectedPublicKeyId() throws Exception {
|
||||
final String icu = "analysis-icu";
|
||||
final String url =
|
||||
"https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/" + icu + "-" + Version.CURRENT + ".zip";
|
||||
"https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/" + icu + "-" +
|
||||
Build.CURRENT.getQualifiedVersion() + ".zip";
|
||||
final MessageDigest digest = MessageDigest.getInstance("SHA-512");
|
||||
/*
|
||||
* To setup a situation where the expected public key ID does not match the public key ID used for signing, we generate a new public
|
||||
|
@ -1124,7 +1133,8 @@ public class InstallPluginCommandTests extends ESTestCase {
|
|||
public void testFailedSignatureVerification() throws Exception {
|
||||
final String icu = "analysis-icu";
|
||||
final String url =
|
||||
"https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/" + icu + "-" + Version.CURRENT + ".zip";
|
||||
"https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/" + icu + "-" +
|
||||
Build.CURRENT.getQualifiedVersion() + ".zip";
|
||||
final MessageDigest digest = MessageDigest.getInstance("SHA-512");
|
||||
/*
|
||||
* To setup a situation where signature verification fails, we will mutate the input byte array by modifying a single byte to some
|
||||
|
|
|
@ -377,6 +377,7 @@ This command should give you a JSON result:
|
|||
"build_hash" : "f27399d",
|
||||
"build_date" : "2016-03-30T09:51:41.449Z",
|
||||
"build_snapshot" : false,
|
||||
"build_version" : "{qualified_version}",
|
||||
"lucene_version" : "{lucene_version}",
|
||||
"minimum_wire_compatibility_version" : "1.2.3",
|
||||
"minimum_index_compatibility_version" : "1.2.3"
|
||||
|
|
|
@ -14,24 +14,24 @@ Might look like:
|
|||
["source","txt",subs="attributes,callouts"]
|
||||
------------------------------------------------------------------------------
|
||||
name component version description
|
||||
U7321H6 analysis-icu {version} The ICU Analysis plugin integrates Lucene ICU module into elasticsearch, adding ICU relates analysis components.
|
||||
U7321H6 analysis-kuromoji {version} The Japanese (kuromoji) Analysis plugin integrates Lucene kuromoji analysis module into elasticsearch.
|
||||
U7321H6 analysis-nori {version} The Korean (nori) Analysis plugin integrates Lucene nori analysis module into elasticsearch.
|
||||
U7321H6 analysis-phonetic {version} The Phonetic Analysis plugin integrates phonetic token filter analysis with elasticsearch.
|
||||
U7321H6 analysis-smartcn {version} Smart Chinese Analysis plugin integrates Lucene Smart Chinese analysis module into elasticsearch.
|
||||
U7321H6 analysis-stempel {version} The Stempel (Polish) Analysis plugin integrates Lucene stempel (polish) analysis module into elasticsearch.
|
||||
U7321H6 analysis-ukrainian {version} The Ukrainian Analysis plugin integrates the Lucene UkrainianMorfologikAnalyzer into elasticsearch.
|
||||
U7321H6 discovery-azure-classic {version} The Azure Classic Discovery plugin allows to use Azure Classic API for the unicast discovery mechanism
|
||||
U7321H6 discovery-ec2 {version} The EC2 discovery plugin allows to use AWS API for the unicast discovery mechanism.
|
||||
U7321H6 discovery-gce {version} The Google Compute Engine (GCE) Discovery plugin allows to use GCE API for the unicast discovery mechanism.
|
||||
U7321H6 ingest-attachment {version} Ingest processor that uses Apache Tika to extract contents
|
||||
U7321H6 ingest-geoip {version} Ingest processor that uses looksup geo data based on ip adresses using the Maxmind geo database
|
||||
U7321H6 ingest-user-agent {version} Ingest processor that extracts information from a user agent
|
||||
U7321H6 mapper-annotated-text {version} The Mapper Annotated_text plugin adds support for text fields with markup used to inject annotation tokens into the index.
|
||||
U7321H6 mapper-murmur3 {version} The Mapper Murmur3 plugin allows to compute hashes of a field's values at index-time and to store them in the index.
|
||||
U7321H6 mapper-size {version} The Mapper Size plugin allows document to record their uncompressed size at index time.
|
||||
U7321H6 store-smb {version} The Store SMB plugin adds support for SMB stores.
|
||||
U7321H6 transport-nio {version} The nio transport.
|
||||
U7321H6 analysis-icu {qualified_version} The ICU Analysis plugin integrates Lucene ICU module into elasticsearch, adding ICU relates analysis components.
|
||||
U7321H6 analysis-kuromoji {qualified_version} The Japanese (kuromoji) Analysis plugin integrates Lucene kuromoji analysis module into elasticsearch.
|
||||
U7321H6 analysis-nori {qualified_version} The Korean (nori) Analysis plugin integrates Lucene nori analysis module into elasticsearch.
|
||||
U7321H6 analysis-phonetic {qualified_version} The Phonetic Analysis plugin integrates phonetic token filter analysis with elasticsearch.
|
||||
U7321H6 analysis-smartcn {qualified_version} Smart Chinese Analysis plugin integrates Lucene Smart Chinese analysis module into elasticsearch.
|
||||
U7321H6 analysis-stempel {qualified_version} The Stempel (Polish) Analysis plugin integrates Lucene stempel (polish) analysis module into elasticsearch.
|
||||
U7321H6 analysis-ukrainian {qualified_version} The Ukrainian Analysis plugin integrates the Lucene UkrainianMorfologikAnalyzer into elasticsearch.
|
||||
U7321H6 discovery-azure-classic {qualified_version} The Azure Classic Discovery plugin allows to use Azure Classic API for the unicast discovery mechanism
|
||||
U7321H6 discovery-ec2 {qualified_version} The EC2 discovery plugin allows to use AWS API for the unicast discovery mechanism.
|
||||
U7321H6 discovery-gce {qualified_version} The Google Compute Engine (GCE) Discovery plugin allows to use GCE API for the unicast discovery mechanism.
|
||||
U7321H6 ingest-attachment {qualified_version} Ingest processor that uses Apache Tika to extract contents
|
||||
U7321H6 ingest-geoip {qualified_version} Ingest processor that uses looksup geo data based on ip adresses using the Maxmind geo database
|
||||
U7321H6 ingest-user-agent {qualified_version} Ingest processor that extracts information from a user agent
|
||||
U7321H6 mapper-annotated-text {qualified_version} The Mapper Annotated_text plugin adds support for text fields with markup used to inject annotation tokens into the index.
|
||||
U7321H6 mapper-murmur3 {qualified_version} The Mapper Murmur3 plugin allows to compute hashes of a field's values at index-time and to store them in the index.
|
||||
U7321H6 mapper-size {qualified_version} The Mapper Size plugin allows document to record their uncompressed size at index time.
|
||||
U7321H6 store-smb {qualified_version} The Store SMB plugin adds support for SMB stores.
|
||||
U7321H6 transport-nio {qualified_version} The nio transport.
|
||||
------------------------------------------------------------------------------
|
||||
// TESTRESPONSE[s/([.()])/\\$1/ s/U7321H6/.+/ _cat]
|
||||
|
||||
|
|
|
@ -24,6 +24,7 @@ which should give you a response something like this:
|
|||
"build_hash" : "f27399d",
|
||||
"build_date" : "2016-03-30T09:51:41.449Z",
|
||||
"build_snapshot" : false,
|
||||
"build_version" : "{qualified_version}",
|
||||
"lucene_version" : "{lucene_version}",
|
||||
"minimum_wire_compatibility_version" : "1.2.3",
|
||||
"minimum_index_compatibility_version" : "1.2.3"
|
||||
|
|
|
@ -33,7 +33,7 @@ public class LegacyDelimitedPayloadTokenFilterFactory extends DelimitedPayloadTo
|
|||
|
||||
LegacyDelimitedPayloadTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, env, name, settings);
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException(
|
||||
"[delimited_payload_filter] is not supported for new indices, use [delimited_payload] instead");
|
||||
}
|
||||
|
|
|
@ -43,7 +43,7 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory {
|
|||
this.maxGram = settings.getAsInt("max_gram", 2);
|
||||
int ngramDiff = maxGram - minGram;
|
||||
if (ngramDiff > maxAllowedNgramDiff) {
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException(
|
||||
"The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: ["
|
||||
+ maxAllowedNgramDiff + "] but was [" + ngramDiff + "]. This limit can be set by changing the ["
|
||||
|
|
|
@ -91,7 +91,7 @@ public class NGramTokenizerFactory extends AbstractTokenizerFactory {
|
|||
this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
|
||||
int ngramDiff = maxGram - minGram;
|
||||
if (ngramDiff > maxAllowedNgramDiff) {
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException(
|
||||
"The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: ["
|
||||
+ maxAllowedNgramDiff + "] but was [" + ngramDiff + "]. This limit can be set by changing the ["
|
||||
|
|
|
@ -154,7 +154,7 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera
|
|||
for (int i = 0; i < items.length; i++) {
|
||||
items[i] = Item.readItem(in);
|
||||
}
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
tookInMillis = in.readVLong();
|
||||
}
|
||||
}
|
||||
|
@ -166,7 +166,7 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera
|
|||
for (Item item : items) {
|
||||
item.writeTo(out);
|
||||
}
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
out.writeVLong(tookInMillis);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -65,7 +65,7 @@ public class IcuNormalizerTokenFilterFactory extends AbstractTokenFilterFactory
|
|||
final Normalizer2 normalizer,
|
||||
final Settings settings) {
|
||||
String unicodeSetFilter = settings.get("unicodeSetFilter");
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
|
||||
if (unicodeSetFilter != null) {
|
||||
deprecationLogger.deprecated("[unicodeSetFilter] has been deprecated in favor of [unicode_set_filter]");
|
||||
} else {
|
||||
|
|
|
@ -70,7 +70,7 @@ public class IndexingIT extends AbstractRollingTestCase {
|
|||
Version minimumIndexCompatibilityVersion = Version.CURRENT.minimumIndexCompatibilityVersion();
|
||||
assertThat("this branch is not needed if we aren't compatible with 6.0",
|
||||
minimumIndexCompatibilityVersion.onOrBefore(Version.V_6_0_0), equalTo(true));
|
||||
if (minimumIndexCompatibilityVersion.before(Version.V_7_0_0_alpha1)) {
|
||||
if (minimumIndexCompatibilityVersion.before(Version.V_7_0_0)) {
|
||||
XContentBuilder template = jsonBuilder();
|
||||
template.startObject();
|
||||
{
|
||||
|
|
|
@ -516,7 +516,7 @@ wait_for_elasticsearch_status() {
|
|||
# $1 - expected version
|
||||
check_elasticsearch_version() {
|
||||
local version=$1
|
||||
local versionToCheck=$(echo $version | sed -e 's/-SNAPSHOT//')
|
||||
local versionToCheck=$(echo $version | sed -e 's/-SNAPSHOT//' | sed -e 's/-\(alpha\|beta\|rc\)[0-9]//')
|
||||
|
||||
run curl -s localhost:9200
|
||||
[ "$status" -eq 0 ]
|
||||
|
|
|
@ -129,7 +129,7 @@ task startWildfly {
|
|||
assert index >= 0
|
||||
httpPort = Integer.parseInt(line.substring(index + 1))
|
||||
// set this system property so the test runner knows the port Wildfly is listening for HTTP requests on
|
||||
integTestRunner.systemProperty("tests.jboss.http.port", httpPort)
|
||||
integTestRunner.systemProperty("tests.jboss.root", "http://localhost:$httpPort/wildfly-$version/transport")
|
||||
} else if (line.matches('.*Http management interface listening on http://.*:\\d+/management$')) {
|
||||
assert managementPort == 0
|
||||
final int colonIndex = line.lastIndexOf(":")
|
||||
|
|
|
@ -27,10 +27,9 @@ import org.apache.http.entity.StringEntity;
|
|||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.http.util.EntityUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.TestRuleLimitSysouts;
|
||||
import org.elasticsearch.Build;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.ClusterModule;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.DeprecationHandler;
|
||||
|
@ -54,14 +53,16 @@ import static org.hamcrest.Matchers.instanceOf;
|
|||
@TestRuleLimitSysouts.Limit(bytes = 14000)
|
||||
public class WildflyIT extends LuceneTestCase {
|
||||
|
||||
Logger logger = Logger.getLogger(WildflyIT.class);
|
||||
|
||||
public void testTransportClient() throws URISyntaxException, IOException {
|
||||
try (CloseableHttpClient client = HttpClientBuilder.create().build()) {
|
||||
final String str = String.format(
|
||||
Locale.ROOT,
|
||||
"http://localhost:%d/wildfly-%s%s/transport/employees/1",
|
||||
Integer.parseInt(System.getProperty("tests.jboss.http.port")),
|
||||
Version.CURRENT,
|
||||
Build.CURRENT.isSnapshot() ? "-SNAPSHOT" : "");
|
||||
"%s/employees/1",
|
||||
System.getProperty("tests.jboss.root")
|
||||
);
|
||||
logger.info("Connecting to uri: " + str);
|
||||
final HttpPut put = new HttpPut(new URI(str));
|
||||
final String body;
|
||||
try (XContentBuilder builder = jsonBuilder()) {
|
||||
|
|
|
@ -114,6 +114,7 @@ public class Build {
|
|||
final String shortHash;
|
||||
final String date;
|
||||
final boolean isSnapshot;
|
||||
final String version;
|
||||
|
||||
flavor = Flavor.fromDisplayName(System.getProperty("es.distribution.flavor", "unknown"));
|
||||
type = Type.fromDisplayName(System.getProperty("es.distribution.type", "unknown"));
|
||||
|
@ -121,12 +122,16 @@ public class Build {
|
|||
final String esPrefix = "elasticsearch-" + Version.CURRENT;
|
||||
final URL url = getElasticsearchCodeSourceLocation();
|
||||
final String urlStr = url == null ? "" : url.toString();
|
||||
if (urlStr.startsWith("file:/") && (urlStr.endsWith(esPrefix + ".jar") || urlStr.endsWith(esPrefix + "-SNAPSHOT.jar"))) {
|
||||
if (urlStr.startsWith("file:/") && (
|
||||
urlStr.endsWith(esPrefix + ".jar") ||
|
||||
urlStr.matches("(.*)" + esPrefix + "(-)?((alpha|beta|rc)[0-9]+)?(-SNAPSHOT)?.jar")
|
||||
)) {
|
||||
try (JarInputStream jar = new JarInputStream(FileSystemUtils.openFileURLStream(url))) {
|
||||
Manifest manifest = jar.getManifest();
|
||||
shortHash = manifest.getMainAttributes().getValue("Change");
|
||||
date = manifest.getMainAttributes().getValue("Build-Date");
|
||||
isSnapshot = "true".equals(manifest.getMainAttributes().getValue("X-Compile-Elasticsearch-Snapshot"));
|
||||
version = manifest.getMainAttributes().getValue("X-Compile-Elasticsearch-Version");
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
@ -134,6 +139,7 @@ public class Build {
|
|||
// not running from the official elasticsearch jar file (unit tests, IDE, uber client jar, shadiness)
|
||||
shortHash = "Unknown";
|
||||
date = "Unknown";
|
||||
version = "Unknown";
|
||||
final String buildSnapshot = System.getProperty("build.snapshot");
|
||||
if (buildSnapshot != null) {
|
||||
try {
|
||||
|
@ -155,8 +161,12 @@ public class Build {
|
|||
throw new IllegalStateException("Error finding the build date. " +
|
||||
"Stopping Elasticsearch now so it doesn't run in subtly broken ways. This is likely a build bug.");
|
||||
}
|
||||
if (version == null) {
|
||||
throw new IllegalStateException("Error finding the build version. " +
|
||||
"Stopping Elasticsearch now so it doesn't run in subtly broken ways. This is likely a build bug.");
|
||||
}
|
||||
|
||||
CURRENT = new Build(flavor, type, shortHash, date, isSnapshot);
|
||||
CURRENT = new Build(flavor, type, shortHash, date, isSnapshot, version);
|
||||
}
|
||||
|
||||
private final boolean isSnapshot;
|
||||
|
@ -175,13 +185,18 @@ public class Build {
|
|||
private final Type type;
|
||||
private final String shortHash;
|
||||
private final String date;
|
||||
private final String version;
|
||||
|
||||
public Build(final Flavor flavor, final Type type, final String shortHash, final String date, boolean isSnapshot) {
|
||||
public Build(
|
||||
final Flavor flavor, final Type type, final String shortHash, final String date, boolean isSnapshot,
|
||||
String version
|
||||
) {
|
||||
this.flavor = flavor;
|
||||
this.type = type;
|
||||
this.shortHash = shortHash;
|
||||
this.date = date;
|
||||
this.isSnapshot = isSnapshot;
|
||||
this.version = version;
|
||||
}
|
||||
|
||||
public String shortHash() {
|
||||
|
@ -208,7 +223,14 @@ public class Build {
|
|||
String hash = in.readString();
|
||||
String date = in.readString();
|
||||
boolean snapshot = in.readBoolean();
|
||||
return new Build(flavor, type, hash, date, snapshot);
|
||||
|
||||
final String version;
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
version = in.readString();
|
||||
} else {
|
||||
version = in.getVersion().toString();
|
||||
}
|
||||
return new Build(flavor, type, hash, date, snapshot, version);
|
||||
}
|
||||
|
||||
public static void writeBuild(Build build, StreamOutput out) throws IOException {
|
||||
|
@ -221,6 +243,22 @@ public class Build {
|
|||
out.writeString(build.shortHash());
|
||||
out.writeString(build.date());
|
||||
out.writeBoolean(build.isSnapshot());
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
out.writeString(build.getQualifiedVersion());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the version as considered at build time
|
||||
*
|
||||
* Offers a way to get the fully qualified version as configured by the build.
|
||||
 * This will be the same as {@link Version} for production releases, but may include one of the qualifiers (e.g. alpha1)
|
||||
* or -SNAPSHOT for others.
|
||||
*
|
||||
* @return the fully qualified build
|
||||
*/
|
||||
public String getQualifiedVersion() {
|
||||
return version;
|
||||
}
|
||||
|
||||
public Flavor flavor() {
|
||||
|
@ -235,9 +273,18 @@ public class Build {
|
|||
return isSnapshot;
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides information about the intent of the build
|
||||
*
|
||||
* @return true if the build is intended for production use
|
||||
*/
|
||||
public boolean isProductionRelease() {
|
||||
return version.matches("[0-9]+\\.[0-9]+\\.[0-9]+");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "[" + flavor.displayName() + "][" + type.displayName + "][" + shortHash + "][" + date + "]";
|
||||
return "[" + flavor.displayName() + "][" + type.displayName + "][" + shortHash + "][" + date + "][" + version +"]";
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -265,13 +312,15 @@ public class Build {
|
|||
if (!shortHash.equals(build.shortHash)) {
|
||||
return false;
|
||||
}
|
||||
if (version.equals(build.version) == false) {
|
||||
return false;
|
||||
}
|
||||
return date.equals(build.date);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(flavor, type, isSnapshot, shortHash, date);
|
||||
return Objects.hash(flavor, type, isSnapshot, shortHash, date, version);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1007,7 +1007,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
|
|||
org.elasticsearch.common.xcontent.UnknownNamedObjectException::new, 148, UNKNOWN_VERSION_ADDED),
|
||||
TOO_MANY_BUCKETS_EXCEPTION(MultiBucketConsumerService.TooManyBucketsException.class,
|
||||
MultiBucketConsumerService.TooManyBucketsException::new, 149,
|
||||
Version.V_7_0_0_alpha1);
|
||||
Version.V_7_0_0);
|
||||
|
||||
final Class<? extends ElasticsearchException> exceptionClass;
|
||||
final CheckedFunction<StreamInput, ? extends ElasticsearchException, IOException> constructor;
|
||||
|
|
|
@ -111,10 +111,10 @@ public class Version implements Comparable<Version>, ToXContentFragment {
|
|||
public static final Version V_6_5_0 = new Version(V_6_5_0_ID, org.apache.lucene.util.Version.LUCENE_7_5_0);
|
||||
public static final int V_6_6_0_ID = 6060099;
|
||||
public static final Version V_6_6_0 = new Version(V_6_6_0_ID, org.apache.lucene.util.Version.LUCENE_7_6_0);
|
||||
public static final int V_7_0_0_alpha1_ID = 7000001;
|
||||
public static final Version V_7_0_0_alpha1 =
|
||||
new Version(V_7_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
|
||||
public static final Version CURRENT = V_7_0_0_alpha1;
|
||||
public static final int V_7_0_0_ID = 7000099;
|
||||
public static final Version V_7_0_0 = new Version(V_7_0_0_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
|
||||
public static final Version CURRENT = V_7_0_0;
|
||||
|
||||
|
||||
static {
|
||||
assert CURRENT.luceneVersion.equals(org.apache.lucene.util.Version.LATEST) : "Version must be upgraded to ["
|
||||
|
@ -127,8 +127,8 @@ public class Version implements Comparable<Version>, ToXContentFragment {
|
|||
|
||||
public static Version fromId(int id) {
|
||||
switch (id) {
|
||||
case V_7_0_0_alpha1_ID:
|
||||
return V_7_0_0_alpha1;
|
||||
case V_7_0_0_ID:
|
||||
return V_7_0_0;
|
||||
case V_6_6_0_ID:
|
||||
return V_6_6_0;
|
||||
case V_6_5_0_ID:
|
||||
|
@ -395,7 +395,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
|
|||
final String versionOutput = String.format(
|
||||
Locale.ROOT,
|
||||
"Version: %s, Build: %s/%s/%s/%s, JVM: %s",
|
||||
Version.displayVersion(Version.CURRENT, Build.CURRENT.isSnapshot()),
|
||||
Build.CURRENT.getQualifiedVersion(),
|
||||
Build.CURRENT.flavor().displayName(),
|
||||
Build.CURRENT.type().displayName(),
|
||||
Build.CURRENT.shortHash(),
|
||||
|
@ -429,10 +429,6 @@ public class Version implements Comparable<Version>, ToXContentFragment {
|
|||
return sb.toString();
|
||||
}
|
||||
|
||||
public static String displayVersion(final Version version, final boolean isSnapshot) {
|
||||
return version + (isSnapshot ? "-SNAPSHOT" : "");
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
|
|
|
@ -239,7 +239,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
|
|||
if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
|
||||
writeIndex = in.readOptionalBoolean();
|
||||
}
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
originalAliases = in.readStringArray();
|
||||
}
|
||||
}
|
||||
|
@ -256,7 +256,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
|
|||
if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
|
||||
out.writeOptionalBoolean(writeIndex);
|
||||
}
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
out.writeStringArray(originalAliases);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -460,7 +460,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
|
|||
for (int i = 0; i < aliasesSize; i++) {
|
||||
aliases.add(Alias.read(in));
|
||||
}
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().before(Version.V_7_0_0)) {
|
||||
in.readBoolean(); // updateAllTypes
|
||||
}
|
||||
waitForActiveShards = ActiveShardCount.readFrom(in);
|
||||
|
@ -485,7 +485,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
|
|||
for (Alias alias : aliases) {
|
||||
alias.writeTo(out);
|
||||
}
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
out.writeBoolean(true); // updateAllTypes
|
||||
}
|
||||
waitForActiveShards.writeTo(out);
|
||||
|
|
|
@ -297,7 +297,7 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
|
|||
indicesOptions = IndicesOptions.readIndicesOptions(in);
|
||||
type = in.readOptionalString();
|
||||
source = in.readString();
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().before(Version.V_7_0_0)) {
|
||||
in.readBoolean(); // updateAllTypes
|
||||
}
|
||||
concreteIndex = in.readOptionalWriteable(Index::new);
|
||||
|
@ -310,7 +310,7 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
|
|||
indicesOptions.writeIndicesOptions(out);
|
||||
out.writeOptionalString(type);
|
||||
out.writeString(source);
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
out.writeBoolean(true); // updateAllTypes
|
||||
}
|
||||
out.writeOptionalWriteable(concreteIndex);
|
||||
|
|
|
@ -189,7 +189,7 @@ public class DeleteRequest extends ReplicatedWriteRequest<DeleteRequest>
|
|||
type = in.readString();
|
||||
id = in.readString();
|
||||
routing = in.readOptionalString();
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().before(Version.V_7_0_0)) {
|
||||
in.readOptionalString(); // _parent
|
||||
}
|
||||
version = in.readLong();
|
||||
|
@ -202,7 +202,7 @@ public class DeleteRequest extends ReplicatedWriteRequest<DeleteRequest>
|
|||
out.writeString(type);
|
||||
out.writeString(id);
|
||||
out.writeOptionalString(routing());
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
out.writeOptionalString(null); // _parent
|
||||
}
|
||||
out.writeLong(version);
|
||||
|
|
|
@ -248,7 +248,7 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
|
|||
type = in.readString();
|
||||
id = in.readString();
|
||||
routing = in.readOptionalString();
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().before(Version.V_7_0_0)) {
|
||||
in.readOptionalString();
|
||||
}
|
||||
preference = in.readOptionalString();
|
||||
|
@ -267,7 +267,7 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
|
|||
out.writeString(type);
|
||||
out.writeString(id);
|
||||
out.writeOptionalString(routing);
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
out.writeOptionalString(null);
|
||||
}
|
||||
out.writeOptionalString(preference);
|
||||
|
|
|
@ -191,7 +191,7 @@ public class MultiGetRequest extends ActionRequest
|
|||
type = in.readOptionalString();
|
||||
id = in.readString();
|
||||
routing = in.readOptionalString();
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().before(Version.V_7_0_0)) {
|
||||
in.readOptionalString(); // _parent
|
||||
}
|
||||
storedFields = in.readOptionalStringArray();
|
||||
|
@ -207,7 +207,7 @@ public class MultiGetRequest extends ActionRequest
|
|||
out.writeOptionalString(type);
|
||||
out.writeString(id);
|
||||
out.writeOptionalString(routing);
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
out.writeOptionalString(null); // _parent
|
||||
}
|
||||
out.writeOptionalStringArray(storedFields);
|
||||
|
|
|
@ -514,7 +514,7 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
|
|||
type = in.readOptionalString();
|
||||
id = in.readOptionalString();
|
||||
routing = in.readOptionalString();
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().before(Version.V_7_0_0)) {
|
||||
in.readOptionalString(); // _parent
|
||||
}
|
||||
if (in.getVersion().before(Version.V_6_0_0_alpha1)) {
|
||||
|
@ -541,7 +541,7 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
|
|||
out.writeOptionalString(type);
|
||||
out.writeOptionalString(id);
|
||||
out.writeOptionalString(routing);
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
out.writeOptionalString(null); // _parent
|
||||
}
|
||||
if (out.getVersion().before(Version.V_6_0_0_alpha1)) {
|
||||
|
|
|
@ -61,6 +61,7 @@ public class MainResponse extends ActionResponse implements ToXContentObject {
|
|||
return version;
|
||||
}
|
||||
|
||||
|
||||
public ClusterName getClusterName() {
|
||||
return clusterName;
|
||||
}
|
||||
|
@ -81,7 +82,7 @@ public class MainResponse extends ActionResponse implements ToXContentObject {
|
|||
clusterName.writeTo(out);
|
||||
out.writeString(clusterUuid);
|
||||
Build.writeBuild(build, out);
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
out.writeBoolean(true);
|
||||
}
|
||||
}
|
||||
|
@ -94,7 +95,7 @@ public class MainResponse extends ActionResponse implements ToXContentObject {
|
|||
clusterName = new ClusterName(in);
|
||||
clusterUuid = in.readString();
|
||||
build = Build.readBuild(in);
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().before(Version.V_7_0_0)) {
|
||||
in.readBoolean();
|
||||
}
|
||||
}
|
||||
|
@ -112,6 +113,7 @@ public class MainResponse extends ActionResponse implements ToXContentObject {
|
|||
.field("build_hash", build.shortHash())
|
||||
.field("build_date", build.date())
|
||||
.field("build_snapshot", build.isSnapshot())
|
||||
.field("build_version", build.getQualifiedVersion())
|
||||
.field("lucene_version", version.luceneVersion.toString())
|
||||
.field("minimum_wire_compatibility_version", version.minimumCompatibilityVersion().toString())
|
||||
.field("minimum_index_compatibility_version", version.minimumIndexCompatibilityVersion().toString())
|
||||
|
@ -138,7 +140,9 @@ public class MainResponse extends ActionResponse implements ToXContentObject {
|
|||
buildType == null ? Build.Type.UNKNOWN : Build.Type.fromDisplayName(buildType),
|
||||
(String) value.get("build_hash"),
|
||||
(String) value.get("build_date"),
|
||||
(boolean) value.get("build_snapshot"));
|
||||
(boolean) value.get("build_snapshot"),
|
||||
(String) value.get("build_version")
|
||||
);
|
||||
response.version = Version.fromString((String) value.get("number"));
|
||||
}, (parser, context) -> parser.map(), new ParseField("version"));
|
||||
}
|
||||
|
|
|
@ -170,7 +170,7 @@ public class MultiSearchResponse extends ActionResponse implements Iterable<Mult
|
|||
for (int i = 0; i < items.length; i++) {
|
||||
items[i] = Item.readItem(in);
|
||||
}
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
tookInMillis = in.readVLong();
|
||||
}
|
||||
}
|
||||
|
@ -182,7 +182,7 @@ public class MultiSearchResponse extends ActionResponse implements Iterable<Mult
|
|||
for (Item item : items) {
|
||||
item.writeTo(out);
|
||||
}
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
out.writeVLong(tookInMillis);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -502,7 +502,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
|
|||
}
|
||||
routing = in.readOptionalString();
|
||||
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().before(Version.V_7_0_0)) {
|
||||
in.readOptionalString(); // _parent
|
||||
}
|
||||
preference = in.readOptionalString();
|
||||
|
@ -545,7 +545,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
|
|||
out.writeEnum(xContentType);
|
||||
}
|
||||
out.writeOptionalString(routing);
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
out.writeOptionalString(null); // _parent
|
||||
}
|
||||
out.writeOptionalString(preference);
|
||||
|
|
|
@ -748,7 +748,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
|
|||
type = in.readString();
|
||||
id = in.readString();
|
||||
routing = in.readOptionalString();
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().before(Version.V_7_0_0)) {
|
||||
in.readOptionalString(); // _parent
|
||||
}
|
||||
if (in.readBoolean()) {
|
||||
|
@ -760,7 +760,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
|
|||
doc = new IndexRequest();
|
||||
doc.readFrom(in);
|
||||
}
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().before(Version.V_7_0_0)) {
|
||||
String[] fields = in.readOptionalStringArray();
|
||||
if (fields != null) {
|
||||
throw new IllegalArgumentException("[fields] is no longer supported");
|
||||
|
@ -785,7 +785,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
|
|||
out.writeString(type);
|
||||
out.writeString(id);
|
||||
out.writeOptionalString(routing);
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
out.writeOptionalString(null); // _parent
|
||||
}
|
||||
|
||||
|
@ -806,7 +806,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
|
|||
doc.id(id);
|
||||
doc.writeTo(out);
|
||||
}
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
out.writeOptionalStringArray(null);
|
||||
}
|
||||
out.writeOptionalWriteable(fetchSourceContext);
|
||||
|
|
|
@ -24,7 +24,6 @@ import joptsimple.OptionSpec;
|
|||
import joptsimple.OptionSpecBuilder;
|
||||
import joptsimple.util.PathConverter;
|
||||
import org.elasticsearch.Build;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cli.EnvironmentAwareCommand;
|
||||
import org.elasticsearch.cli.ExitCodes;
|
||||
import org.elasticsearch.cli.Terminal;
|
||||
|
@ -102,12 +101,13 @@ class Elasticsearch extends EnvironmentAwareCommand {
|
|||
final String versionOutput = String.format(
|
||||
Locale.ROOT,
|
||||
"Version: %s, Build: %s/%s/%s/%s, JVM: %s",
|
||||
Version.displayVersion(Version.CURRENT, Build.CURRENT.isSnapshot()),
|
||||
Build.CURRENT.getQualifiedVersion(),
|
||||
Build.CURRENT.flavor().displayName(),
|
||||
Build.CURRENT.type().displayName(),
|
||||
Build.CURRENT.shortHash(),
|
||||
Build.CURRENT.date(),
|
||||
JvmInfo.jvmInfo().version());
|
||||
JvmInfo.jvmInfo().version()
|
||||
);
|
||||
terminal.println(versionOutput);
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -178,7 +178,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
out.writeOptionalString("now"); // 5.x default
|
||||
out.writeOptionalBoolean(null);
|
||||
}
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
out.writeBoolean(false); // hasParentField
|
||||
}
|
||||
}
|
||||
|
@ -220,7 +220,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
in.readOptionalString(); // defaultTimestamp
|
||||
in.readOptionalBoolean(); // ignoreMissing
|
||||
}
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().before(Version.V_7_0_0)) {
|
||||
in.readBoolean(); // hasParentField
|
||||
}
|
||||
}
|
||||
|
|
|
@ -575,7 +575,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
|
|||
final int numberOfShards;
|
||||
final Version indexVersionCreated =
|
||||
Version.fromId(Integer.parseInt(indexSettingsBuilder.get(IndexMetaData.SETTING_INDEX_VERSION_CREATED.getKey())));
|
||||
if (indexVersionCreated.before(Version.V_7_0_0_alpha1)) {
|
||||
if (indexVersionCreated.before(Version.V_7_0_0)) {
|
||||
numberOfShards = 5;
|
||||
} else {
|
||||
numberOfShards = 1;
|
||||
|
@ -799,7 +799,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
|
|||
* the less default split operations are supported
|
||||
*/
|
||||
public static int calculateNumRoutingShards(int numShards, Version indexVersionCreated) {
|
||||
if (indexVersionCreated.onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (indexVersionCreated.onOrAfter(Version.V_7_0_0)) {
|
||||
// only select this automatically for indices that are created on or after 7.0 this will prevent this new behaviour
|
||||
// until we have a fully upgraded cluster. Additionally it will make integratin testing easier since mixed clusters
|
||||
// will always have the behavior of the min node in the cluster.
|
||||
|
|
|
@ -40,7 +40,7 @@ public class CircuitBreakingException extends ElasticsearchException {
|
|||
super(in);
|
||||
byteLimit = in.readLong();
|
||||
bytesWanted = in.readLong();
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
durability = in.readEnum(CircuitBreaker.Durability.class);
|
||||
} else {
|
||||
durability = CircuitBreaker.Durability.PERMANENT;
|
||||
|
@ -63,7 +63,7 @@ public class CircuitBreakingException extends ElasticsearchException {
|
|||
super.writeTo(out);
|
||||
out.writeLong(byteLimit);
|
||||
out.writeLong(bytesWanted);
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
out.writeEnum(durability);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -283,7 +283,7 @@ public class Lucene {
|
|||
private static TotalHits readTotalHits(StreamInput in) throws IOException {
|
||||
long totalHits = in.readVLong();
|
||||
TotalHits.Relation totalHitsRelation = TotalHits.Relation.EQUAL_TO;
|
||||
if (in.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0)) {
|
||||
totalHitsRelation = in.readEnum(TotalHits.Relation.class);
|
||||
}
|
||||
return new TotalHits(totalHits, totalHitsRelation);
|
||||
|
@ -402,7 +402,7 @@ public class Lucene {
|
|||
|
||||
private static void writeTotalHits(StreamOutput out, TotalHits totalHits) throws IOException {
|
||||
out.writeVLong(totalHits.value);
|
||||
if (out.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0)) {
|
||||
out.writeEnum(totalHits.relation);
|
||||
} else if (totalHits.value > 0 && totalHits.relation != TotalHits.Relation.EQUAL_TO) {
|
||||
throw new IllegalArgumentException("Cannot serialize approximate total hit counts to nodes that are on a version < 7.0.0");
|
||||
|
@ -608,7 +608,7 @@ public class Lucene {
|
|||
}
|
||||
|
||||
private static Number readExplanationValue(StreamInput in) throws IOException {
|
||||
if (in.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0)) {
|
||||
final int numberType = in.readByte();
|
||||
switch (numberType) {
|
||||
case 0:
|
||||
|
@ -640,7 +640,7 @@ public class Lucene {
|
|||
}
|
||||
|
||||
private static void writeExplanationValue(StreamOutput out, Number value) throws IOException {
|
||||
if (out.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0)) {
|
||||
if (value instanceof Float) {
|
||||
out.writeByte((byte) 0);
|
||||
out.writeFloat(value.floatValue());
|
||||
|
|
|
@ -40,7 +40,7 @@ public class ShingleTokenFilterFactory extends AbstractTokenFilterFactory {
|
|||
|
||||
int shingleDiff = maxShingleSize - minShingleSize + (outputUnigrams ? 1 : 0);
|
||||
if (shingleDiff > maxAllowedShingleDiff) {
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException(
|
||||
"In Shingle TokenFilter the difference between max_shingle_size and min_shingle_size (and +1 if outputting unigrams)"
|
||||
+ " must be less than or equal to: [" + maxAllowedShingleDiff + "] but was [" + shingleDiff + "]. This limit"
|
||||
|
|
|
@ -154,7 +154,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
this.mapperRegistry = mapperRegistry;
|
||||
|
||||
if (INDEX_MAPPER_DYNAMIC_SETTING.exists(indexSettings.getSettings()) &&
|
||||
indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException("Setting " + INDEX_MAPPER_DYNAMIC_SETTING.getKey() + " was removed after version 6.0.0");
|
||||
}
|
||||
|
||||
|
@ -411,7 +411,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
Map<String, DocumentMapper> results = new LinkedHashMap<>(2);
|
||||
|
||||
if (defaultMapper != null) {
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException("The [default] mapping cannot be updated on index [" + index().getName() +
|
||||
"]: defaults mappings are not useful anymore now that indices can have at most one type.");
|
||||
} else if (reason == MergeReason.MAPPING_UPDATE) { // only log in case of explicit mapping updates
|
||||
|
|
|
@ -139,7 +139,7 @@ final class SimilarityProviders {
|
|||
if (model == null) {
|
||||
String replacement = LEGACY_BASIC_MODELS.get(basicModel);
|
||||
if (replacement != null) {
|
||||
if (indexCreatedVersion.onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (indexCreatedVersion.onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException("Basic model [" + basicModel + "] isn't supported anymore, " +
|
||||
"please use another model.");
|
||||
} else {
|
||||
|
@ -170,7 +170,7 @@ final class SimilarityProviders {
|
|||
if (effect == null) {
|
||||
String replacement = LEGACY_AFTER_EFFECTS.get(afterEffect);
|
||||
if (replacement != null) {
|
||||
if (indexCreatedVersion.onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (indexCreatedVersion.onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException("After effect [" + afterEffect +
|
||||
"] isn't supported anymore, please use another effect.");
|
||||
} else {
|
||||
|
@ -261,7 +261,7 @@ final class SimilarityProviders {
|
|||
unknownSettings.removeAll(Arrays.asList(supportedSettings));
|
||||
unknownSettings.remove("type"); // used to figure out which sim this is
|
||||
if (unknownSettings.isEmpty() == false) {
|
||||
if (version.onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (version.onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException("Unknown settings for similarity of type [" + type + "]: " + unknownSettings);
|
||||
} else {
|
||||
deprecationLogger.deprecated("Unknown settings for similarity of type [" + type + "]: " + unknownSettings);
|
||||
|
|
|
@ -59,7 +59,7 @@ public final class SimilarityService extends AbstractIndexComponent {
|
|||
static {
|
||||
Map<String, Function<Version, Supplier<Similarity>>> defaults = new HashMap<>();
|
||||
defaults.put(CLASSIC_SIMILARITY, version -> {
|
||||
if (version.onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (version.onOrAfter(Version.V_7_0_0)) {
|
||||
return () -> {
|
||||
throw new IllegalArgumentException("The [classic] similarity may not be used anymore. Please use the [BM25] "
|
||||
+ "similarity or build a custom [scripted] similarity instead.");
|
||||
|
@ -86,7 +86,7 @@ public final class SimilarityService extends AbstractIndexComponent {
|
|||
Map<String, TriFunction<Settings, Version, ScriptService, Similarity>> builtIn = new HashMap<>();
|
||||
builtIn.put(CLASSIC_SIMILARITY,
|
||||
(settings, version, script) -> {
|
||||
if (version.onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (version.onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException("The [classic] similarity may not be used anymore. Please use the [BM25] "
|
||||
+ "similarity or build a custom [scripted] similarity instead.");
|
||||
} else {
|
||||
|
@ -267,7 +267,7 @@ public final class SimilarityService extends AbstractIndexComponent {
|
|||
}
|
||||
|
||||
private static void fail(Version indexCreatedVersion, String message) {
|
||||
if (indexCreatedVersion.onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (indexCreatedVersion.onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException(message);
|
||||
} else if (indexCreatedVersion.onOrAfter(Version.V_6_5_0)) {
|
||||
deprecationLogger.deprecated(message);
|
||||
|
|
|
@ -1200,7 +1200,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
|
|||
}
|
||||
|
||||
private void write(final StreamOutput out) throws IOException {
|
||||
final int format = out.getVersion().onOrAfter(Version.V_7_0_0_alpha1) ? SERIALIZATION_FORMAT : FORMAT_6_0;
|
||||
final int format = out.getVersion().onOrAfter(Version.V_7_0_0) ? SERIALIZATION_FORMAT : FORMAT_6_0;
|
||||
out.writeVInt(format);
|
||||
out.writeString(id);
|
||||
out.writeString(type);
|
||||
|
@ -1360,7 +1360,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
|
|||
}
|
||||
|
||||
private void write(final StreamOutput out) throws IOException {
|
||||
final int format = out.getVersion().onOrAfter(Version.V_7_0_0_alpha1) ? SERIALIZATION_FORMAT : FORMAT_6_0;
|
||||
final int format = out.getVersion().onOrAfter(Version.V_7_0_0) ? SERIALIZATION_FORMAT : FORMAT_6_0;
|
||||
out.writeVInt(format);
|
||||
out.writeString(type);
|
||||
out.writeString(id);
|
||||
|
|
|
@ -124,7 +124,7 @@ public final class AnalysisModule {
|
|||
tokenFilters.register("standard", new AnalysisProvider<TokenFilterFactory>() {
|
||||
@Override
|
||||
public TokenFilterFactory get(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
|
||||
if (indexSettings.getIndexVersionCreated().before(Version.V_7_0_0_alpha1)) {
|
||||
if (indexSettings.getIndexVersionCreated().before(Version.V_7_0_0)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("standard_deprecation",
|
||||
"The [standard] token filter name is deprecated and will be removed in a future version.");
|
||||
} else {
|
||||
|
@ -182,7 +182,7 @@ public final class AnalysisModule {
|
|||
// Add "standard" for old indices (bwc)
|
||||
preConfiguredTokenFilters.register( "standard",
|
||||
PreConfiguredTokenFilter.singletonWithVersion("standard", true, (reader, version) -> {
|
||||
if (version.before(Version.V_7_0_0_alpha1)) {
|
||||
if (version.before(Version.V_7_0_0)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("standard_deprecation",
|
||||
"The [standard] token filter is deprecated and will be removed in a future version.");
|
||||
} else {
|
||||
|
|
|
@ -26,7 +26,6 @@ import org.apache.lucene.util.SetOnce;
|
|||
import org.elasticsearch.Build;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ElasticsearchTimeoutException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.Action;
|
||||
import org.elasticsearch.action.ActionModule;
|
||||
import org.elasticsearch.action.search.SearchExecutionStatsCollector;
|
||||
|
@ -279,7 +278,7 @@ public class Node implements Closeable {
|
|||
final JvmInfo jvmInfo = JvmInfo.jvmInfo();
|
||||
logger.info(
|
||||
"version[{}], pid[{}], build[{}/{}/{}/{}], OS[{}/{}/{}], JVM[{}/{}/{}/{}]",
|
||||
Version.displayVersion(Version.CURRENT, Build.CURRENT.isSnapshot()),
|
||||
Build.CURRENT.getQualifiedVersion(),
|
||||
jvmInfo.pid(),
|
||||
Build.CURRENT.flavor().displayName(),
|
||||
Build.CURRENT.type().displayName(),
|
||||
|
@ -293,7 +292,11 @@ public class Node implements Closeable {
|
|||
Constants.JAVA_VERSION,
|
||||
Constants.JVM_VERSION);
|
||||
logger.info("JVM arguments {}", Arrays.toString(jvmInfo.getInputArguments()));
|
||||
warnIfPreRelease(Version.CURRENT, Build.CURRENT.isSnapshot(), logger);
|
||||
if (Build.CURRENT.isProductionRelease() == false) {
|
||||
logger.warn(
|
||||
"version [{}] is a pre-release version of Elasticsearch and is not suitable for production",
|
||||
Build.CURRENT.getQualifiedVersion());
|
||||
}
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("using config [{}], data [{}], logs [{}], plugins [{}]",
|
||||
|
@ -577,14 +580,6 @@ public class Node implements Closeable {
|
|||
}
|
||||
}
|
||||
|
||||
static void warnIfPreRelease(final Version version, final boolean isSnapshot, final Logger logger) {
|
||||
if (!version.isRelease() || isSnapshot) {
|
||||
logger.warn(
|
||||
"version [{}] is a pre-release version of Elasticsearch and is not suitable for production",
|
||||
Version.displayVersion(version, isSnapshot));
|
||||
}
|
||||
}
|
||||
|
||||
protected TransportService newTransportService(Settings settings, Transport transport, ThreadPool threadPool,
|
||||
TransportInterceptor interceptor,
|
||||
Function<BoundTransportAddress, DiscoveryNode> localNodeFactory,
|
||||
|
|
|
@ -42,14 +42,14 @@ public class ScriptStats implements Writeable, ToXContentFragment {
|
|||
public ScriptStats(StreamInput in) throws IOException {
|
||||
compilations = in.readVLong();
|
||||
cacheEvictions = in.readVLong();
|
||||
compilationLimitTriggered = in.getVersion().onOrAfter(Version.V_7_0_0_alpha1) ? in.readVLong() : 0;
|
||||
compilationLimitTriggered = in.getVersion().onOrAfter(Version.V_7_0_0) ? in.readVLong() : 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeVLong(compilations);
|
||||
out.writeVLong(cacheEvictions);
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
out.writeVLong(compilationLimitTriggered);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -72,7 +72,7 @@ public abstract class CompositeValuesSourceBuilder<AB extends CompositeValuesSou
|
|||
} else {
|
||||
this.missingBucket = false;
|
||||
}
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().before(Version.V_7_0_0)) {
|
||||
// skip missing value for BWC
|
||||
in.readGenericValue();
|
||||
}
|
||||
|
@ -101,7 +101,7 @@ public abstract class CompositeValuesSourceBuilder<AB extends CompositeValuesSou
|
|||
if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
|
||||
out.writeBoolean(missingBucket);
|
||||
}
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
// write missing value for BWC
|
||||
out.writeGenericValue(null);
|
||||
}
|
||||
|
|
|
@ -129,7 +129,7 @@ public class DfsSearchResult extends SearchPhaseResult {
|
|||
CollectionStatistics statistics = c.value;
|
||||
assert statistics.maxDoc() >= 0;
|
||||
out.writeVLong(statistics.maxDoc());
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
// stats are always positive numbers
|
||||
out.writeVLong(statistics.docCount());
|
||||
out.writeVLong(statistics.sumTotalTermFreq());
|
||||
|
@ -176,7 +176,7 @@ public class DfsSearchResult extends SearchPhaseResult {
|
|||
final long docCount;
|
||||
final long sumTotalTermFreq;
|
||||
final long sumDocFreq;
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
// stats are always positive numbers
|
||||
docCount = in.readVLong();
|
||||
sumTotalTermFreq = in.readVLong();
|
||||
|
|
|
@ -260,7 +260,7 @@ public class SliceBuilder implements Writeable, ToXContentObject {
|
|||
if ("_uid".equals(field)) {
|
||||
// on new indices, the _id acts as a _uid
|
||||
field = IdFieldMapper.NAME;
|
||||
if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) {
|
||||
throw new IllegalArgumentException("Computing slices on the [_uid] field is illegal for 7.x indices, use [_id] instead");
|
||||
}
|
||||
DEPRECATION_LOG.deprecated("Computing slices on the [_uid] field is deprecated for 6.x indices, use [_id] instead");
|
||||
|
|
|
@ -93,7 +93,7 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
|
|||
|
||||
public Suggest(StreamInput in) throws IOException {
|
||||
// in older versions, Suggestion types were serialized as Streamable
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().before(Version.V_7_0_0)) {
|
||||
final int size = in.readVInt();
|
||||
suggestions = new ArrayList<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
|
@ -161,7 +161,7 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
|
|||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
// in older versions, Suggestion types were serialized as Streamable
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
out.writeVInt(suggestions.size());
|
||||
for (Suggestion<?> command : suggestions) {
|
||||
out.writeVInt(command.getWriteableType());
|
||||
|
@ -279,7 +279,7 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
|
|||
size = in.readVInt();
|
||||
|
||||
// this is a hack to work around slightly different serialization order of earlier versions of TermSuggestion
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1) && this instanceof TermSuggestion) {
|
||||
if (in.getVersion().before(Version.V_7_0_0) && this instanceof TermSuggestion) {
|
||||
TermSuggestion t = (TermSuggestion) this;
|
||||
t.setSort(SortBy.readFromStream(in));
|
||||
}
|
||||
|
@ -389,7 +389,7 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
|
|||
out.writeVInt(size);
|
||||
|
||||
// this is a hack to work around slightly different serialization order in older versions of TermSuggestion
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1) && this instanceof TermSuggestion) {
|
||||
if (out.getVersion().before(Version.V_7_0_0) && this instanceof TermSuggestion) {
|
||||
TermSuggestion termSuggestion = (TermSuggestion) this;
|
||||
termSuggestion.getSort().writeTo(out);
|
||||
}
|
||||
|
|
|
@ -292,7 +292,7 @@ public class GeoContextMapping extends ContextMapping<GeoQueryContext> {
|
|||
if (fieldName != null) {
|
||||
MappedFieldType mappedFieldType = fieldResolver.apply(fieldName);
|
||||
if (mappedFieldType == null) {
|
||||
if (indexVersionCreated.before(Version.V_7_0_0_alpha1)) {
|
||||
if (indexVersionCreated.before(Version.V_7_0_0)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("geo_context_mapping",
|
||||
"field [{}] referenced in context [{}] is not defined in the mapping", fieldName, name);
|
||||
} else {
|
||||
|
@ -300,7 +300,7 @@ public class GeoContextMapping extends ContextMapping<GeoQueryContext> {
|
|||
"field [{}] referenced in context [{}] is not defined in the mapping", fieldName, name);
|
||||
}
|
||||
} else if (GeoPointFieldMapper.CONTENT_TYPE.equals(mappedFieldType.typeName()) == false) {
|
||||
if (indexVersionCreated.before(Version.V_7_0_0_alpha1)) {
|
||||
if (indexVersionCreated.before(Version.V_7_0_0)) {
|
||||
deprecationLogger.deprecatedAndMaybeLog("geo_context_mapping",
|
||||
"field [{}] referenced in context [{}] must be mapped to geo_point, found [{}]",
|
||||
fieldName, name, mappedFieldType.typeName());
|
||||
|
|
|
@ -62,7 +62,7 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
|
|||
public TermSuggestion(StreamInput in) throws IOException {
|
||||
super(in);
|
||||
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
sort = SortBy.readFromStream(in);
|
||||
}
|
||||
}
|
||||
|
@ -137,7 +137,7 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
|
|||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
sort.writeTo(out);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -58,7 +58,7 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable
|
|||
|
||||
public RemoteConnectionInfo(StreamInput input) throws IOException {
|
||||
seedNodes = input.readList(TransportAddress::new);
|
||||
if (input.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (input.getVersion().before(Version.V_7_0_0)) {
|
||||
/*
|
||||
* Versions before 7.0 sent the HTTP addresses of all nodes in the
|
||||
* remote cluster here but it was expensive to fetch and we
|
||||
|
@ -79,7 +79,7 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable
|
|||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeList(seedNodes);
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
/*
|
||||
* Versions before 7.0 sent the HTTP addresses of all nodes in the
|
||||
* remote cluster here but it was expensive to fetch and we
|
||||
|
|
|
@ -41,32 +41,78 @@ public class BuildTests extends ESTestCase {
|
|||
assertNotNull(Build.CURRENT.shortHash());
|
||||
}
|
||||
|
||||
public void testIsProduction() {
|
||||
Build build = new Build(
|
||||
Build.CURRENT.flavor(), Build.CURRENT.type(), Build.CURRENT.shortHash(), Build.CURRENT.date(),
|
||||
Build.CURRENT.isSnapshot(), Math.abs(randomInt()) + "." + Math.abs(randomInt()) + "." + Math.abs(randomInt())
|
||||
);
|
||||
assertTrue(build.getQualifiedVersion(), build.isProductionRelease());
|
||||
|
||||
assertFalse(new Build(
|
||||
Build.CURRENT.flavor(), Build.CURRENT.type(), Build.CURRENT.shortHash(), Build.CURRENT.date(),
|
||||
Build.CURRENT.isSnapshot(), "7.0.0-alpha1"
|
||||
).isProductionRelease());
|
||||
|
||||
assertFalse(new Build(
|
||||
Build.CURRENT.flavor(), Build.CURRENT.type(), Build.CURRENT.shortHash(), Build.CURRENT.date(),
|
||||
Build.CURRENT.isSnapshot(), "7.0.0-alpha1-SNAPSHOT"
|
||||
).isProductionRelease());
|
||||
|
||||
assertFalse(new Build(
|
||||
Build.CURRENT.flavor(), Build.CURRENT.type(), Build.CURRENT.shortHash(), Build.CURRENT.date(),
|
||||
Build.CURRENT.isSnapshot(), "7.0.0-SNAPSHOT"
|
||||
).isProductionRelease());
|
||||
|
||||
assertFalse(new Build(
|
||||
Build.CURRENT.flavor(), Build.CURRENT.type(), Build.CURRENT.shortHash(), Build.CURRENT.date(),
|
||||
Build.CURRENT.isSnapshot(), "Unknown"
|
||||
).isProductionRelease());
|
||||
}
|
||||
|
||||
public void testEqualsAndHashCode() {
|
||||
Build build = Build.CURRENT;
|
||||
|
||||
Build another = new Build(build.flavor(), build.type(), build.shortHash(), build.date(), build.isSnapshot());
|
||||
Build another = new Build(
|
||||
build.flavor(), build.type(), build.shortHash(), build.date(), build.isSnapshot(), build.getQualifiedVersion()
|
||||
);
|
||||
assertEquals(build, another);
|
||||
assertEquals(build.hashCode(), another.hashCode());
|
||||
|
||||
final Set<Build.Flavor> otherFlavors =
|
||||
Arrays.stream(Build.Flavor.values()).filter(f -> !f.equals(build.flavor())).collect(Collectors.toSet());
|
||||
final Build.Flavor otherFlavor = randomFrom(otherFlavors);
|
||||
Build differentFlavor = new Build(otherFlavor, build.type(), build.shortHash(), build.date(), build.isSnapshot());
|
||||
Build differentFlavor = new Build(
|
||||
otherFlavor, build.type(), build.shortHash(), build.date(), build.isSnapshot(), build.getQualifiedVersion()
|
||||
);
|
||||
assertNotEquals(build, differentFlavor);
|
||||
|
||||
final Set<Build.Type> otherTypes =
|
||||
Arrays.stream(Build.Type.values()).filter(f -> !f.equals(build.type())).collect(Collectors.toSet());
|
||||
final Build.Type otherType = randomFrom(otherTypes);
|
||||
Build differentType = new Build(build.flavor(), otherType, build.shortHash(), build.date(), build.isSnapshot());
|
||||
Build differentType = new Build(
|
||||
build.flavor(), otherType, build.shortHash(), build.date(), build.isSnapshot(), build.getQualifiedVersion()
|
||||
);
|
||||
assertNotEquals(build, differentType);
|
||||
|
||||
Build differentHash = new Build(build.flavor(), build.type(), randomAlphaOfLengthBetween(3, 10), build.date(), build.isSnapshot());
|
||||
Build differentHash = new Build(
|
||||
build.flavor(), build.type(), randomAlphaOfLengthBetween(3, 10), build.date(), build.isSnapshot(),
|
||||
build.getQualifiedVersion()
|
||||
);
|
||||
assertNotEquals(build, differentHash);
|
||||
|
||||
Build differentDate = new Build(build.flavor(), build.type(), build.shortHash(), "1970-01-01", build.isSnapshot());
|
||||
Build differentDate = new Build(
|
||||
build.flavor(), build.type(), build.shortHash(), "1970-01-01", build.isSnapshot(), build.getQualifiedVersion()
|
||||
);
|
||||
assertNotEquals(build, differentDate);
|
||||
|
||||
Build differentSnapshot = new Build(build.flavor(), build.type(), build.shortHash(), build.date(), !build.isSnapshot());
|
||||
Build differentSnapshot = new Build(
|
||||
build.flavor(), build.type(), build.shortHash(), build.date(), !build.isSnapshot(), build.getQualifiedVersion()
|
||||
);
|
||||
assertNotEquals(build, differentSnapshot);
|
||||
|
||||
Build differentVersion = new Build(
|
||||
build.flavor(), build.type(), build.shortHash(), build.date(), build.isSnapshot(), "7.0.0"
|
||||
);
|
||||
assertNotEquals(build, differentVersion);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -351,7 +351,7 @@ public class ExceptionSerializationTests extends ESTestCase {
|
|||
|
||||
public void testCircuitBreakingException() throws IOException {
|
||||
CircuitBreakingException ex = serialize(new CircuitBreakingException("Too large", 0, 100, CircuitBreaker.Durability.TRANSIENT),
|
||||
Version.V_7_0_0_alpha1);
|
||||
Version.V_7_0_0);
|
||||
assertEquals("Too large", ex.getMessage());
|
||||
assertEquals(100, ex.getByteLimit());
|
||||
assertEquals(0, ex.getBytesWanted());
|
||||
|
@ -361,7 +361,7 @@ public class ExceptionSerializationTests extends ESTestCase {
|
|||
public void testTooManyBucketsException() throws IOException {
|
||||
MultiBucketConsumerService.TooManyBucketsException ex =
|
||||
serialize(new MultiBucketConsumerService.TooManyBucketsException("Too many buckets", 100),
|
||||
randomFrom(Version.V_7_0_0_alpha1));
|
||||
randomFrom(Version.V_7_0_0));
|
||||
assertEquals("Too many buckets", ex.getMessage());
|
||||
assertEquals(100, ex.getMaxBuckets());
|
||||
}
|
||||
|
|
|
@ -37,7 +37,7 @@ import java.util.Map;
|
|||
import java.util.Set;
|
||||
|
||||
import static org.elasticsearch.Version.V_6_3_0;
|
||||
import static org.elasticsearch.Version.V_7_0_0_alpha1;
|
||||
import static org.elasticsearch.Version.V_7_0_0;
|
||||
import static org.elasticsearch.test.VersionUtils.allVersions;
|
||||
import static org.elasticsearch.test.VersionUtils.randomVersion;
|
||||
import static org.hamcrest.CoreMatchers.equalTo;
|
||||
|
@ -50,30 +50,30 @@ import static org.hamcrest.Matchers.sameInstance;
|
|||
public class VersionTests extends ESTestCase {
|
||||
|
||||
public void testVersionComparison() throws Exception {
|
||||
assertThat(V_6_3_0.before(V_7_0_0_alpha1), is(true));
|
||||
assertThat(V_6_3_0.before(V_7_0_0), is(true));
|
||||
assertThat(V_6_3_0.before(V_6_3_0), is(false));
|
||||
assertThat(V_7_0_0_alpha1.before(V_6_3_0), is(false));
|
||||
assertThat(V_7_0_0.before(V_6_3_0), is(false));
|
||||
|
||||
assertThat(V_6_3_0.onOrBefore(V_7_0_0_alpha1), is(true));
|
||||
assertThat(V_6_3_0.onOrBefore(V_7_0_0), is(true));
|
||||
assertThat(V_6_3_0.onOrBefore(V_6_3_0), is(true));
|
||||
assertThat(V_7_0_0_alpha1.onOrBefore(V_6_3_0), is(false));
|
||||
assertThat(V_7_0_0.onOrBefore(V_6_3_0), is(false));
|
||||
|
||||
assertThat(V_6_3_0.after(V_7_0_0_alpha1), is(false));
|
||||
assertThat(V_6_3_0.after(V_7_0_0), is(false));
|
||||
assertThat(V_6_3_0.after(V_6_3_0), is(false));
|
||||
assertThat(V_7_0_0_alpha1.after(V_6_3_0), is(true));
|
||||
assertThat(V_7_0_0.after(V_6_3_0), is(true));
|
||||
|
||||
assertThat(V_6_3_0.onOrAfter(V_7_0_0_alpha1), is(false));
|
||||
assertThat(V_6_3_0.onOrAfter(V_7_0_0), is(false));
|
||||
assertThat(V_6_3_0.onOrAfter(V_6_3_0), is(true));
|
||||
assertThat(V_7_0_0_alpha1.onOrAfter(V_6_3_0), is(true));
|
||||
assertThat(V_7_0_0.onOrAfter(V_6_3_0), is(true));
|
||||
|
||||
assertTrue(Version.fromString("5.0.0-alpha2").onOrAfter(Version.fromString("5.0.0-alpha1")));
|
||||
assertTrue(Version.fromString("5.0.0").onOrAfter(Version.fromString("5.0.0-beta2")));
|
||||
assertTrue(Version.fromString("5.0.0-rc1").onOrAfter(Version.fromString("5.0.0-beta24")));
|
||||
assertTrue(Version.fromString("5.0.0-alpha24").before(Version.fromString("5.0.0-beta0")));
|
||||
|
||||
assertThat(V_6_3_0, is(lessThan(V_7_0_0_alpha1)));
|
||||
assertThat(V_6_3_0, is(lessThan(V_7_0_0)));
|
||||
assertThat(V_6_3_0.compareTo(V_6_3_0), is(0));
|
||||
assertThat(V_7_0_0_alpha1, is(greaterThan(V_6_3_0)));
|
||||
assertThat(V_7_0_0, is(greaterThan(V_6_3_0)));
|
||||
}
|
||||
|
||||
public void testMin() {
|
||||
|
@ -182,10 +182,10 @@ public class VersionTests extends ESTestCase {
|
|||
// from 7.0 on we are supporting the latest minor of the previous major... this might fail once we add a new version ie. 5.x is
|
||||
// released since we need to bump the supported minor in Version#minimumCompatibilityVersion()
|
||||
Version lastVersion = Version.V_6_6_0; // TODO: remove this once min compat version is a constant instead of method
|
||||
assertEquals(lastVersion.major, Version.V_7_0_0_alpha1.minimumCompatibilityVersion().major);
|
||||
assertEquals(lastVersion.major, Version.V_7_0_0.minimumCompatibilityVersion().major);
|
||||
assertEquals("did you miss to bump the minor in Version#minimumCompatibilityVersion()",
|
||||
lastVersion.minor, Version.V_7_0_0_alpha1.minimumCompatibilityVersion().minor);
|
||||
assertEquals(0, Version.V_7_0_0_alpha1.minimumCompatibilityVersion().revision);
|
||||
lastVersion.minor, Version.V_7_0_0.minimumCompatibilityVersion().minor);
|
||||
assertEquals(0, Version.V_7_0_0.minimumCompatibilityVersion().revision);
|
||||
}
|
||||
|
||||
public void testToString() {
|
||||
|
@ -340,12 +340,10 @@ public class VersionTests extends ESTestCase {
|
|||
|
||||
public void testIsCompatible() {
|
||||
assertTrue(isCompatible(Version.CURRENT, Version.CURRENT.minimumCompatibilityVersion()));
|
||||
assertFalse(isCompatible(Version.V_6_5_0, Version.V_7_0_0_alpha1));
|
||||
assertTrue(isCompatible(Version.V_6_6_0, Version.V_7_0_0_alpha1));
|
||||
assertFalse(isCompatible(Version.fromId(2000099), Version.V_7_0_0_alpha1));
|
||||
assertFalse(isCompatible(Version.V_6_5_0, Version.V_7_0_0));
|
||||
assertTrue(isCompatible(Version.V_6_6_0, Version.V_7_0_0));
|
||||
assertFalse(isCompatible(Version.fromId(2000099), Version.V_7_0_0));
|
||||
assertFalse(isCompatible(Version.fromId(2000099), Version.V_6_5_0));
|
||||
assertFalse(isCompatible(Version.fromString("7.0.0"), Version.fromString("8.0.0")));
|
||||
assertFalse(isCompatible(Version.fromString("7.0.0-alpha1"), Version.fromString("8.0.0")));
|
||||
|
||||
final Version currentMajorVersion = Version.fromId(Version.CURRENT.major * 1000000 + 99);
|
||||
final Version currentOrNextMajorVersion;
|
||||
|
@ -413,16 +411,4 @@ public class VersionTests extends ESTestCase {
|
|||
VersionTests.assertUnknownVersion(VERSION_5_1_0_UNRELEASED);
|
||||
}
|
||||
|
||||
public void testDisplayVersion() {
|
||||
final Version version = randomVersion(random());
|
||||
{
|
||||
final String displayVersion = Version.displayVersion(version, true);
|
||||
assertThat(displayVersion, equalTo(version.toString() + "-SNAPSHOT"));
|
||||
}
|
||||
{
|
||||
final String displayVersion = Version.displayVersion(version, false);
|
||||
assertThat(displayVersion, equalTo(version.toString()));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -41,7 +41,10 @@ public class MainResponseTests extends AbstractStreamableXContentTestCase<MainRe
|
|||
ClusterName clusterName = new ClusterName(randomAlphaOfLength(10));
|
||||
String nodeName = randomAlphaOfLength(10);
|
||||
final String date = new Date(randomNonNegativeLong()).toString();
|
||||
Build build = new Build(Build.Flavor.UNKNOWN, Build.Type.UNKNOWN, randomAlphaOfLength(8), date, randomBoolean());
|
||||
Build build = new Build(
|
||||
Build.Flavor.UNKNOWN, Build.Type.UNKNOWN, randomAlphaOfLength(8), date, randomBoolean(),
|
||||
randomAlphaOfLength(12)
|
||||
);
|
||||
Version version = VersionUtils.randomVersion(random());
|
||||
return new MainResponse(nodeName, version, clusterName, clusterUuid , build);
|
||||
}
|
||||
|
@ -59,7 +62,10 @@ public class MainResponseTests extends AbstractStreamableXContentTestCase<MainRe
|
|||
public void testToXContent() throws IOException {
|
||||
String clusterUUID = randomAlphaOfLengthBetween(10, 20);
|
||||
final Build current = Build.CURRENT;
|
||||
Build build = new Build(current.flavor(), current.type(), current.shortHash(), current.date(), current.isSnapshot());
|
||||
Build build = new Build(
|
||||
current.flavor(), current.type(), current.shortHash(), current.date(), current.isSnapshot(),
|
||||
current.getQualifiedVersion()
|
||||
);
|
||||
Version version = Version.CURRENT;
|
||||
MainResponse response = new MainResponse("nodeName", version, new ClusterName("clusterName"), clusterUUID, build);
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
|
@ -75,6 +81,7 @@ public class MainResponseTests extends AbstractStreamableXContentTestCase<MainRe
|
|||
+ "\"build_hash\":\"" + current.shortHash() + "\","
|
||||
+ "\"build_date\":\"" + current.date() + "\","
|
||||
+ "\"build_snapshot\":" + current.isSnapshot() + ","
|
||||
+ "\"build_version\":\"" + current.getQualifiedVersion() + "\","
|
||||
+ "\"lucene_version\":\"" + version.luceneVersion.toString() + "\","
|
||||
+ "\"minimum_wire_compatibility_version\":\"" + version.minimumCompatibilityVersion().toString() + "\","
|
||||
+ "\"minimum_index_compatibility_version\":\"" + version.minimumIndexCompatibilityVersion().toString() + "\"},"
|
||||
|
@ -98,7 +105,10 @@ public class MainResponseTests extends AbstractStreamableXContentTestCase<MainRe
|
|||
break;
|
||||
case 2:
|
||||
// toggle the snapshot flag of the original Build parameter
|
||||
build = new Build(Build.Flavor.UNKNOWN, Build.Type.UNKNOWN, build.shortHash(), build.date(), !build.isSnapshot());
|
||||
build = new Build(
|
||||
Build.Flavor.UNKNOWN, Build.Type.UNKNOWN, build.shortHash(), build.date(), !build.isSnapshot(),
|
||||
build.getQualifiedVersion()
|
||||
);
|
||||
break;
|
||||
case 3:
|
||||
version = randomValueOtherThan(version, () -> VersionUtils.randomVersion(random()));
|
||||
|
|
|
@ -51,7 +51,7 @@ public class IndicesOptionsTests extends ESTestCase {
|
|||
public void testSerialization() throws Exception {
|
||||
int iterations = randomIntBetween(5, 20);
|
||||
for (int i = 0; i < iterations; i++) {
|
||||
Version version = randomVersionBetween(random(), Version.V_7_0_0_alpha1, null);
|
||||
Version version = randomVersionBetween(random(), Version.V_7_0_0, null);
|
||||
IndicesOptions indicesOptions = IndicesOptions.fromOptions(
|
||||
randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(),
|
||||
randomBoolean());
|
||||
|
|
|
@ -20,7 +20,6 @@
|
|||
package org.elasticsearch.bootstrap;
|
||||
|
||||
import org.elasticsearch.Build;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cli.ExitCodes;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.monitor.jvm.JvmInfo;
|
||||
|
@ -63,7 +62,7 @@ public class ElasticsearchCliTests extends ESElasticsearchCliTestCase {
|
|||
|
||||
private void runTestThatVersionIsReturned(String... args) throws Exception {
|
||||
runTestVersion(ExitCodes.OK, output -> {
|
||||
assertThat(output, containsString("Version: " + Version.displayVersion(Version.CURRENT, Build.CURRENT.isSnapshot())));
|
||||
assertThat(output, containsString("Version: " + Build.CURRENT.getQualifiedVersion()));
|
||||
final String expectedBuildOutput = String.format(
|
||||
Locale.ROOT,
|
||||
"Build: %s/%s/%s/%s",
|
||||
|
|
|
@ -103,12 +103,12 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase {
|
|||
{
|
||||
final Version versionCreated = VersionUtils.randomVersionBetween(
|
||||
random(),
|
||||
Version.V_6_0_0_alpha1, VersionUtils.getPreviousVersion(Version.V_7_0_0_alpha1));
|
||||
Version.V_6_0_0_alpha1, VersionUtils.getPreviousVersion(Version.V_7_0_0));
|
||||
final Settings.Builder indexSettingsBuilder = Settings.builder().put(SETTING_VERSION_CREATED, versionCreated);
|
||||
assertThat(MetaDataCreateIndexService.IndexCreationTask.getNumberOfShards(indexSettingsBuilder), equalTo(5));
|
||||
}
|
||||
{
|
||||
final Version versionCreated = VersionUtils.randomVersionBetween(random(), Version.V_7_0_0_alpha1, Version.CURRENT);
|
||||
final Version versionCreated = VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT);
|
||||
final Settings.Builder indexSettingsBuilder = Settings.builder().put(SETTING_VERSION_CREATED, versionCreated);
|
||||
assertThat(MetaDataCreateIndexService.IndexCreationTask.getNumberOfShards(indexSettingsBuilder), equalTo(1));
|
||||
}
|
||||
|
@ -445,7 +445,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase {
|
|||
assertEquals(2048, MetaDataCreateIndexService.calculateNumRoutingShards(1024, Version.CURRENT));
|
||||
assertEquals(4096, MetaDataCreateIndexService.calculateNumRoutingShards(2048, Version.CURRENT));
|
||||
|
||||
Version latestV6 = VersionUtils.getPreviousVersion(Version.V_7_0_0_alpha1);
|
||||
Version latestV6 = VersionUtils.getPreviousVersion(Version.V_7_0_0);
|
||||
int numShards = randomIntBetween(1, 1000);
|
||||
assertEquals(numShards, MetaDataCreateIndexService.calculateNumRoutingShards(numShards, latestV6));
|
||||
assertEquals(numShards, MetaDataCreateIndexService.calculateNumRoutingShards(numShards,
|
||||
|
|
|
@ -77,7 +77,7 @@ public class MembershipActionTests extends ESTestCase {
|
|||
|
||||
final Version maxNodeVersion = nodes.getMaxNodeVersion();
|
||||
final Version minNodeVersion = nodes.getMinNodeVersion();
|
||||
if (maxNodeVersion.onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (maxNodeVersion.onOrAfter(Version.V_7_0_0)) {
|
||||
final Version tooLow = getPreviousVersion(maxNodeVersion.minimumCompatibilityVersion());
|
||||
expectThrows(IllegalStateException.class, () -> {
|
||||
if (randomBoolean()) {
|
||||
|
@ -99,7 +99,7 @@ public class MembershipActionTests extends ESTestCase {
|
|||
});
|
||||
}
|
||||
|
||||
if (minNodeVersion.onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (minNodeVersion.onOrAfter(Version.V_7_0_0)) {
|
||||
Version oldMajor = Version.V_6_4_0.minimumCompatibilityVersion();
|
||||
expectThrows(IllegalStateException.class, () -> MembershipAction.ensureMajorVersionBarrier(oldMajor, minNodeVersion));
|
||||
}
|
||||
|
|
|
@ -84,7 +84,7 @@ public class SimilarityServiceTests extends ESTestCase {
|
|||
}
|
||||
};
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> SimilarityService.validateSimilarity(Version.V_7_0_0_alpha1, negativeScoresSim));
|
||||
() -> SimilarityService.validateSimilarity(Version.V_7_0_0, negativeScoresSim));
|
||||
assertThat(e.getMessage(), Matchers.containsString("Similarities should not return negative scores"));
|
||||
|
||||
Similarity decreasingScoresWithFreqSim = new Similarity() {
|
||||
|
@ -107,7 +107,7 @@ public class SimilarityServiceTests extends ESTestCase {
|
|||
}
|
||||
};
|
||||
e = expectThrows(IllegalArgumentException.class,
|
||||
() -> SimilarityService.validateSimilarity(Version.V_7_0_0_alpha1, decreasingScoresWithFreqSim));
|
||||
() -> SimilarityService.validateSimilarity(Version.V_7_0_0, decreasingScoresWithFreqSim));
|
||||
assertThat(e.getMessage(), Matchers.containsString("Similarity scores should not decrease when term frequency increases"));
|
||||
|
||||
Similarity increasingScoresWithNormSim = new Similarity() {
|
||||
|
@ -130,7 +130,7 @@ public class SimilarityServiceTests extends ESTestCase {
|
|||
}
|
||||
};
|
||||
e = expectThrows(IllegalArgumentException.class,
|
||||
() -> SimilarityService.validateSimilarity(Version.V_7_0_0_alpha1, increasingScoresWithNormSim));
|
||||
() -> SimilarityService.validateSimilarity(Version.V_7_0_0, increasingScoresWithNormSim));
|
||||
assertThat(e.getMessage(), Matchers.containsString("Similarity scores should not increase when norm increases"));
|
||||
}
|
||||
|
||||
|
|
|
@ -260,7 +260,7 @@ public class AnalysisModuleTests extends ESTestCase {
|
|||
.put("index.analysis.analyzer.my_standard.tokenizer", "standard")
|
||||
.put("index.analysis.analyzer.my_standard.filter", "standard")
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_7_0_0_alpha1)
|
||||
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_7_0_0)
|
||||
.build();
|
||||
IndexAnalyzers analyzers = getIndexAnalyzers(settings);
|
||||
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () ->
|
||||
|
|
|
@ -18,9 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.node;
|
||||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.bootstrap.BootstrapCheck;
|
||||
import org.elasticsearch.bootstrap.BootstrapContext;
|
||||
import org.elasticsearch.cluster.ClusterName;
|
||||
|
@ -40,11 +38,6 @@ import java.util.Collections;
|
|||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.reset;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.verifyNoMoreInteractions;
|
||||
|
||||
@LuceneTestCase.SuppressFileSystems(value = "ExtrasFS")
|
||||
public class NodeTests extends ESTestCase {
|
||||
|
||||
|
@ -88,30 +81,6 @@ public class NodeTests extends ESTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testWarnIfPreRelease() {
|
||||
final Logger logger = mock(Logger.class);
|
||||
|
||||
final int id = randomIntBetween(1, 9) * 1000000;
|
||||
final Version releaseVersion = Version.fromId(id + 99);
|
||||
final Version preReleaseVersion = Version.fromId(id + randomIntBetween(0, 98));
|
||||
|
||||
Node.warnIfPreRelease(releaseVersion, false, logger);
|
||||
verifyNoMoreInteractions(logger);
|
||||
|
||||
reset(logger);
|
||||
Node.warnIfPreRelease(releaseVersion, true, logger);
|
||||
verify(logger).warn(
|
||||
"version [{}] is a pre-release version of Elasticsearch and is not suitable for production", releaseVersion + "-SNAPSHOT");
|
||||
|
||||
reset(logger);
|
||||
final boolean isSnapshot = randomBoolean();
|
||||
Node.warnIfPreRelease(preReleaseVersion, isSnapshot, logger);
|
||||
verify(logger).warn(
|
||||
"version [{}] is a pre-release version of Elasticsearch and is not suitable for production",
|
||||
preReleaseVersion + (isSnapshot ? "-SNAPSHOT" : ""));
|
||||
|
||||
}
|
||||
|
||||
public void testNodeAttributes() throws IOException {
|
||||
String attr = randomAlphaOfLength(5);
|
||||
Settings.Builder settings = baseSettings().put(Node.NODE_ATTRIBUTES.getKey() + "test_attr", attr);
|
||||
|
|
|
@ -1019,7 +1019,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
|
|||
|
||||
public void testRemoteConnectionInfoBwComp() throws IOException {
|
||||
final Version version = VersionUtils.randomVersionBetween(random(),
|
||||
Version.V_6_1_0, VersionUtils.getPreviousVersion(Version.V_7_0_0_alpha1));
|
||||
Version.V_6_1_0, VersionUtils.getPreviousVersion(Version.V_7_0_0));
|
||||
RemoteConnectionInfo expected = new RemoteConnectionInfo("test_cluster",
|
||||
Collections.singletonList(new TransportAddress(TransportAddress.META_ADDRESS, 1)),
|
||||
4, 4, new TimeValue(30, TimeUnit.MINUTES), false);
|
||||
|
|
|
@ -293,7 +293,7 @@ public class DoSection implements ExecutableSection {
|
|||
if (matches) {
|
||||
final String message = matcher.group(1);
|
||||
// noinspection StatementWithEmptyBody
|
||||
if (masterVersion.before(Version.V_7_0_0_alpha1)
|
||||
if (masterVersion.before(Version.V_7_0_0)
|
||||
&& message.equals("the default number of shards will change from [5] to [1] in 7.0.0; "
|
||||
+ "if you wish to continue using the default of [5] shards, "
|
||||
+ "you must manage this on the create index request or with an index template")) {
|
||||
|
|
|
@ -40,7 +40,7 @@ public abstract class AbstractFullClusterRestartTestCase extends ESRestTestCase
|
|||
public void init() throws IOException {
|
||||
assertThat("we don't need this branch if we aren't compatible with 6.0",
|
||||
Version.CURRENT.minimumIndexCompatibilityVersion().onOrBefore(Version.V_6_0_0), equalTo(true));
|
||||
if (isRunningAgainstOldCluster() && getOldClusterVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (isRunningAgainstOldCluster() && getOldClusterVersion().before(Version.V_7_0_0)) {
|
||||
XContentBuilder template = jsonBuilder();
|
||||
template.startObject();
|
||||
{
|
||||
|
|
|
@ -106,7 +106,7 @@ public class IndexLifecycleMetadata implements XPackMetaDataCustom {
|
|||
|
||||
@Override
|
||||
public Version getMinimalSupportedVersion() {
|
||||
return Version.V_7_0_0_alpha1;
|
||||
return Version.V_7_0_0;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -197,7 +197,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentO
|
|||
createTime = new Date(in.readVLong());
|
||||
finishedTime = in.readBoolean() ? new Date(in.readVLong()) : null;
|
||||
// for removed last_data_time field
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().before(Version.V_7_0_0)) {
|
||||
if (in.readBoolean()) {
|
||||
in.readVLong();
|
||||
}
|
||||
|
@ -218,7 +218,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentO
|
|||
Map<String, Object> readCustomSettings = in.readMap();
|
||||
customSettings = readCustomSettings == null ? null : Collections.unmodifiableMap(readCustomSettings);
|
||||
modelSnapshotId = in.readOptionalString();
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1) && in.readBoolean()) {
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0) && in.readBoolean()) {
|
||||
modelSnapshotMinVersion = Version.readVersion(in);
|
||||
} else {
|
||||
modelSnapshotMinVersion = null;
|
||||
|
@ -464,7 +464,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentO
|
|||
out.writeBoolean(false);
|
||||
}
|
||||
// for removed last_data_time field
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
out.writeBoolean(false);
|
||||
}
|
||||
if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
|
||||
|
@ -480,7 +480,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentO
|
|||
out.writeOptionalLong(resultsRetentionDays);
|
||||
out.writeMap(customSettings);
|
||||
out.writeOptionalString(modelSnapshotId);
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
if (modelSnapshotMinVersion != null) {
|
||||
out.writeBoolean(true);
|
||||
Version.writeVersion(modelSnapshotMinVersion, out);
|
||||
|
@ -693,7 +693,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentO
|
|||
createTime = in.readBoolean() ? new Date(in.readVLong()) : null;
|
||||
finishedTime = in.readBoolean() ? new Date(in.readVLong()) : null;
|
||||
// for removed last_data_time field
|
||||
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().before(Version.V_7_0_0)) {
|
||||
if (in.readBoolean()) {
|
||||
in.readVLong();
|
||||
}
|
||||
|
@ -711,7 +711,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentO
|
|||
resultsRetentionDays = in.readOptionalLong();
|
||||
customSettings = in.readMap();
|
||||
modelSnapshotId = in.readOptionalString();
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1) && in.readBoolean()) {
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0) && in.readBoolean()) {
|
||||
modelSnapshotMinVersion = Version.readVersion(in);
|
||||
} else {
|
||||
modelSnapshotMinVersion = null;
|
||||
|
@ -886,7 +886,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentO
|
|||
out.writeBoolean(false);
|
||||
}
|
||||
// for removed last_data_time field
|
||||
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().before(Version.V_7_0_0)) {
|
||||
out.writeBoolean(false);
|
||||
}
|
||||
if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
|
||||
|
@ -902,7 +902,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentO
|
|||
out.writeOptionalLong(resultsRetentionDays);
|
||||
out.writeMap(customSettings);
|
||||
out.writeOptionalString(modelSnapshotId);
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
if (modelSnapshotMinVersion != null) {
|
||||
out.writeBoolean(true);
|
||||
Version.writeVersion(modelSnapshotMinVersion, out);
|
||||
|
|
|
@ -141,7 +141,7 @@ public class JobUpdate implements Writeable, ToXContentObject {
|
|||
} else {
|
||||
jobVersion = null;
|
||||
}
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1) && in.readBoolean()) {
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0) && in.readBoolean()) {
|
||||
modelSnapshotMinVersion = Version.readVersion(in);
|
||||
} else {
|
||||
modelSnapshotMinVersion = null;
|
||||
|
@ -183,7 +183,7 @@ public class JobUpdate implements Writeable, ToXContentObject {
|
|||
out.writeBoolean(false);
|
||||
}
|
||||
}
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
if (modelSnapshotMinVersion != null) {
|
||||
out.writeBoolean(true);
|
||||
Version.writeVersion(modelSnapshotMinVersion, out);
|
||||
|
|
|
@ -118,7 +118,7 @@ public class ModelSnapshot implements ToXContentObject, Writeable {
|
|||
|
||||
public ModelSnapshot(StreamInput in) throws IOException {
|
||||
jobId = in.readString();
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
minVersion = Version.readVersion(in);
|
||||
} else {
|
||||
minVersion = Version.CURRENT.minimumCompatibilityVersion();
|
||||
|
@ -137,7 +137,7 @@ public class ModelSnapshot implements ToXContentObject, Writeable {
|
|||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(jobId);
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
Version.writeVersion(minVersion, out);
|
||||
}
|
||||
if (timestamp != null) {
|
||||
|
|
|
@ -28,7 +28,7 @@ public final class MonitoringTemplateUtils {
|
|||
* <p>
|
||||
* It may be possible for this to diverge between templates and pipelines, but for now they're the same.
|
||||
*/
|
||||
public static final int LAST_UPDATED_VERSION = Version.V_7_0_0_alpha1.id;
|
||||
public static final int LAST_UPDATED_VERSION = Version.V_7_0_0.id;
|
||||
|
||||
/**
|
||||
* Current version of templates used in their name to differentiate from breaking changes (separate from product version).
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"index_patterns": [ ".monitoring-alerts-${monitoring.template.version}" ],
|
||||
"version": 7000001,
|
||||
"version": 7000099,
|
||||
"settings": {
|
||||
"index": {
|
||||
"number_of_shards": 1,
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
"index.number_of_replicas": 0,
|
||||
"index.number_of_shards": 1
|
||||
},
|
||||
"version": 7000001,
|
||||
"version": 7000099,
|
||||
"mappings": {
|
||||
"doc": {
|
||||
"dynamic": false,
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"index_patterns": [ ".monitoring-es-${monitoring.template.version}-*" ],
|
||||
"version": 7000001,
|
||||
"version": 7000099,
|
||||
"settings": {
|
||||
"index.number_of_shards": 1,
|
||||
"index.number_of_replicas": 0,
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"index_patterns": [ ".monitoring-kibana-${monitoring.template.version}-*" ],
|
||||
"version": 7000001,
|
||||
"version": 7000099,
|
||||
"settings": {
|
||||
"index.number_of_shards": 1,
|
||||
"index.number_of_replicas": 0,
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"index_patterns": [ ".monitoring-logstash-${monitoring.template.version}-*" ],
|
||||
"version": 7000001,
|
||||
"version": 7000099,
|
||||
"settings": {
|
||||
"index.number_of_shards": 1,
|
||||
"index.number_of_replicas": 0,
|
||||
|
|
|
@ -145,7 +145,7 @@ public class IndexDeprecationChecks {
|
|||
}
|
||||
|
||||
static DeprecationIssue delimitedPayloadFilterCheck(IndexMetaData indexMetaData) {
|
||||
if (indexMetaData.getCreationVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (indexMetaData.getCreationVersion().before(Version.V_7_0_0)) {
|
||||
List<String> issues = new ArrayList<>();
|
||||
Map<String, Settings> filters = indexMetaData.getSettings().getGroups(AnalysisRegistry.INDEX_ANALYSIS_FILTER);
|
||||
for (Map.Entry<String, Settings> entry : filters.entrySet()) {
|
||||
|
|
|
@ -22,7 +22,7 @@ import static org.elasticsearch.xpack.deprecation.DeprecationChecks.INDEX_SETTIN
|
|||
public class IndexDeprecationChecksTests extends ESTestCase {
|
||||
public void testDelimitedPayloadFilterCheck() throws IOException {
|
||||
Settings settings = settings(
|
||||
VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0_alpha1)))
|
||||
VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)))
|
||||
.put("index.analysis.filter.my_delimited_payload_filter.type", "delimited_payload_filter")
|
||||
.put("index.analysis.filter.my_delimited_payload_filter.delimiter", "^")
|
||||
.put("index.analysis.filter.my_delimited_payload_filter.encoding", "identity").build();
|
||||
|
|
|
@ -128,7 +128,7 @@ public class IndexLifecycleMetadataTests extends AbstractDiffableSerializationTe
|
|||
}
|
||||
|
||||
public void testMinimumSupportedVersion() {
|
||||
assertEquals(Version.V_7_0_0_alpha1, createTestInstance().getMinimalSupportedVersion());
|
||||
assertEquals(Version.V_7_0_0, createTestInstance().getMinimalSupportedVersion());
|
||||
}
|
||||
|
||||
public void testcontext() {
|
||||
|
|
|
@ -56,7 +56,7 @@ public class ClusterAlertsUtil {
|
|||
* The last time that all watches were updated. For now, all watches have been updated in the same version and should all be replaced
|
||||
* together.
|
||||
*/
|
||||
public static final int LAST_UPDATED_VERSION = Version.V_7_0_0_alpha1.id;
|
||||
public static final int LAST_UPDATED_VERSION = Version.V_7_0_0.id;
|
||||
|
||||
/**
|
||||
* An unsorted list of Watch IDs representing resource files for Monitoring Cluster Alerts.
|
||||
|
|
|
@ -164,7 +164,7 @@ public class HttpExporter extends Exporter {
|
|||
/**
|
||||
* Minimum supported version of the remote monitoring cluster (same major).
|
||||
*/
|
||||
public static final Version MIN_SUPPORTED_CLUSTER_VERSION = Version.V_7_0_0_alpha1;
|
||||
public static final Version MIN_SUPPORTED_CLUSTER_VERSION = Version.V_7_0_0;
|
||||
|
||||
/**
|
||||
* The {@link RestClient} automatically pools connections and keeps them alive as necessary.
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
"link": "elasticsearch/indices",
|
||||
"severity": 2100,
|
||||
"type": "monitoring",
|
||||
"version_created": 7000001,
|
||||
"version_created": 7000099,
|
||||
"watch": "${monitoring.watch.id}"
|
||||
}
|
||||
},
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
"link": "elasticsearch/nodes",
|
||||
"severity": 1999,
|
||||
"type": "monitoring",
|
||||
"version_created": 7000001,
|
||||
"version_created": 7000099,
|
||||
"watch": "${monitoring.watch.id}"
|
||||
}
|
||||
},
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
"link": "elasticsearch/nodes",
|
||||
"severity": 1000,
|
||||
"type": "monitoring",
|
||||
"version_created": 7000001,
|
||||
"version_created": 7000099,
|
||||
"watch": "${monitoring.watch.id}"
|
||||
}
|
||||
},
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
"link": "kibana/instances",
|
||||
"severity": 1000,
|
||||
"type": "monitoring",
|
||||
"version_created": 7000001,
|
||||
"version_created": 7000099,
|
||||
"watch": "${monitoring.watch.id}"
|
||||
}
|
||||
},
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
"link": "logstash/instances",
|
||||
"severity": 1000,
|
||||
"type": "monitoring",
|
||||
"version_created": 7000001,
|
||||
"version_created": 7000099,
|
||||
"watch": "${monitoring.watch.id}"
|
||||
}
|
||||
},
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
"alert_index": ".monitoring-alerts-6",
|
||||
"cluster_uuid": "${monitoring.watch.cluster_uuid}",
|
||||
"type": "monitoring",
|
||||
"version_created": 7000001,
|
||||
"version_created": 7000099,
|
||||
"watch": "${monitoring.watch.id}"
|
||||
}
|
||||
},
|
||||
|
|
|
@ -1031,7 +1031,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
|
|||
static final class ValidateUpgradedSecurityIndex implements BiConsumer<DiscoveryNode, ClusterState> {
|
||||
@Override
|
||||
public void accept(DiscoveryNode node, ClusterState state) {
|
||||
if (state.getNodes().getMinNodeVersion().before(Version.V_7_0_0_alpha1)) {
|
||||
if (state.getNodes().getMinNodeVersion().before(Version.V_7_0_0)) {
|
||||
IndexMetaData indexMetaData = state.getMetaData().getIndices().get(SECURITY_INDEX_NAME);
|
||||
if (indexMetaData != null && INDEX_FORMAT_SETTING.get(indexMetaData.getSettings()) < INTERNAL_INDEX_FORMAT) {
|
||||
throw new IllegalStateException("Security index is not on the current version [" + INTERNAL_INDEX_FORMAT + "] - " +
|
||||
|
|
|
@ -470,6 +470,6 @@ public class ReservedRealmTests extends ESTestCase {
|
|||
assertThat(versionPredicate.test(Version.V_6_3_0), is(true));
|
||||
break;
|
||||
}
|
||||
assertThat(versionPredicate.test(Version.V_7_0_0_alpha1), is(true));
|
||||
assertThat(versionPredicate.test(Version.V_7_0_0), is(true));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,9 +12,9 @@ public class VersionTests extends ESTestCase {
|
|||
public void testVersionIsCurrent() {
|
||||
/* This test will only work properly in gradle because in gradle we run the tests
|
||||
* using the jar. */
|
||||
assertEquals(org.elasticsearch.Version.CURRENT.toString(), Version.CURRENT.version);
|
||||
assertNotNull(Version.CURRENT.hash);
|
||||
assertEquals(org.elasticsearch.Version.CURRENT.major, Version.CURRENT.major);
|
||||
assertEquals(org.elasticsearch.Version.CURRENT.minor, Version.CURRENT.minor);
|
||||
assertEquals(org.elasticsearch.Version.CURRENT.revision, Version.CURRENT.revision);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,10 +12,10 @@ public class VersionTests extends ESTestCase {
|
|||
public void testVersionIsCurrent() {
|
||||
/* This test will only work properly in gradle because in gradle we run the tests
|
||||
* using the jar. */
|
||||
assertEquals(org.elasticsearch.Version.CURRENT.toString(), Version.CURRENT.version);
|
||||
assertNotNull(Version.CURRENT.hash);
|
||||
assertEquals(org.elasticsearch.Version.CURRENT.major, Version.CURRENT.major);
|
||||
assertEquals(org.elasticsearch.Version.CURRENT.minor, Version.CURRENT.minor);
|
||||
assertEquals(org.elasticsearch.Version.CURRENT.revision, Version.CURRENT.revision);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -38,11 +38,12 @@ public class Version {
|
|||
|
||||
static byte[] from(String ver) {
|
||||
String[] parts = ver.split("[.-]");
|
||||
if (parts.length == 3 || parts.length == 4) {
|
||||
return new byte[] { Byte.parseByte(parts[0]), Byte.parseByte(parts[1]), Byte.parseByte(parts[2]) };
|
||||
// Allow for optional snapshot and qualifier
|
||||
if (parts.length < 3 || parts.length > 5) {
|
||||
throw new IllegalArgumentException("Invalid version " + ver);
|
||||
}
|
||||
else {
|
||||
throw new IllegalArgumentException("Invalid version " + ver);
|
||||
return new byte[] { Byte.parseByte(parts[0]), Byte.parseByte(parts[1]), Byte.parseByte(parts[2]) };
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -55,7 +55,7 @@ public class IndexingIT extends AbstractUpgradeTestCase {
|
|||
Version minimumIndexCompatibilityVersion = Version.CURRENT.minimumIndexCompatibilityVersion();
|
||||
assertThat("this branch is not needed if we aren't compatible with 6.0",
|
||||
minimumIndexCompatibilityVersion.onOrBefore(Version.V_6_0_0), equalTo(true));
|
||||
if (minimumIndexCompatibilityVersion.before(Version.V_7_0_0_alpha1)) {
|
||||
if (minimumIndexCompatibilityVersion.before(Version.V_7_0_0)) {
|
||||
XContentBuilder template = jsonBuilder();
|
||||
template.startObject();
|
||||
{
|
||||
|
|
|
@ -33,7 +33,7 @@ public class TokenBackwardsCompatibilityIT extends AbstractUpgradeTestCase {
|
|||
Version minimumIndexCompatibilityVersion = Version.CURRENT.minimumIndexCompatibilityVersion();
|
||||
assertThat("this branch is not needed if we aren't compatible with 6.0",
|
||||
minimumIndexCompatibilityVersion.onOrBefore(Version.V_6_0_0), equalTo(true));
|
||||
if (minimumIndexCompatibilityVersion.before(Version.V_7_0_0_alpha1)) {
|
||||
if (minimumIndexCompatibilityVersion.before(Version.V_7_0_0)) {
|
||||
XContentBuilder template = jsonBuilder();
|
||||
template.startObject();
|
||||
{
|
||||
|
|
Loading…
Reference in New Issue