Merge branch 'master' into index-lifecycle

Colin Goodheart-Smithe 2018-11-02 10:56:35 +00:00
commit fc6e1f7f3f
No known key found for this signature in database
GPG Key ID: F975E7BDD739B3C7
45 changed files with 273 additions and 267 deletions

View File

@ -229,34 +229,6 @@ Pass arbitrary jvm arguments.
./gradlew test -Dtests.jvm.argline="-Djava.security.debug=access,failure"
------------------------------
== Backwards Compatibility Tests
Running backwards compatibility tests is disabled by default since it
requires a release version of elasticsearch to be present on the test system.
To run backwards compatibility tests untar or unzip a release and run the tests
with the following command:
---------------------------------------------------------------------------
./gradlew test -Dtests.filter="@backwards" -Dtests.bwc.version=x.y.z -Dtests.bwc.path=/path/to/elasticsearch -Dtests.security.manager=false
---------------------------------------------------------------------------
Note that backwards tests must be run with security manager disabled.
If the elasticsearch release is placed under `./backwards/elasticsearch-x.y.z` the path
can be omitted:
---------------------------------------------------------------------------
./gradlew test -Dtests.filter="@backwards" -Dtests.bwc.version=x.y.z -Dtests.security.manager=false
---------------------------------------------------------------------------
To setup the bwc test environment execute the following steps (provided you are
already in your elasticsearch clone):
---------------------------------------------------------------------------
$ mkdir backwards && cd backwards
$ curl -O https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.2.1.tar.gz
$ tar -xzf elasticsearch-1.2.1.tar.gz
---------------------------------------------------------------------------
== Running verification tasks
To run all verification tasks, including static checks, unit tests, and integration tests:
@ -554,25 +526,28 @@ environment variable.
== Testing backwards compatibility
Backwards compatibility tests exist to test upgrading from each supported version
-to the current version. To run all backcompat tests use:
+to the current version. To run them all use:
-------------------------------------------------
./gradlew bwcTest
-------------------------------------------------
-A specific version can be tested as well. For example, to test backcompat with
+A specific version can be tested as well. For example, to test bwc with
version 5.3.2 run:
-------------------------------------------------
./gradlew v5.3.2#bwcTest
-------------------------------------------------
-When running `./gradlew check`, some minimal backcompat checks are run. Which version
-is tested depends on the branch. On master, this will test against the current
-stable branch. On the stable branch, it will test against the latest release
-branch. Finally, on a release branch, it will test against the most recent release.
-=== BWC Testing against a specific remote/branch
+Tests are run for versions that are not yet released but with which the current version will be compatible.
+These are automatically checked out and built from source.
+See link:./buildSrc/src/main/java/org/elasticsearch/gradle/VersionCollection.java[VersionCollection]
+and link:./distribution/bwc/build.gradle[distribution/bwc/build.gradle]
+for more information.
+When running `./gradlew check`, minimal bwc checks are also run against compatible versions that are not yet released.
+==== BWC Testing against a specific remote/branch
Sometimes a backward compatibility change spans two versions. A common case is a new functionality
that needs a BWC bridge in an unreleased versioned of a release branch (for example, 5.x).
@ -597,7 +572,7 @@ will contain your change.
. Push both branches to your remote repository.
. Run the tests with `./gradlew check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec.5.x=index_req_bwc_5.x`.
-== Skip fetching latest
+==== Skip fetching latest
For some BWC testing scenarios, you want to use the local clone of the
repository without fetching latest. For these use cases, you can set the system

View File

@ -39,7 +39,7 @@ if (properties.get("org.elasticsearch.acceptScanTOS", "false") == "true") {
// common maven publishing configuration
subprojects {
group = 'org.elasticsearch'
-version = VersionProperties.elasticsearch.toString()
+version = VersionProperties.elasticsearch
description = "Elasticsearch subproject ${project.path}"
}
@ -282,7 +282,7 @@ subprojects {
// other packages (e.g org.elasticsearch.client) will point to server rather than
// their own artifacts.
if (project.plugins.hasPlugin(BuildPlugin) || project.plugins.hasPlugin(PluginBuildPlugin)) {
-String artifactsHost = VersionProperties.elasticsearch.isSnapshot() ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co"
+String artifactsHost = VersionProperties.elasticsearch.endsWith("-SNAPSHOT") ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co"
Closure sortClosure = { a, b -> b.group <=> a.group }
Closure depJavadocClosure = { shadowed, dep ->
if (dep.group == null || false == dep.group.startsWith('org.elasticsearch')) {

View File

@ -41,46 +41,29 @@ if (project == rootProject) {
* Propagating version.properties to the rest of the build *
*****************************************************************************/
-Properties props = new Properties()
-props.load(project.file('version.properties').newDataInputStream())
-version = props.getProperty('elasticsearch')
-boolean snapshot = "true".equals(System.getProperty("build.snapshot", "true"));
-if (snapshot) {
-// we update the version property to reflect if we are building a snapshot or a release build
-// we write this back out below to load it in the Build.java which will be shown in rest main action
-// to indicate this being a snapshot build or a release build.
-version += "-SNAPSHOT"
-props.put("elasticsearch", version);
-}
-File tempPropertiesFile = new File(project.buildDir, "version.properties")
-task writeVersionProperties {
-inputs.properties(props)
-outputs.file(tempPropertiesFile)
-doLast {
-OutputStream stream = Files.newOutputStream(tempPropertiesFile.toPath());
-try {
-props.store(stream, "UTF-8");
-} finally {
-stream.close();
-}
-}
-}
-processResources {
-dependsOn writeVersionProperties
-from tempPropertiesFile
-}
-if (JavaVersion.current() < JavaVersion.VERSION_1_10) {
-throw new GradleException('At least Java 10 is required to build elasticsearch gradle tools')
-}
+// we update the version property to reflect if we are building a snapshot or a release build
+// we write this back out below to load it in the Build.java which will be shown in rest main action
+// to indicate this being a snapshot build or a release build.
+Properties props = VersionPropertiesLoader.loadBuildSrcVersion(project.file('version.properties'))
+version = props.getProperty("elasticsearch")
+processResources {
+doLast {
+Writer writer = file("$destinationDir/version.properties").newWriter()
+try {
+props.store(writer, "Generated version properties")
+} finally {
+writer.close()
+}
+}
+}
/*****************************************************************************
* Java version *
*****************************************************************************/
+if (JavaVersion.current() < JavaVersion.VERSION_1_10) {
+throw new GradleException('At least Java 10 is required to build elasticsearch gradle tools')
+}
// Gradle 4.10 does not support setting this to 11 yet
targetCompatibility = "10"
sourceCompatibility = "10"
@ -232,3 +215,42 @@ if (project != rootProject) {
generatePomFileForPluginMavenPublication.enabled = false
}
}
// Define this here because we need it early.
class VersionPropertiesLoader {
static Properties loadBuildSrcVersion(File input) throws IOException {
Properties props = new Properties();
InputStream is = new FileInputStream(input)
try {
props.load(is)
} finally {
is.close()
}
loadBuildSrcVersion(props, System.getProperties())
return props
}
protected static void loadBuildSrcVersion(Properties loadedProps, Properties systemProperties) {
String elasticsearch = loadedProps.getProperty("elasticsearch")
if (elasticsearch == null) {
throw new IllegalStateException("Elasticsearch version is missing from properties.")
}
if (elasticsearch.matches("[0-9]+\\.[0-9]+\\.[0-9]+") == false) {
throw new IllegalStateException(
"Expected elasticsearch version to be numbers only of the form X.Y.Z but it was: " +
elasticsearch
)
}
String qualifier = systemProperties.getProperty("build.version_qualifier", "alpha1");
if (qualifier.isEmpty() == false) {
if (qualifier.matches("(alpha|beta|rc)\\d+") == false) {
throw new IllegalStateException("Invalid qualifier: " + qualifier)
}
elasticsearch += "-" + qualifier
}
if ("true".equals(systemProperties.getProperty("build.snapshot", "true"))) {
elasticsearch += "-SNAPSHOT"
}
loadedProps.put("elasticsearch", elasticsearch)
}
}
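For illustration only (not part of this commit): a minimal Java sketch of how the qualifier and snapshot flags combine with the bare `X.Y.Z` read from `version.properties`; the class and helper names here are invented.
```java
// Purely illustrative: mirrors the resolution order shown in VersionPropertiesLoader above.
public class VersionPropertySketch {
    static String resolve(String base, String qualifier, boolean snapshot) {
        String version = base;
        if (qualifier.isEmpty() == false) {
            version += "-" + qualifier;   // e.g. alpha1, beta2, rc1
        }
        if (snapshot) {
            version += "-SNAPSHOT";       // snapshot builds append the marker last
        }
        return version;
    }

    public static void main(String[] args) {
        System.out.println(resolve("7.0.0", "", true));        // 7.0.0-SNAPSHOT
        System.out.println(resolve("7.0.0", "alpha1", true));  // 7.0.0-alpha1-SNAPSHOT
        System.out.println(resolve("7.0.0", "alpha1", false)); // 7.0.0-alpha1
    }
}
```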

View File

@ -696,18 +696,12 @@ class BuildPlugin implements Plugin<Project> {
jarTask.destinationDir = new File(project.buildDir, 'distributions')
// fixup the jar manifest
jarTask.doFirst {
final Version versionWithoutSnapshot = new Version(
VersionProperties.elasticsearch.major,
VersionProperties.elasticsearch.minor,
VersionProperties.elasticsearch.revision,
VersionProperties.elasticsearch.suffix,
false)
// this doFirst is added before the info plugin, therefore it will run
// after the doFirst added by the info plugin, and we can override attributes
jarTask.manifest.attributes(
-'X-Compile-Elasticsearch-Version': versionWithoutSnapshot,
+'X-Compile-Elasticsearch-Version': VersionProperties.elasticsearch.replace("-SNAPSHOT", ""),
'X-Compile-Lucene-Version': VersionProperties.lucene,
-'X-Compile-Elasticsearch-Snapshot': VersionProperties.elasticsearch.isSnapshot(),
+'X-Compile-Elasticsearch-Snapshot': VersionProperties.isElasticsearchSnapshot(),
'Build-Date': ZonedDateTime.now(ZoneOffset.UTC),
'Build-Java-Version': project.compilerJavaVersion)
if (jarTask.manifest.attributes.containsKey('Change') == false) {

View File

@ -42,7 +42,7 @@ public class DocsTestPlugin extends RestTestPlugin {
* to the version being built for testing but needs to resolve to
* the last released version for docs. */
'\\{version\\}':
-VersionProperties.elasticsearch.toString().replace('-SNAPSHOT', ''),
+VersionProperties.elasticsearch.replace('-SNAPSHOT', ''),
'\\{lucene_version\\}' : VersionProperties.lucene.replaceAll('-snapshot-\\w+$', ''),
'\\{build_flavor\\}' :
project.integTestCluster.distribution.startsWith('oss-') ? 'oss' : 'default',

View File

@ -98,7 +98,7 @@ public class PluginBuildPlugin extends BuildPlugin {
project.pluginProperties.extension.name + "-client"
)
project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom generatePOMTask ->
-generatePOMTask.ext.pomFileName = "${project.archivesBaseName}-client-${project.version}.pom"
+generatePOMTask.ext.pomFileName = "${project.archivesBaseName}-client-${project.versions.elasticsearch}.pom"
}
} else {
project.plugins.withType(MavenPublishPlugin).whenPluginAdded {

View File

@ -76,7 +76,7 @@ class PluginPropertiesTask extends Copy {
'name': extension.name,
'description': extension.description,
'version': stringSnap(extension.version),
-'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch.toString()),
+'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch),
'javaVersion': project.targetCompatibility as String,
'classname': extension.classname,
'extendedPlugins': extension.extendedPlugins.join(','),

View File

@ -22,6 +22,7 @@ import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin
import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask
+import org.elasticsearch.gradle.VersionProperties
import org.gradle.api.JavaVersion
import org.gradle.api.Project
import org.gradle.api.Task
@ -220,7 +221,7 @@ class PrecommitTasks {
private static Task configureLoggerUsage(Project project) {
project.configurations.create('loggerUsagePlugin')
project.dependencies.add('loggerUsagePlugin',
-"org.elasticsearch.test:logger-usage:${org.elasticsearch.gradle.VersionProperties.elasticsearch}")
+"org.elasticsearch.test:logger-usage:${VersionProperties.elasticsearch}")
return project.tasks.create('loggerUsageCheck', LoggerUsageTask.class) {
classpath = project.configurations.loggerUsagePlugin
javaHome = project.runtimeJavaHome

View File

@ -101,7 +101,7 @@ class ClusterFormationTasks {
// from here on everything else works the same as if it's the current version, we fetch the BWC version
// from mirrors using gradles built-in mechanism etc.
-configureDistributionDependency(project, config.distribution, bwcDistro, config.bwcVersion)
+configureDistributionDependency(project, config.distribution, bwcDistro, config.bwcVersion.toString())
for (Map.Entry<String, Object> entry : config.plugins.entrySet()) {
configureBwcPluginDependency(project, entry.getValue(), bwcPlugins, config.bwcVersion)
}
@ -112,9 +112,12 @@ class ClusterFormationTasks {
// we start N nodes and out of these N nodes there might be M bwc nodes.
// for each of those nodes we might have a different configuration
final Configuration distro
-final Version elasticsearchVersion
+final String elasticsearchVersion
if (i < config.numBwcNodes) {
-elasticsearchVersion = config.bwcVersion
+elasticsearchVersion = config.bwcVersion.toString()
+if (project.bwcVersions.unreleased.contains(config.bwcVersion)) {
+elasticsearchVersion += "-SNAPSHOT"
+}
distro = bwcDistro
} else {
elasticsearchVersion = VersionProperties.elasticsearch
@ -156,8 +159,10 @@ class ClusterFormationTasks {
}
/** Adds a dependency on the given distribution */
-static void configureDistributionDependency(Project project, String distro, Configuration configuration, Version elasticsearchVersion) {
-if (elasticsearchVersion.before('6.3.0') && distro.startsWith('oss-')) {
+static void configureDistributionDependency(Project project, String distro, Configuration configuration, String elasticsearchVersion) {
+if (Version.fromString(elasticsearchVersion).before('6.3.0') &&
+distro.startsWith('oss-')
+) {
distro = distro.substring('oss-'.length())
}
String packaging = distro
@ -227,7 +232,7 @@ class ClusterFormationTasks {
setup = configureAddKeystoreFileTasks(prefix, project, setup, node)
if (node.config.plugins.isEmpty() == false) {
-if (node.nodeVersion == VersionProperties.elasticsearch) {
+if (node.nodeVersion == Version.fromString(VersionProperties.elasticsearch)) {
setup = configureCopyPluginsTask(taskName(prefix, node, 'copyPlugins'), project, setup, node, prefix)
} else {
setup = configureCopyBwcPluginsTask(taskName(prefix, node, 'copyBwcPlugins'), project, setup, node, prefix)
@ -591,7 +596,7 @@ class ClusterFormationTasks {
static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, String pluginName, String prefix) {
final FileCollection pluginZip;
-if (node.nodeVersion != VersionProperties.elasticsearch) {
+if (node.nodeVersion != Version.fromString(VersionProperties.elasticsearch)) {
pluginZip = project.configurations.getByName(pluginBwcConfigurationName(prefix, pluginName))
} else {
pluginZip = project.configurations.getByName(pluginConfigurationName(prefix, pluginName))

View File

@ -112,7 +112,7 @@ class NodeInfo {
Version nodeVersion
/** Holds node configuration for part of a test cluster. */
-NodeInfo(ClusterConfiguration config, int nodeNum, Project project, String prefix, Version nodeVersion, File sharedDir) {
+NodeInfo(ClusterConfiguration config, int nodeNum, Project project, String prefix, String nodeVersion, File sharedDir) {
this.config = config
this.nodeNum = nodeNum
this.project = project
@ -124,7 +124,7 @@ class NodeInfo {
}
baseDir = new File(project.buildDir, "cluster/${prefix} node${nodeNum}")
pidFile = new File(baseDir, 'es.pid')
-this.nodeVersion = nodeVersion
+this.nodeVersion = Version.fromString(nodeVersion)
homeDir = homeDir(baseDir, config.distribution, nodeVersion)
pathConf = pathConf(baseDir, config.distribution, nodeVersion)
if (config.dataDir != null) {
@ -173,11 +173,11 @@ class NodeInfo {
}
-if (nodeVersion.before("6.2.0")) {
+if (this.nodeVersion.before("6.2.0")) {
javaVersion = 8
-} else if (nodeVersion.onOrAfter("6.2.0") && nodeVersion.before("6.3.0")) {
+} else if (this.nodeVersion.onOrAfter("6.2.0") && this.nodeVersion.before("6.3.0")) {
javaVersion = 9
-} else if (nodeVersion.onOrAfter("6.3.0") && nodeVersion.before("6.5.0")) {
+} else if (this.nodeVersion.onOrAfter("6.3.0") && this.nodeVersion.before("6.5.0")) {
javaVersion = 10
}
@ -301,7 +301,7 @@ class NodeInfo {
}
/** Returns the directory elasticsearch home is contained in for the given distribution */
-static File homeDir(File baseDir, String distro, Version nodeVersion) {
+static File homeDir(File baseDir, String distro, String nodeVersion) {
String path
switch (distro) {
case 'integ-test-zip':
@ -321,7 +321,7 @@ class NodeInfo {
return new File(baseDir, path)
}
-static File pathConf(File baseDir, String distro, Version nodeVersion) {
+static File pathConf(File baseDir, String distro, String nodeVersion) {
switch (distro) {
case 'integ-test-zip':
case 'zip':

View File

@ -12,28 +12,17 @@ public final class Version implements Comparable<Version> {
private final int minor;
private final int revision;
private final int id;
private final boolean snapshot;
/**
* Suffix on the version name.
*/
private final String suffix;
private static final Pattern pattern =
Pattern.compile("(\\d)+\\.(\\d+)\\.(\\d+)(-alpha\\d+|-beta\\d+|-rc\\d+)?(-SNAPSHOT)?");
public Version(int major, int minor, int revision) {
this(major, minor, revision, "", false);
}
public Version(int major, int minor, int revision, String suffix, boolean snapshot) {
Objects.requireNonNull(major, "major version can't be null");
Objects.requireNonNull(minor, "minor version can't be null");
Objects.requireNonNull(revision, "revision version can't be null");
this.major = major;
this.minor = minor;
this.revision = revision;
this.snapshot = snapshot;
this.suffix = suffix == null ? "" : suffix;
// currently snapshot is not taken into account
this.id = major * 10000000 + minor * 100000 + revision * 1000;
@ -58,17 +47,13 @@ public final class Version implements Comparable<Version> {
return new Version(
Integer.parseInt(matcher.group(1)),
parseSuffixNumber(matcher.group(2)),
-parseSuffixNumber(matcher.group(3)),
-matcher.group(4),
-matcher.group(5) != null
+parseSuffixNumber(matcher.group(3))
);
}
@Override
public String toString() {
-final String snapshotStr = snapshot ? "-SNAPSHOT" : "";
-return String.valueOf(getMajor()) + "." + String.valueOf(getMinor()) + "." + String.valueOf(getRevision()) +
-(suffix == null ? "" : suffix) + snapshotStr;
+return String.valueOf(getMajor()) + "." + String.valueOf(getMinor()) + "." + String.valueOf(getRevision());
}
public boolean before(Version compareTo) {
@ -103,19 +88,6 @@ public final class Version implements Comparable<Version> {
return after(fromString(compareTo));
}
public boolean onOrBeforeIncludingSuffix(Version otherVersion) {
if (id != otherVersion.getId()) {
return id < otherVersion.getId();
}
if (suffix.equals("")) {
return otherVersion.getSuffix().equals("");
}
return otherVersion.getSuffix().equals("") || suffix.compareTo(otherVersion.getSuffix()) < 0;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
@ -128,8 +100,7 @@ public final class Version implements Comparable<Version> {
@Override
public int hashCode() {
-return Objects.hash(major, minor, revision, id, snapshot, suffix);
+return Objects.hash(major, minor, revision, id);
}
public int getMajor() {
@ -148,14 +119,6 @@ public final class Version implements Comparable<Version> {
return id;
}
public boolean isSnapshot() {
return snapshot;
}
public String getSuffix() {
return suffix;
}
@Override
public int compareTo(Version other) {
return Integer.compare(getId(), other.getId());
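As an illustration of the slimmed-down class (a hedged sketch, not code from the commit; it assumes this Version lives in `org.elasticsearch.gradle` alongside VersionProperties): qualifiers and `-SNAPSHOT` are still accepted by the parsing regex, but they no longer influence `toString()`, equality, or ordering.
```java
import org.elasticsearch.gradle.Version;

// Illustrative expectations only, based on the simplified fields shown above.
public class VersionSketch {
    public static void main(String[] args) {
        Version release = Version.fromString("7.0.0");
        Version alpha = Version.fromString("7.0.0-alpha1-SNAPSHOT");

        System.out.println(alpha);                                        // 7.0.0
        System.out.println(release.equals(alpha));                        // true - same major.minor.revision
        System.out.println(release.compareTo(alpha));                     // 0
        System.out.println(Version.fromString("6.5.0").before("7.0.0"));  // true
    }
}
```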

View File

@ -100,7 +100,7 @@ public class VersionCollection {
}
public VersionCollection(List<String> versionLines) {
-this(versionLines, VersionProperties.getElasticsearch());
+this(versionLines, Version.fromString(VersionProperties.getElasticsearch()));
}
protected VersionCollection(List<String> versionLines, Version currentVersionProperty) {
@ -110,12 +110,10 @@ public class VersionCollection {
.map(match -> new Version(
Integer.parseInt(match.group(1)),
Integer.parseInt(match.group(2)),
-Integer.parseInt(match.group(3)),
-(match.group(4) == null ? "" : match.group(4)).replace('_', '-'),
-false
+Integer.parseInt(match.group(3))
))
.sorted()
-.filter(version -> version.getSuffix().isEmpty() || version.equals(currentVersionProperty))
+.distinct()
.collect(Collectors.groupingBy(Version::getMajor, Collectors.toList()));
if (groupByMajor.isEmpty()) {
@ -131,22 +129,11 @@ public class VersionCollection {
assertCurrentVersionMatchesParsed(currentVersionProperty);
assertNoOlderThanTwoMajors();
markUnreleasedAsSnapshot();
}
private void markUnreleasedAsSnapshot() {
getUnreleased().forEach(uv ->
groupByMajor.get(uv.getMajor()).set(
groupByMajor.get(uv.getMajor()).indexOf(uv),
new Version(uv.getMajor(), uv.getMinor(), uv.getRevision(),uv.getSuffix(), true)
)
);
} }
private void assertNoOlderThanTwoMajors() {
Set<Integer> majors = groupByMajor.keySet();
-if (majors.size() != 2 && currentVersion.getMinor() != 0 && currentVersion.getMajor() != 0) {
+if (majors.size() != 2 && currentVersion.getMinor() != 0 && currentVersion.getRevision() != 0) {
throw new IllegalStateException(
"Expected exactly 2 majors in parsed versions but found: " + majors
);

View File

@ -10,7 +10,7 @@ import java.util.Properties;
* Accessor for shared dependency versions used by elasticsearch, namely the elasticsearch and lucene versions.
*/
public class VersionProperties {
-public static Version getElasticsearch() {
+public static String getElasticsearch() {
return elasticsearch;
}
@ -22,12 +22,12 @@ public class VersionProperties {
return versions;
}
-private static final Version elasticsearch;
+private static final String elasticsearch;
private static final String lucene;
private static final Map<String, String> versions = new HashMap<String, String>();
static {
Properties props = getVersionProperties();
-elasticsearch = Version.fromString(props.getProperty("elasticsearch"));
+elasticsearch = props.getProperty("elasticsearch");
lucene = props.getProperty("lucene");
for (String property : props.stringPropertyNames()) {
versions.put(property, props.getProperty(property));
@ -38,13 +38,17 @@ public class VersionProperties {
Properties props = new Properties();
InputStream propsStream = VersionProperties.class.getResourceAsStream("/version.properties");
if (propsStream == null) {
-throw new RuntimeException("/version.properties resource missing");
+throw new IllegalStateException("/version.properties resource missing");
}
try {
props.load(propsStream);
} catch (IOException e) {
-throw new RuntimeException(e);
+throw new IllegalStateException("Failed to load version properties", e);
}
return props;
}
public static boolean isElasticsearchSnapshot() {
return elasticsearch.endsWith("-SNAPSHOT");
}
}
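For context, a hypothetical consumer-side snippet (not from the commit) showing how build code can work with the now string-typed accessor, parsing with `Version.fromString()` only when ordering semantics are needed:
```java
import org.elasticsearch.gradle.Version;
import org.elasticsearch.gradle.VersionProperties;

// Hypothetical usage: the accessor returns a plain string such as "7.0.0-SNAPSHOT".
public class VersionPropertiesUsageSketch {
    public static void main(String[] args) {
        String es = VersionProperties.getElasticsearch();
        boolean snapshot = VersionProperties.isElasticsearchSnapshot();

        Version parsed = Version.fromString(es);   // structural comparisons go through Version
        System.out.println(parsed + (snapshot ? " (snapshot build)" : " (release build)"));
    }
}
```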

View File

@ -101,7 +101,7 @@ public class VersionCollectionTests extends GradleUnitTestCase {
formatVersionToLine("6.5.0"),
formatVersionToLine("7.0.0")
),
-Version.fromString("7.0.0")
+Version.fromString("6.5.0")
);
}

View File

@ -33,11 +33,11 @@ public class VersionTests extends GradleUnitTestCase {
public ExpectedException expectedEx = ExpectedException.none();
public void testVersionParsing() {
-assertVersionEquals("7.0.1", 7, 0, 1, "", false);
-assertVersionEquals("7.0.1-alpha2", 7, 0, 1, "-alpha2", false);
-assertVersionEquals("5.1.2-rc3", 5, 1, 2, "-rc3", false);
-assertVersionEquals("6.1.2-SNAPSHOT", 6, 1, 2, "", true);
-assertVersionEquals("6.1.2-beta1-SNAPSHOT", 6, 1, 2, "-beta1", true);
+assertVersionEquals("7.0.1", 7, 0, 1);
+assertVersionEquals("7.0.1-alpha2", 7, 0, 1);
+assertVersionEquals("5.1.2-rc3", 5, 1, 2);
+assertVersionEquals("6.1.2-SNAPSHOT", 6, 1, 2);
+assertVersionEquals("6.1.2-beta1-SNAPSHOT", 6, 1, 2);
}
public void testCompareWithStringVersions() {
@ -74,21 +74,12 @@ public class VersionTests extends GradleUnitTestCase {
}
public void testToString() {
-assertEquals("7.0.1", new Version(7, 0, 1, null, false).toString());
+assertEquals("7.0.1", new Version(7, 0, 1).toString());
}
public void testCompareVersions() {
-assertEquals(0, new Version(7, 0, 0, null, true).compareTo(
-new Version(7, 0, 0, null, true)
-));
-assertEquals(0, new Version(7, 0, 0, null, true).compareTo(
-new Version(7, 0, 0, "", true)
-));
-assertEquals(
-0,
-new Version(7, 0, 0, "-alpha1", false).compareTo(
-new Version(7, 0, 0, "", true))
+assertEquals(0,
+new Version(7, 0, 0).compareTo(new Version(7, 0, 0))
);
}
@ -108,17 +99,11 @@ public class VersionTests extends GradleUnitTestCase {
assertEquals(smaller + " should be smaller than " + bigger, -1, smaller.compareTo(bigger));
}
-private void assertVersionEquals(String stringVersion, int major, int minor, int revision, String sufix, boolean snapshot) {
+private void assertVersionEquals(String stringVersion, int major, int minor, int revision) {
Version version = Version.fromString(stringVersion);
assertEquals(major, version.getMajor());
assertEquals(minor, version.getMinor());
assertEquals(revision, version.getRevision());
if (snapshot) {
assertTrue("Expected version to be a snapshot but it was not", version.isSnapshot());
} else {
assertFalse("Expected version not to be a snapshot but it was", version.isSnapshot());
}
assertEquals(sufix, version.getSuffix());
}
}

View File

@ -1,4 +1,4 @@
-elasticsearch = 7.0.0-alpha1
+elasticsearch = 7.0.0
lucene = 8.0.0-snapshot-7d0a7782fa
# optional dependencies

View File

@ -129,10 +129,10 @@ bwcVersions.forPreviousUnreleased { VersionCollection.UnreleasedVersionInfo unre
baseDir += project == 'zip' ? '/archives' : '/packages'
// add oss variant first
projectDirs.add("${baseDir}/oss-${project}")
-artifactFiles.add(file("${checkoutDir}/${baseDir}/oss-${project}/build/distributions/elasticsearch-oss-${bwcVersion}.${project}"))
+artifactFiles.add(file("${checkoutDir}/${baseDir}/oss-${project}/build/distributions/elasticsearch-oss-${bwcVersion}-SNAPSHOT.${project}"))
}
projectDirs.add("${baseDir}/${project}")
-artifactFiles.add(file("${checkoutDir}/${baseDir}/${project}/build/distributions/elasticsearch-${bwcVersion}.${project}"))
+artifactFiles.add(file("${checkoutDir}/${baseDir}/${project}/build/distributions/elasticsearch-${bwcVersion}-SNAPSHOT.${project}"))
}
task buildBwcVersion(type: Exec) {

View File

@ -3,7 +3,9 @@
[[configuring-metricbeat]]
=== Monitoring {es} with {metricbeat}
-beta[] In 6.5 and later, you can use {metricbeat} to collect data about {es}
+beta[]
+In 6.5 and later, you can use {metricbeat} to collect data about {es}
and ship it to the monitoring cluster, rather than routing it through exporters
as described in <<configuring-monitoring>>.

View File

@ -118,6 +118,10 @@ The above request will yield the following response:
<2> The `_percolator_document_slot` field indicates which document has matched with this query.
Useful when percolating multiple document simultaneously.
TIP: To provide a simple example, this documentation uses one index `my-index` for both the percolate queries and documents.
This set-up can work well when there are just a few percolate queries registered. However, with heavier usage it is recommended
to store queries and documents in separate indices. Please see <<how-it-works, How it Works Under the Hood>> for more details.
[float]
==== Parameters
@ -643,6 +647,7 @@ The above search request returns a response similar to this:
query with `_name` parameter set to `query1`.
[float]
+[[how-it-works]]
==== How it Works Under the Hood
When indexing a document into an index that has the <<percolator,percolator field type>> mapping configured, the query
@ -679,3 +684,11 @@ GET /_search
NOTE: The above example assumes that there is a `query` field of type
`percolator` in the mappings.
Given the design of percolation, it often makes sense to use separate indices for the percolate queries and documents
being percolated, as opposed to a single index as we do in examples. There are a few benefits to this approach:
- Because percolate queries contain a different set of fields from the percolated documents, using two separate indices
allows for fields to be stored in a denser, more efficient way.
- Percolate queries do not scale in the same way as other queries, so percolation performance may benefit from using
a different index configuration, like the number of primary shards.

View File

@ -77,11 +77,11 @@ opening spend more time in the `opening` state. Defaults to `2`.
These settings are for advanced use cases; the default values are generally
sufficient:
-`xpack.ml.max_anomaly_records`:: (<<cluster-update-settings,Dynamic>>)
+`xpack.ml.max_anomaly_records` (<<cluster-update-settings,Dynamic>>)::
The maximum number of records that are output per bucket. The default value is
`500`.
-`xpack.ml.max_lazy_ml_nodes`:: (<<cluster-update-settings,Dynamic>>)
+`xpack.ml.max_lazy_ml_nodes` (<<cluster-update-settings,Dynamic>>)::
The number of lazily spun up Machine Learning nodes. Useful in situations
where ML nodes are not desired until the first Machine Learning Job
is opened. It defaults to `0` and has a maximum acceptable value of `3`.

View File

@ -43,14 +43,14 @@ to `true`. Its default value is `false`.
The `xpack.monitoring.collection` settings control how data is collected from
your Elasticsearch nodes.
-`xpack.monitoring.collection.enabled`:: (<<cluster-update-settings,Dynamic>>)
+`xpack.monitoring.collection.enabled` (<<cluster-update-settings,Dynamic>>)::
added[6.3.0] Set to `true` to enable the collection of monitoring data. When
this setting is `false` (default), {es} monitoring data is not collected and
all monitoring data from other sources such as {kib}, Beats, and Logstash is
ignored.
-`xpack.monitoring.collection.interval`:: (<<cluster-update-settings,Dynamic>>)
+`xpack.monitoring.collection.interval` (<<cluster-update-settings,Dynamic>>)::
Setting to `-1` to disable data collection is no longer supported beginning with
7.0.0. deprecated[6.3.0, Use `xpack.monitoring.collection.enabled` set to
@ -60,7 +60,7 @@ Controls how often data samples are collected. Defaults to `10s`. If you
modify the collection interval, set the `xpack.monitoring.min_interval_seconds`
option in `kibana.yml` to the same value.
-`xpack.monitoring.elasticsearch.collection.enabled`:: (<<cluster-update-settings,Dynamic>>)
+`xpack.monitoring.elasticsearch.collection.enabled` (<<cluster-update-settings,Dynamic>>)::
Controls whether statistics about your {es} cluster should be collected. Defaults to `true`.
This is different from xpack.monitoring.collection.enabled, which allows you to enable or disable
@ -72,7 +72,7 @@ to pass through this cluster.
Sets the timeout for collecting the cluster statistics. Defaults to `10s`.
-`xpack.monitoring.collection.indices`:: (<<cluster-update-settings,Dynamic>>)
+`xpack.monitoring.collection.indices` (<<cluster-update-settings,Dynamic>>)::
Controls which indices Monitoring collects data from. Defaults to all indices. Specify the index names
as a comma-separated list, for example `test1,test2,test3`. Names can include wildcards, for

View File

@ -11,7 +11,7 @@ setup:
settings:
bucket: ${bucket}
client: "integration_test"
-base_path: ${base_path}
+base_path: "${base_path}"
---
"Snapshot/Restore with repository-gcs":
@ -23,7 +23,7 @@ setup:
- match: { repository.settings.bucket : ${bucket} }
- match: { repository.settings.client : "integration_test" }
-- match: { repository.settings.base_path : ${base_path} }
+- match: { repository.settings.base_path : "${base_path}" }
# Index documents
- do:

View File

@ -12,7 +12,7 @@ setup:
settings:
bucket: ${permanent_bucket}
client: integration_test_permanent
-base_path: ${permanent_base_path}
+base_path: "${permanent_base_path}"
canned_acl: private
storage_class: standard
@ -26,7 +26,7 @@ setup:
- match: { repository_permanent.settings.bucket : ${permanent_bucket} }
- match: { repository_permanent.settings.client : "integration_test_permanent" }
-- match: { repository_permanent.settings.base_path : ${permanent_base_path} }
+- match: { repository_permanent.settings.base_path : "${permanent_base_path}" }
- match: { repository_permanent.settings.canned_acl : "private" }
- match: { repository_permanent.settings.storage_class : "standard" }
- is_false: repository_permanent.settings.access_key

View File

@ -12,7 +12,7 @@ setup:
settings:
bucket: ${temporary_bucket}
client: integration_test_temporary
-base_path: ${temporary_base_path}
+base_path: "${temporary_base_path}"
canned_acl: private
storage_class: standard
@ -26,7 +26,7 @@ setup:
- match: { repository_temporary.settings.bucket : ${temporary_bucket} }
- match: { repository_temporary.settings.client : "integration_test_temporary" }
-- match: { repository_temporary.settings.base_path : ${temporary_base_path} }
+- match: { repository_temporary.settings.base_path : "${temporary_base_path}" }
- match: { repository_temporary.settings.canned_acl : "private" }
- match: { repository_temporary.settings.storage_class : "standard" }
- is_false: repository_temporary.settings.access_key

View File

@ -12,7 +12,7 @@ setup:
settings:
bucket: ${ec2_bucket}
client: integration_test_ec2
-base_path: ${ec2_base_path}
+base_path: "${ec2_base_path}"
canned_acl: private
storage_class: standard
@ -26,7 +26,7 @@ setup:
- match: { repository_ec2.settings.bucket : ${ec2_bucket} }
- match: { repository_ec2.settings.client : "integration_test_ec2" }
-- match: { repository_ec2.settings.base_path : ${ec2_base_path} }
+- match: { repository_ec2.settings.base_path : "${ec2_base_path}" }
- match: { repository_ec2.settings.canned_acl : "private" }
- match: { repository_ec2.settings.storage_class : "standard" }
- is_false: repository_ec2.settings.access_key

View File

@ -12,7 +12,7 @@ setup:
settings:
bucket: ${ecs_bucket}
client: integration_test_ecs
-base_path: ${ecs_base_path}
+base_path: "${ecs_base_path}"
canned_acl: private
storage_class: standard
@ -26,7 +26,7 @@ setup:
- match: { repository_ecs.settings.bucket : ${ecs_bucket} }
- match: { repository_ecs.settings.client : "integration_test_ecs" }
-- match: { repository_ecs.settings.base_path : ${ecs_base_path} }
+- match: { repository_ecs.settings.base_path : "${ecs_base_path}" }
- match: { repository_ecs.settings.canned_acl : "private" }
- match: { repository_ecs.settings.storage_class : "standard" }
- is_false: repository_ecs.settings.access_key

View File

@ -107,6 +107,7 @@ public class QueryProfilerIT extends ESIntegTestCase {
* search for each query. It then does some basic sanity checking of score and hits
* to make sure the profiling doesn't interfere with the hits being returned
*/
+@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32492")
public void testProfileMatchesRegular() throws Exception {
createIndex("test");
ensureGreen();

View File

@ -71,4 +71,5 @@ precommit.dependsOn namingConventionsMain
test.configure {
systemProperty 'tests.gradle_index_compat_versions', bwcVersions.indexCompatible.join(',')
systemProperty 'tests.gradle_wire_compat_versions', bwcVersions.wireCompatible.join(',')
+systemProperty 'tests.gradle_unreleased_versions', bwcVersions.unreleased.join(',')
}

View File

@ -413,7 +413,6 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
* Test creates the {@link Query} from the {@link QueryBuilder} under test and delegates the
* assertions being made on the result to the implementing subclass.
*/
-@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34188")
public void testToQuery() throws IOException {
for (int runs = 0; runs < NUMBER_OF_TESTQUERIES; runs++) {
QueryShardContext context = createShardContext();

View File

@ -24,8 +24,10 @@ import org.elasticsearch.common.collect.Tuple;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
+import java.util.Set;
import static java.util.stream.Collectors.toCollection;
import static java.util.stream.Collectors.toList;
@ -389,13 +391,19 @@ public class VersionUtilsTests extends ESTestCase {
private final List<String> unreleased = new ArrayList<>();
private VersionsFromProperty(String property) {
Set<String> allUnreleased = new HashSet<>(Arrays.asList(
System.getProperty("tests.gradle_unreleased_versions", "").split(",")
));
if (allUnreleased.isEmpty()) {
fail("[tests.gradle_unreleased_versions] not set or empty. Gradle should set this before running.");
}
String versions = System.getProperty(property);
-assertNotNull("Couldn't find [" + property + "]. Gradle should set these before running the tests.", versions);
+assertNotNull("Couldn't find [" + property + "]. Gradle should set this before running the tests.", versions);
logger.info("Looked up versions [{}={}]", property, versions);
for (String version : versions.split(",")) {
-if (version.endsWith("-SNAPSHOT")) {
-unreleased.add(version.replace("-SNAPSHOT", ""));
+if (allUnreleased.contains(version)) {
+unreleased.add(version);
} else {
released.add(version);
}

View File

@ -17,6 +17,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.LicenseUtils;
import org.elasticsearch.tasks.Task;
@ -28,6 +29,7 @@ import org.elasticsearch.xpack.core.ccr.AutoFollowStats;
import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction;
import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction;
+import java.io.IOException;
import java.util.Objects;
public class TransportCcrStatsAction extends TransportMasterNodeAction<CcrStatsAction.Request, CcrStatsAction.Response> {
@ -70,7 +72,12 @@ public class TransportCcrStatsAction extends TransportMasterNodeAction<CcrStatsA
@Override
protected CcrStatsAction.Response newResponse() {
-return new CcrStatsAction.Response();
+throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
+}
+@Override
+protected CcrStatsAction.Response read(StreamInput in) throws IOException {
+return new CcrStatsAction.Response(in);
}
@Override

View File

@ -17,6 +17,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@ -24,6 +25,7 @@ import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata;
import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern;
import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction;
+import java.io.IOException;
import java.util.Collections;
import java.util.Map;
@ -48,7 +50,12 @@ public class TransportGetAutoFollowPatternAction
@Override
protected GetAutoFollowPatternAction.Response newResponse() {
-return new GetAutoFollowPatternAction.Response();
+throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
+}
+@Override
+protected GetAutoFollowPatternAction.Response read(StreamInput in) throws IOException {
+return new GetAutoFollowPatternAction.Response(in);
}
@Override

View File

@ -27,6 +27,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.license.LicenseUtils;
@ -38,6 +39,7 @@ import org.elasticsearch.xpack.ccr.CcrSettings;
import org.elasticsearch.xpack.core.ccr.action.PutFollowAction;
import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction;
+import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
@ -83,7 +85,12 @@ public final class TransportPutFollowAction
@Override @Override
protected PutFollowAction.Response newResponse() { protected PutFollowAction.Response newResponse() {
return new PutFollowAction.Response(); throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}
@Override
protected PutFollowAction.Response read(StreamInput in) throws IOException {
return new PutFollowAction.Response(in);
} }
@Override @Override

@@ -6,17 +6,19 @@
  */
 package org.elasticsearch.xpack.ccr;
 
+import org.elasticsearch.action.delete.DeleteResponse;
+import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.node.NodeClosedException;
 import org.elasticsearch.test.InternalTestCluster;
 import org.elasticsearch.xpack.CcrIntegTestCase;
 import org.elasticsearch.xpack.core.ccr.action.PutFollowAction;
@@ -48,15 +50,15 @@ public class FollowerFailOverIT extends CcrIntegTestCase {
         for (int i = 0; i < threads.length; i++) {
             threads[i] = new Thread(() -> {
                 while (stopped.get() == false) {
-                    try {
-                        if (frequently()) {
-                            String id = Integer.toString(frequently() ? docID.incrementAndGet() : between(0, 10)); // sometimes update
-                            leaderClient().prepareIndex("leader-index", "doc", id).setSource("{\"f\":" + id + "}", XContentType.JSON).get();
-                        } else {
-                            String id = Integer.toString(between(0, docID.get()));
-                            leaderClient().prepareDelete("leader-index", "doc", id).get();
-                        }
-                    } catch (NodeClosedException ignored) {
+                    if (frequently()) {
+                        String id = Integer.toString(frequently() ? docID.incrementAndGet() : between(0, 10)); // sometimes update
+                        IndexResponse indexResponse = leaderClient().prepareIndex("leader-index", "doc", id)
+                            .setSource("{\"f\":" + id + "}", XContentType.JSON).get();
+                        logger.info("--> index id={} seq_no={}", indexResponse.getId(), indexResponse.getSeqNo());
+                    } else {
+                        String id = Integer.toString(between(0, docID.get()));
+                        DeleteResponse deleteResponse = leaderClient().prepareDelete("leader-index", "doc", id).get();
+                        logger.info("--> delete id={} seq_no={}", deleteResponse.getId(), deleteResponse.getSeqNo());
                     }
                 }
             });
@@ -69,6 +71,7 @@ public class FollowerFailOverIT extends CcrIntegTestCase {
         follow.getFollowRequest().setMaxWriteRequestOperationCount(randomIntBetween(32, 2048));
         follow.getFollowRequest().setMaxWriteRequestSize(new ByteSizeValue(randomIntBetween(1, 4096), ByteSizeUnit.KB));
         follow.getFollowRequest().setMaxOutstandingWriteRequests(randomIntBetween(1, 10));
+        logger.info("--> follow params {}", Strings.toString(follow.getFollowRequest()));
         followerClient().execute(PutFollowAction.INSTANCE, follow).get();
         ensureFollowerGreen("follower-index");
         atLeastDocsIndexed(followerClient(), "follower-index", between(20, 60));

@@ -5,7 +5,8 @@
  */
 package org.elasticsearch.xpack.ccr.action;
 
-import org.elasticsearch.test.AbstractStreamableTestCase;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
 import org.elasticsearch.xpack.core.ccr.AutoFollowStats;
 import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction;
 import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction;
@@ -13,11 +14,11 @@ import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction;
 import static org.elasticsearch.xpack.ccr.action.AutoFollowStatsTests.randomReadExceptions;
 import static org.elasticsearch.xpack.ccr.action.StatsResponsesTests.createStatsResponse;
 
-public class AutoFollowStatsResponseTests extends AbstractStreamableTestCase<CcrStatsAction.Response> {
+public class AutoFollowStatsResponseTests extends AbstractWireSerializingTestCase<CcrStatsAction.Response> {
 
     @Override
-    protected CcrStatsAction.Response createBlankInstance() {
-        return new CcrStatsAction.Response();
+    protected Writeable.Reader<CcrStatsAction.Response> instanceReader() {
+        return CcrStatsAction.Response::new;
     }
 
     @Override
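On the test side, the response tests move from AbstractStreamableTestCase, which needed createBlankInstance() plus readFrom, to AbstractWireSerializingTestCase, which only needs a Writeable.Reader. A minimal sketch with a hypothetical MyResponse (not one of the real CCR responses):

---------------------------------------------------------------------------
// Hypothetical test class; MyResponse stands in for the converted CCR responses.
public class MyResponseTests extends AbstractWireSerializingTestCase<MyResponse> {

    @Override
    protected Writeable.Reader<MyResponse> instanceReader() {
        return MyResponse::new; // round-trips go through the StreamInput constructor
    }

    @Override
    protected MyResponse createTestInstance() {
        return new MyResponse(randomNonNegativeLong());
    }
}
---------------------------------------------------------------------------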

@@ -5,10 +5,11 @@
  */
 package org.elasticsearch.xpack.ccr.action;
 
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.test.AbstractStreamableTestCase;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
 import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern;
 import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction;
@@ -16,11 +17,11 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
-public class GetAutoFollowPatternResponseTests extends AbstractStreamableTestCase<GetAutoFollowPatternAction.Response> {
+public class GetAutoFollowPatternResponseTests extends AbstractWireSerializingTestCase<GetAutoFollowPatternAction.Response> {
 
     @Override
-    protected GetAutoFollowPatternAction.Response createBlankInstance() {
-        return new GetAutoFollowPatternAction.Response();
+    protected Writeable.Reader<GetAutoFollowPatternAction.Response> instanceReader() {
+        return GetAutoFollowPatternAction.Response::new;
     }
 
     @Override

@@ -5,14 +5,15 @@
  */
 package org.elasticsearch.xpack.ccr.action;
 
-import org.elasticsearch.test.AbstractStreamableTestCase;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
 import org.elasticsearch.xpack.core.ccr.action.PutFollowAction;
 
-public class PutFollowActionResponseTests extends AbstractStreamableTestCase<PutFollowAction.Response> {
+public class PutFollowActionResponseTests extends AbstractWireSerializingTestCase<PutFollowAction.Response> {
 
     @Override
-    protected PutFollowAction.Response createBlankInstance() {
-        return new PutFollowAction.Response();
+    protected Writeable.Reader<PutFollowAction.Response> instanceReader() {
+        return PutFollowAction.Response::new;
     }
 
     @Override

@@ -12,6 +12,7 @@ import org.elasticsearch.action.support.master.MasterNodeRequest;
 import org.elasticsearch.action.Action;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.core.ccr.AutoFollowStats;
@@ -30,7 +31,12 @@ public class CcrStatsAction extends Action<CcrStatsAction.Response> {
     @Override
     public Response newResponse() {
-        return new Response();
+        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
+    }
+
+    @Override
+    public Writeable.Reader<Response> getResponseReader() {
+        return Response::new;
     }
 
     public static class Request extends MasterNodeRequest<Request> {
@@ -55,15 +61,19 @@ public class CcrStatsAction extends Action<CcrStatsAction.Response> {
     public static class Response extends ActionResponse implements ToXContentObject {
 
-        private AutoFollowStats autoFollowStats;
-        private FollowStatsAction.StatsResponses followStats;
+        private final AutoFollowStats autoFollowStats;
+        private final FollowStatsAction.StatsResponses followStats;
 
         public Response(AutoFollowStats autoFollowStats, FollowStatsAction.StatsResponses followStats) {
             this.autoFollowStats = Objects.requireNonNull(autoFollowStats);
             this.followStats = Objects.requireNonNull(followStats);
         }
 
-        public Response() {
+        public Response(StreamInput in) throws IOException {
+            super(in);
+            autoFollowStats = new AutoFollowStats(in);
+            followStats = new FollowStatsAction.StatsResponses();
+            followStats.readFrom(in);
         }
 
         public AutoFollowStats getAutoFollowStats() {
@@ -74,14 +84,6 @@ public class CcrStatsAction extends Action<CcrStatsAction.Response> {
             return followStats;
         }
 
-        @Override
-        public void readFrom(StreamInput in) throws IOException {
-            super.readFrom(in);
-            autoFollowStats = new AutoFollowStats(in);
-            followStats = new FollowStatsAction.StatsResponses();
-            followStats.readFrom(in);
-        }
-
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
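The response classes get the matching wire-format change: fields become final, the no-arg constructor plus readFrom pair is replaced by a StreamInput constructor, and the owning Action hands that constructor out through getResponseReader(). A condensed, hypothetical sketch of the pattern, with a single example field instead of the real CCR payloads:

---------------------------------------------------------------------------
// Hypothetical, condensed version of the pattern applied to the responses above.
public class MyResponse extends ActionResponse {

    private final long took; // example payload only

    public MyResponse(long took) {
        this.took = took;
    }

    public MyResponse(StreamInput in) throws IOException {
        super(in);            // replaces the old readFrom(StreamInput) override
        took = in.readVLong();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeVLong(took); // write order must mirror the read order in the constructor
    }
}
---------------------------------------------------------------------------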

@@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.support.master.MasterNodeReadRequest;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern;
@@ -31,7 +32,12 @@ public class GetAutoFollowPatternAction extends Action<GetAutoFollowPatternActio
     @Override
     public Response newResponse() {
-        return new Response();
+        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
+    }
+
+    @Override
+    public Writeable.Reader<Response> getResponseReader() {
+        return Response::new;
     }
 
     public static class Request extends MasterNodeReadRequest<Request> {
@@ -81,21 +87,17 @@ public class GetAutoFollowPatternAction extends Action<GetAutoFollowPatternActio
     public static class Response extends ActionResponse implements ToXContentObject {
 
-        private Map<String, AutoFollowPattern> autoFollowPatterns;
+        private final Map<String, AutoFollowPattern> autoFollowPatterns;
 
         public Response(Map<String, AutoFollowPattern> autoFollowPatterns) {
            this.autoFollowPatterns = autoFollowPatterns;
         }
 
-        public Response() {
-        }
-
         public Map<String, AutoFollowPattern> getAutoFollowPatterns() {
             return autoFollowPatterns;
         }
 
-        @Override
-        public void readFrom(StreamInput in) throws IOException {
+        public Response(StreamInput in) throws IOException {
             super.readFrom(in);
             autoFollowPatterns = in.readMap(StreamInput::readString, AutoFollowPattern::new);
         }

@@ -15,6 +15,7 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ObjectParser;
@@ -49,7 +50,12 @@ public final class PutFollowAction extends Action<PutFollowAction.Response> {
     @Override
     public Response newResponse() {
-        return new Response();
+        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
+    }
+
+    @Override
+    public Writeable.Reader<Response> getResponseReader() {
+        return Response::new;
     }
 
     public static class Request extends AcknowledgedRequest<Request> implements IndicesRequest, ToXContentObject {
@@ -211,13 +217,9 @@ public final class PutFollowAction extends Action<PutFollowAction.Response> {
     public static class Response extends ActionResponse implements ToXContentObject {
 
-        private boolean followIndexCreated;
-        private boolean followIndexShardsAcked;
-        private boolean indexFollowingStarted;
-
-        public Response() {
-        }
+        private final boolean followIndexCreated;
+        private final boolean followIndexShardsAcked;
+        private final boolean indexFollowingStarted;
 
         public Response(boolean followIndexCreated, boolean followIndexShardsAcked, boolean indexFollowingStarted) {
             this.followIndexCreated = followIndexCreated;
@@ -237,9 +239,8 @@ public final class PutFollowAction extends Action<PutFollowAction.Response> {
             return indexFollowingStarted;
         }
 
-        @Override
-        public void readFrom(StreamInput in) throws IOException {
-            super.readFrom(in);
+        public Response(StreamInput in) throws IOException {
+            super(in);
             followIndexCreated = in.readBoolean();
             followIndexShardsAcked = in.readBoolean();
             indexFollowingStarted = in.readBoolean();

@@ -107,9 +107,9 @@ public class In extends NamedExpression implements ScriptWeaver {
     @Override
     public ScriptTemplate asScript() {
         ScriptTemplate leftScript = asScript(value);
-        // remove duplicates
+        // fold & remove duplicates
         List<Object> values = new ArrayList<>(new LinkedHashSet<>(Foldables.valuesOf(list, value.dataType())));
-        values.removeIf(Objects::isNull);
 
         return new ScriptTemplate(
             formatTemplate(String.format(Locale.ROOT, "{sql}.in(%s, {})", leftScript.template())),
@@ -141,6 +141,6 @@
         In other = (In) obj;
         return Objects.equals(value, other.value)
             && Objects.equals(list, other.list);
     }
 }

@@ -213,7 +213,7 @@ public class QueryTranslatorTests extends ESTestCase {
         assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.in(" +
             "InternalSqlScriptUtils.power(InternalSqlScriptUtils.docValue(doc,params.v0),params.v1), params.v2))",
             sc.script().toString());
-        assertEquals("[{v=int}, {v=2}, {v=[10.0, 20.0]}]", sc.script().params().toString());
+        assertEquals("[{v=int}, {v=2}, {v=[10.0, null, 20.0]}]", sc.script().params().toString());
     }
 
     public void testTranslateInExpression_HavingClause_Painless() {
@@ -259,6 +259,6 @@ public class QueryTranslatorTests extends ESTestCase {
         assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.in(params.a0, params.v0))",
             aggFilter.scriptTemplate().toString());
         assertThat(aggFilter.scriptTemplate().params().toString(), startsWith("[{a=MAX(int){a->"));
-        assertThat(aggFilter.scriptTemplate().params().toString(), endsWith(", {v=[10, 20, 30]}]"));
+        assertThat(aggFilter.scriptTemplate().params().toString(), endsWith(", {v=[10, null, 20, 30]}]"));
     }
 }
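The In change above stops stripping null from the folded value list, so a NULL literal now reaches the painless script params, which is exactly what the updated expectations ([10.0, null, 20.0] and [10, null, 20, 30]) assert. A rough sketch of the three-valued IN semantics this makes possible — an assumption about the intent, not the actual InternalSqlScriptUtils.in implementation:

---------------------------------------------------------------------------
import java.util.List;

// Illustrative only: SQL-style "value IN (list)" with three-valued logic.
// TRUE on a non-null match, FALSE when nothing matches and the list has no NULL,
// and null (unknown) when the value is NULL or an unmatched NULL is present.
final class InSemanticsSketch {

    static Boolean in(Object value, List<Object> values) {
        if (value == null) {
            return null;
        }
        boolean sawNull = false;
        for (Object candidate : values) {
            if (candidate == null) {
                sawNull = true;
            } else if (candidate.equals(value)) {
                return true;
            }
        }
        return sawNull ? null : false;
    }
}
---------------------------------------------------------------------------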

@@ -142,7 +142,12 @@ subprojects {
   configure(extensions.findByName("${baseName}#oldClusterTestCluster")) {
     dependsOn copyTestNodeKeystore
     if (version.before('6.3.0')) {
-      mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${version}"
+      String depVersion = version;
+      if (project.bwcVersions.unreleased.contains(version)) {
+        depVersion += "-SNAPSHOT"
+      }
+      mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${depVersion}"
     }
     bwcVersion = version
     numBwcNodes = 2

@@ -30,7 +30,12 @@ for (Version version : bwcVersions.wireCompatible) {
   configure(extensions.findByName("${baseName}#oldClusterTestCluster")) {
     if (version.before('6.3.0')) {
-      mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${version}"
+      String depVersion = version;
+      if (project.bwcVersions.unreleased.contains(version)) {
+        depVersion += "-SNAPSHOT"
+      }
+      mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${depVersion}"
     }
     bwcVersion = version
     numBwcNodes = 3

@@ -124,7 +124,11 @@ subprojects {
   configure(extensions.findByName("${baseName}#oldClusterTestCluster")) {
     dependsOn copyTestNodeKeystore
     if (version.before('6.3.0')) {
-      mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${version}"
+      String depVersion = version;
+      if (project.bwcVersions.unreleased.contains(version)) {
+        depVersion += "-SNAPSHOT"
+      }
+      mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${depVersion}"
     }
     String usersCli = version.before('6.3.0') ? 'bin/x-pack/users' : 'bin/elasticsearch-users'
     setupCommand 'setupTestUser', usersCli, 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser'